#!/bin/bash
# Backup script for Supabase Storage or filesystem storage
# Usage: ./backup-storage.sh [--supabase|--filesystem]
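#
# Example cron entry (illustrative only; adjust the script path, BACKUP_DIR and
# log location, and make sure any Supabase variables are available in cron's
# environment) for a nightly run at 03:00:
#   0 3 * * * BACKUP_DIR=/opt/backups/storage /path/to/backup-storage.sh >> /var/log/storage-backup.log 2>&1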

set -e

BACKUP_DIR="${BACKUP_DIR:-/opt/backups/storage}"
DATE=$(date +%Y%m%d_%H%M%S)
RETENTION_DAYS="${RETENTION_DAYS:-30}"

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

log() {
    echo -e "${GREEN}[$(date +'%Y-%m-%d %H:%M:%S')]${NC} $1"
}

error() {
    echo -e "${RED}[ERROR]${NC} $1" >&2
}

warn() {
    echo -e "${YELLOW}[WARN]${NC} $1"
}

# Create backup directory
mkdir -p "$BACKUP_DIR"

# Determine storage type
STORAGE_TYPE="${1:-auto}"
# Normalize "--supabase" / "--filesystem" (as in the usage line) to match the checks below
STORAGE_TYPE="${STORAGE_TYPE#--}"

if [ "$STORAGE_TYPE" = "auto" ]; then
    # Check if Supabase Storage is configured
    if [ -n "$NEXT_PUBLIC_SUPABASE_URL" ] && [ -n "$SUPABASE_SERVICE_ROLE_KEY" ]; then
        STORAGE_TYPE="supabase"
    else
        STORAGE_TYPE="filesystem"
    fi
fi

log "Starting storage backup (type: $STORAGE_TYPE)..."

if [ "$STORAGE_TYPE" = "supabase" ]; then
    # Supabase Storage backup
    log "Backing up Supabase Storage..."

    if [ -z "$NEXT_PUBLIC_SUPABASE_URL" ] || [ -z "$SUPABASE_SERVICE_ROLE_KEY" ]; then
        error "Supabase environment variables not set"
        exit 1
    fi

    BACKUP_FILE="$BACKUP_DIR/storage_supabase_$DATE.tar.gz"

    # Use Supabase CLI or API to list and download files
    # This is a placeholder - actual implementation depends on Supabase setup
    log "Note: Supabase Storage backup requires manual setup or Supabase CLI"
    log "Backup file: $BACKUP_FILE"

    # Example: If using Supabase CLI
    # supabase storage download gallery --output "$BACKUP_DIR/storage_$DATE" || {
    #     error "Failed to download Supabase Storage files"
    #     exit 1
    # }
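
    # Alternative sketch (not enabled here): pull objects via the Supabase
    # Storage REST API with curl + jq. Assumptions: the same "gallery" bucket
    # as the CLI example above, the service role key can read it, jq is
    # installed, and the bucket has no nested folders (listing is not
    # recursive). Verify the endpoints against your Supabase version before
    # relying on this, and if you enable it, also drop the early exit below so
    # the archive gets verified.
    #
    # TMP_DIR="$BACKUP_DIR/storage_$DATE"
    # mkdir -p "$TMP_DIR"
    # curl -sf -X POST "$NEXT_PUBLIC_SUPABASE_URL/storage/v1/object/list/gallery" \
    #     -H "Authorization: Bearer $SUPABASE_SERVICE_ROLE_KEY" \
    #     -H "apikey: $SUPABASE_SERVICE_ROLE_KEY" \
    #     -H "Content-Type: application/json" \
    #     -d '{"prefix":"","limit":1000,"offset":0}' |
    #   jq -r '.[].name' |
    #   while read -r OBJECT; do
    #     curl -sf "$NEXT_PUBLIC_SUPABASE_URL/storage/v1/object/gallery/$OBJECT" \
    #         -H "Authorization: Bearer $SUPABASE_SERVICE_ROLE_KEY" \
    #         -o "$TMP_DIR/$OBJECT"
    #   done
    # tar -czf "$BACKUP_FILE" -C "$BACKUP_DIR" "storage_$DATE" && rm -rf "$TMP_DIR"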

    warn "Supabase Storage backup not fully automated. Manual backup recommended."

    # Nothing was written automatically, so skip the verification below and exit cleanly
    exit 0

elif [ "$STORAGE_TYPE" = "filesystem" ]; then
    # Filesystem storage backup
    log "Backing up filesystem storage..."

    STORAGE_PATH="${STORAGE_PATH:-./public/uploads}"

    if [ ! -d "$STORAGE_PATH" ]; then
        warn "Storage path not found: $STORAGE_PATH"
        exit 0
    fi

    BACKUP_FILE="$BACKUP_DIR/storage_filesystem_$DATE.tar.gz"

    # Create tar archive
    tar -czf "$BACKUP_FILE" -C "$(dirname "$STORAGE_PATH")" "$(basename "$STORAGE_PATH")" || {
        error "Failed to create storage backup"
        exit 1
    }

    BACKUP_SIZE=$(du -h "$BACKUP_FILE" | cut -f1)
    log "Storage backup created: $BACKUP_FILE ($BACKUP_SIZE)"

else
    error "Invalid storage type: $STORAGE_TYPE"
    error "Use: --supabase, --filesystem, or auto (default)"
    exit 1
fi

# Verify backup file exists and is not empty
if [ ! -f "$BACKUP_FILE" ] || [ ! -s "$BACKUP_FILE" ]; then
    error "Backup file is missing or empty"
    exit 1
fi

# Clean up old backups
log "Cleaning up backups older than $RETENTION_DAYS days..."
find "$BACKUP_DIR" -name "storage_*.tar.gz" -type f -mtime "+$RETENTION_DAYS" -delete || {
    warn "Failed to clean up old backups"
}

REMAINING_BACKUPS=$(find "$BACKUP_DIR" -name "storage_*.tar.gz" -type f | wc -l)
log "Remaining backups: $REMAINING_BACKUPS"

# Optional: Upload to off-site storage
if [ -n "$BACKUP_S3_BUCKET" ] && command -v aws &> /dev/null; then
    log "Uploading backup to S3..."
    aws s3 cp "$BACKUP_FILE" "s3://$BACKUP_S3_BUCKET/storage/" || {
        warn "Failed to upload to S3, but local backup succeeded"
    }
fi
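
# A copy uploaded above can be pulled back down later with, for example:
#   aws s3 cp "s3://$BACKUP_S3_BUCKET/storage/$(basename "$BACKUP_FILE")" "$BACKUP_DIR/"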

log "Storage backup completed successfully!"
log "Backup file: $BACKUP_FILE"