#!/bin/bash
# Comprehensive Backup System for ArXiv Subscription Platform
#
# Usage: backup.sh {full|database|files|configs|logs|restore|cleanup}
# Every setting below can be overridden via the environment.

set -e

# Configuration
BACKUP_BASE_DIR="${BACKUP_BASE_DIR:-/var/backups/arxiv-platform}"
RETENTION_DAYS="${RETENTION_DAYS:-30}"   # prune local/S3 backups older than this
S3_BUCKET="${S3_BUCKET:-}"               # empty disables all S3 uploads
S3_REGION="${S3_REGION:-us-west-2}"
ENCRYPTION_KEY_FILE="${ENCRYPTION_KEY_FILE:-/etc/arxiv-platform/backup.key}"
COMPRESS_BACKUPS="${COMPRESS_BACKUPS:-true}"
# Fix: ENCRYPT_BACKUPS is consulted throughout the script but previously had
# no default here; "false" matches the old behavior when it was unset.
ENCRYPT_BACKUPS="${ENCRYPT_BACKUPS:-false}"
EMAIL_NOTIFICATIONS="${EMAIL_NOTIFICATIONS:-true}"
NOTIFICATION_EMAIL="${NOTIFICATION_EMAIL:-admin@example.com}"

# Database configuration
DB_HOST="${DB_HOST:-localhost}"
DB_PORT="${DB_PORT:-5432}"
DB_NAME="${DB_NAME:-arxiv_platform}"
DB_USER="${DB_USER:-postgres}"
DB_PASSWORD="${DB_PASSWORD:-}"
# Exported so pg_isready/pg_dump/pg_restore authenticate without prompting.
export PGPASSWORD="$DB_PASSWORD"

# Colors for terminal output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

log() {
    # Timestamped status line to stderr and the log file.
    # Fix: diagnostics must NOT go to stdout — several backup_* functions
    # return a file path via stdout and are invoked under command
    # substitution (e.g. db_backup=$(backup_database)), which previously
    # captured these log lines along with the path.
    echo -e "${GREEN}[$(date '+%Y-%m-%d %H:%M:%S')]${NC} $1" | tee -a "$LOG_FILE" >&2
}

warn() {
    # Non-fatal warning to stderr and the log file.
    # Fix: route through stderr so stdout stays clean for functions that
    # return file paths via command substitution.
    echo -e "${YELLOW}[$(date '+%Y-%m-%d %H:%M:%S')] WARNING:${NC} $1" | tee -a "$LOG_FILE" >&2
}

error() {
    # Fatal: log to stderr + log file, send a failure notification, and
    # abort the whole script with status 1.
    # Fix: route the message through stderr so stdout stays clean for
    # functions that return file paths via command substitution.
    echo -e "${RED}[$(date '+%Y-%m-%d %H:%M:%S')] ERROR:${NC} $1" | tee -a "$LOG_FILE" >&2
    send_notification "BACKUP FAILED" "$1"
    exit 1
}

info() {
    # Informational line to stderr and the log file.
    # Fix: info() is called inside every path-returning backup function;
    # writing to stdout previously corrupted the captured return value.
    echo -e "${BLUE}[$(date '+%Y-%m-%d %H:%M:%S')] INFO:${NC} $1" | tee -a "$LOG_FILE" >&2
}

setup_backup_environment() {
    # Create the backup directory tree with owner-only permissions and,
    # when encryption is enabled, make sure an encryption key exists.
    log "Setting up backup environment..."

    local subdir
    for subdir in \
        "$BACKUP_BASE_DIR" \
        "$BACKUP_BASE_DIR/database" \
        "$BACKUP_BASE_DIR/files" \
        "$BACKUP_BASE_DIR/configs" \
        "$BACKUP_BASE_DIR/logs" \
        "$BACKUP_BASE_DIR/docker-volumes" \
        "$(dirname "$LOG_FILE")"
    do
        mkdir -p "$subdir"
        chmod 700 "$subdir"
    done

    # First run with encryption enabled: generate a random 256-bit key.
    if [[ "$ENCRYPT_BACKUPS" == "true" && ! -f "$ENCRYPTION_KEY_FILE" ]]; then
        mkdir -p "$(dirname "$ENCRYPTION_KEY_FILE")"
        openssl rand -base64 32 > "$ENCRYPTION_KEY_FILE"
        chmod 600 "$ENCRYPTION_KEY_FILE"
        info "Generated new encryption key"
    fi

    log "Backup environment ready"
}

send_notification() {
    # Email $2 with subject $1 via mail(1) if installed, else sendmail(8).
    # Silently does nothing when notifications are disabled or no
    # recipient is configured.
    local subject="$1"
    local body="$2"

    [[ "$EMAIL_NOTIFICATIONS" == "true" ]] || return 0
    [[ -n "$NOTIFICATION_EMAIL" ]] || return 0

    if command -v mail >/dev/null 2>&1; then
        echo "$body" | mail -s "[$HOSTNAME] ArXiv Platform: $subject" "$NOTIFICATION_EMAIL"
    elif command -v sendmail >/dev/null 2>&1; then
        {
            echo "To: $NOTIFICATION_EMAIL"
            echo "Subject: [$HOSTNAME] ArXiv Platform: $subject"
            echo "From: backups@$(hostname -f)"
            echo ""
            echo "$body"
        } | sendmail "$NOTIFICATION_EMAIL"
    fi
}

encrypt_file() {
    # Encrypt $1 into $2 with AES-256-CBC using the shared key file, then
    # delete the plaintext.  Plain rename when encryption is disabled.
    #
    # NOTE(review): openssl enc without -pbkdf2 uses the legacy key
    # derivation; kept as-is so existing backups remain restorable by
    # restore_backup(), which must use identical parameters.
    local input_file="$1"
    local output_file="$2"

    if [[ "$ENCRYPT_BACKUPS" == "true" ]]; then
        if [[ ! -f "$ENCRYPTION_KEY_FILE" ]]; then
            # Fix: previously a missing key silently fell through to a
            # plain rename, leaving an UNENCRYPTED file under an
            # .enc-style name.  Keep the rename (callers expect
            # $output_file to exist) but make the problem loud.
            echo "WARNING: encryption key $ENCRYPTION_KEY_FILE missing; storing $output_file UNENCRYPTED" >&2
            mv "$input_file" "$output_file"
            return 0
        fi
        openssl enc -aes-256-cbc -salt -in "$input_file" -out "$output_file" -pass file:"$ENCRYPTION_KEY_FILE"
        rm "$input_file"
    else
        mv "$input_file" "$output_file"
    fi
}

compress_file() {
    # Gzip $1 in place when compression is enabled.  Prints the resulting
    # path on stdout: "<file>.gz" when compressed, the original path
    # otherwise.
    local target="$1"

    if [[ "$COMPRESS_BACKUPS" != "true" ]]; then
        echo "$target"
        return 0
    fi

    gzip "$target"
    echo "${target}.gz"
}

backup_database() {
    # Dump the PostgreSQL database, optionally compress/encrypt the dump,
    # optionally push it to S3, and print the final backup path on stdout
    # (callers capture it via command substitution).
    log "Starting database backup..."

    # Split declaration from command substitution so a failing command
    # isn't masked by `local`'s own exit status.
    local timestamp
    timestamp=$(date +%Y%m%d_%H%M%S)
    local backup_file="$BACKUP_BASE_DIR/database/arxiv_platform_${timestamp}.sql"
    local final_file

    # Fail fast if the server is unreachable.
    if ! pg_isready -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" >/dev/null 2>&1; then
        error "Database is not accessible"
    fi

    # Dump in PostgreSQL custom format.  Note --compress=9 already
    # compresses the dump internally; the optional gzip below is kept for
    # naming compatibility with existing tooling.
    # Fix: the old code tested $? after pg_dump, which was dead code under
    # `set -e` (the script would already have exited on failure).  Test
    # the command directly instead.
    info "Creating database dump..."
    if ! pg_dump -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" \
        --format=custom \
        --compress=9 \
        --verbose \
        --file="$backup_file" \
        --no-password; then
        error "Database backup failed"
    fi

    local file_size
    file_size=$(du -h "$backup_file" | cut -f1)
    info "Database dump created: $file_size"

    if [[ "$COMPRESS_BACKUPS" == "true" ]]; then
        info "Compressing database backup..."
        gzip "$backup_file"
        backup_file="${backup_file}.gz"
    fi

    if [[ "$ENCRYPT_BACKUPS" == "true" ]]; then
        info "Encrypting database backup..."
        encrypt_file "$backup_file" "${backup_file}.enc"
        final_file="${backup_file}.enc"
    else
        final_file="$backup_file"
    fi

    # Off-site copy when an S3 bucket is configured.
    if [[ -n "$S3_BUCKET" ]]; then
        upload_to_s3 "$final_file" "database/"
    fi

    log "Database backup completed: $(basename "$final_file")"
    echo "$final_file"
}

backup_application_files() {
    # Archive application code, nginx configuration and SSL material,
    # then compress/encrypt/upload as configured.  Prints the final
    # backup path on stdout (empty when nothing was archived).
    log "Starting application files backup..."

    local timestamp
    timestamp=$(date +%Y%m%d_%H%M%S)
    local backup_file="$BACKUP_BASE_DIR/files/app_files_${timestamp}.tar"
    # Fix: initialize explicitly — previously this stayed unset on the
    # failure path, so the trailing echo printed an undefined variable.
    local final_file=""

    # Application directories to back up.
    local app_dirs=(
        "/opt/arxiv-platform"
        "/etc/nginx/sites-available"
        "/etc/nginx/sites-enabled"
        "/etc/ssl/certs/arxiv"
        "/etc/ssl/private/arxiv"
    )

    # Best-effort archive: missing directories are tolerated (|| true)
    # so a partial deployment still gets backed up.
    info "Creating application files archive..."
    tar -cf "$backup_file" \
        --exclude="*.log" \
        --exclude="node_modules" \
        --exclude=".git" \
        --exclude="__pycache__" \
        --exclude="*.pyc" \
        "${app_dirs[@]}" 2>/dev/null || true

    if [[ -f "$backup_file" ]]; then
        local file_size
        file_size=$(du -h "$backup_file" | cut -f1)
        info "Application files archive created: $file_size"

        final_file=$(compress_file "$backup_file")

        if [[ "$ENCRYPT_BACKUPS" == "true" ]]; then
            encrypt_file "$final_file" "${final_file}.enc"
            final_file="${final_file}.enc"
        fi

        if [[ -n "$S3_BUCKET" ]]; then
            upload_to_s3 "$final_file" "files/"
        fi

        log "Application files backup completed: $(basename "$final_file")"
    else
        warn "Application files backup failed or no files found"
    fi

    echo "$final_file"
}

backup_docker_volumes() {
    # Archive every Docker volume whose name matches the platform
    # (arxiv/prometheus/grafana) using a throwaway Alpine container to
    # read the volume contents.  Each archive is optionally encrypted and
    # uploaded to S3.  Returns 0 when there is nothing to back up.
    log "Starting Docker volumes backup..."

    local timestamp
    timestamp=$(date +%Y%m%d_%H%M%S)
    local backup_base="$BACKUP_BASE_DIR/docker-volumes"

    # Matching volume names, one per line (empty when none match).
    local volumes
    volumes=$(docker volume ls -q | grep -E "(arxiv|prometheus|grafana)" || true)

    if [[ -z "$volumes" ]]; then
        info "No Docker volumes found to backup"
        return 0
    fi

    local volume
    for volume in $volumes; do
        info "Backing up Docker volume: $volume"

        local archive="$backup_base/${volume}_${timestamp}.tar.gz"

        # Short-lived container: mount the volume read-only and tar it
        # straight into the backup directory.
        docker run --rm \
            -v "$volume":/source:ro \
            -v "$backup_base":/backup \
            alpine tar czf "/backup/$(basename "$archive")" -C /source .

        if [[ ! -f "$archive" ]]; then
            warn "Docker volume backup failed: $volume"
            continue
        fi

        local final_file="$archive"

        if [[ "$ENCRYPT_BACKUPS" == "true" ]]; then
            encrypt_file "$final_file" "${final_file}.enc"
            final_file="${final_file}.enc"
        fi

        if [[ -n "$S3_BUCKET" ]]; then
            upload_to_s3 "$final_file" "volumes/"
        fi

        info "Docker volume backup completed: $volume"
    done

    log "Docker volumes backup completed"
}

backup_system_configs() {
    # Archive host-level configuration (cron, systemd unit, fail2ban,
    # firewall, maintenance scripts), then compress/encrypt/upload as
    # configured.  Prints the final backup path on stdout (empty on
    # failure).
    log "Starting system configuration backup..."

    local timestamp
    timestamp=$(date +%Y%m%d_%H%M%S)
    local backup_file="$BACKUP_BASE_DIR/configs/system_configs_${timestamp}.tar"
    # Fix: initialize explicitly — previously this stayed unset on the
    # failure path, so the trailing echo printed an undefined variable.
    local final_file=""

    # System configuration files to back up; missing ones are tolerated.
    local config_files=(
        "/etc/hosts"
        "/etc/crontab"
        "/etc/systemd/system/arxiv-platform.service"
        "/etc/logrotate.d/arxiv-platform"
        "/etc/fail2ban/jail.local"
        "/etc/ufw/applications.d/arxiv-platform"
        "/usr/local/bin/daily-db-maintenance.sh"
        "/usr/local/bin/weekly-db-maintenance.sh"
        "/usr/local/bin/check-ssl-expiry.sh"
    )

    # Best-effort: absent files are skipped rather than fatal.
    info "Creating system configuration archive..."
    tar -cf "$backup_file" "${config_files[@]}" 2>/dev/null || true

    if [[ -f "$backup_file" ]]; then
        final_file=$(compress_file "$backup_file")

        if [[ "$ENCRYPT_BACKUPS" == "true" ]]; then
            encrypt_file "$final_file" "${final_file}.enc"
            final_file="${final_file}.enc"
        fi

        if [[ -n "$S3_BUCKET" ]]; then
            upload_to_s3 "$final_file" "configs/"
        fi

        log "System configuration backup completed: $(basename "$final_file")"
    else
        warn "System configuration backup failed"
    fi

    echo "$final_file"
}

backup_logs() {
    # Archive platform/nginx/postgres log files modified within the last
    # 7 days, then compress/encrypt/upload as configured.  Prints the
    # final backup path on stdout (empty when no recent logs exist).
    log "Starting logs backup..."

    local timestamp
    timestamp=$(date +%Y%m%d_%H%M%S)
    local backup_file="$BACKUP_BASE_DIR/logs/logs_${timestamp}.tar"
    local final_file=""

    # Log directories to back up.
    local log_dirs=(
        "/var/log/arxiv-platform"
        "/var/log/nginx"
        "/var/log/postgresql"
    )

    # Fix: the old `find | xargs tar -cf` pipeline broke on paths with
    # whitespace and, worse, when xargs split a long file list into
    # several invocations, each `tar -cf` run OVERWROTE the previous
    # archive, silently dropping files.  Collect the list NUL-delimited
    # and run tar exactly once.
    info "Creating logs archive (last 7 days)..."
    local -a log_files=()
    local f
    while IFS= read -r -d '' f; do
        log_files+=("$f")
    done < <(find "${log_dirs[@]}" -type f -mtime -7 -name "*.log*" -print0 2>/dev/null)

    if (( ${#log_files[@]} > 0 )); then
        tar -cf "$backup_file" -- "${log_files[@]}" 2>/dev/null || true
    fi

    if [[ -f "$backup_file" && -s "$backup_file" ]]; then
        final_file=$(compress_file "$backup_file")

        if [[ "$ENCRYPT_BACKUPS" == "true" ]]; then
            encrypt_file "$final_file" "${final_file}.enc"
            final_file="${final_file}.enc"
        fi

        if [[ -n "$S3_BUCKET" ]]; then
            upload_to_s3 "$final_file" "logs/"
        fi

        log "Logs backup completed: $(basename "$final_file")"
    else
        info "No recent logs found to backup"
        rm -f "$backup_file"
    fi

    echo "$final_file"
}

upload_to_s3() {
    # Copy $1 into s3://$S3_BUCKET under key prefix $2 (e.g. "database/").
    # A missing AWS CLI or a failed upload is reported but never fatal.
    local file="$1"
    local s3_prefix="$2"
    local s3_key="${s3_prefix}$(basename "$file")"

    info "Uploading to S3: s3://$S3_BUCKET/$s3_key"

    if ! command -v aws >/dev/null 2>&1; then
        warn "AWS CLI not available, skipping S3 upload"
        return 0
    fi

    if aws s3 cp "$file" "s3://$S3_BUCKET/$s3_key" --region "$S3_REGION"; then
        info "Successfully uploaded to S3"
    else
        warn "Failed to upload to S3"
    fi
}

cleanup_old_backups() {
    # Delete local and (when configured) S3 backups older than
    # RETENTION_DAYS days.
    log "Cleaning up old backups (older than $RETENTION_DAYS days)..."

    # Local cleanup.
    find "$BACKUP_BASE_DIR" -type f -mtime +"$RETENTION_DAYS" -delete

    # S3 cleanup if configured.
    if [[ -n "$S3_BUCKET" ]] && command -v aws >/dev/null 2>&1; then
        info "Cleaning up old S3 backups..."

        # NOTE(review): `date -d` is GNU-specific; BSD/macOS date needs -v.
        local cutoff_date
        cutoff_date=$(date -d "$RETENTION_DAYS days ago" +%Y-%m-%d)

        # Fix: pass the cutoff via `awk -v` instead of interpolating a
        # shell variable into the awk program text.  Lexicographic
        # comparison is correct for YYYY-MM-DD dates.
        # NOTE(review): `print $4` truncates keys containing spaces —
        # acceptable as long as backup names never contain whitespace.
        aws s3 ls "s3://$S3_BUCKET/" --recursive | \
            awk -v cutoff="$cutoff_date" '$1 < cutoff {print $4}' | \
            while read -r key; do
                aws s3 rm "s3://$S3_BUCKET/$key"
            done
    fi

    log "Cleanup completed"
}

verify_backup() {
    # Sanity-check a backup artifact by type: gzip archives get a CRC
    # test, plain tars a listing test, encrypted files are skipped (they
    # cannot be inspected without the key).  Returns 1 when the file is
    # missing; error() aborts the whole script on a failed check.
    local backup_file="$1"
    local backup_type="$2"

    info "Verifying $backup_type backup..."

    if [[ ! -f "$backup_file" ]]; then
        warn "$backup_type backup file not found: $backup_file"
        return 1
    fi

    case "$backup_type" in
        "database")
            if [[ "$backup_file" == *.gz ]]; then
                if gzip -t "$backup_file"; then
                    info "Database backup integrity verified"
                else
                    error "Database backup integrity check failed"
                fi
            elif [[ "$backup_file" == *.enc ]]; then
                info "Encrypted backup - integrity check skipped"
            else
                # PostgreSQL custom-format dump: listing its table of
                # contents is a cheap validity check.
                if pg_restore -l "$backup_file" >/dev/null 2>&1; then
                    info "Database backup integrity verified"
                else
                    error "Database backup integrity check failed"
                fi
            fi
            ;;
        "files"|"configs"|"logs")
            if [[ "$backup_file" == *.gz ]]; then
                if gzip -t "$backup_file"; then
                    info "$backup_type backup integrity verified"
                else
                    error "$backup_type backup integrity check failed"
                fi
            elif [[ "$backup_file" == *.tar ]]; then
                if tar -tf "$backup_file" >/dev/null 2>&1; then
                    info "$backup_type backup integrity verified"
                else
                    error "$backup_type backup integrity check failed"
                fi
            else
                info "Encrypted backup - integrity check skipped"
            fi
            ;;
    esac
}

generate_backup_report() {
    # Build a human-readable summary of this backup run, write it to a
    # timestamped report file under BACKUP_BASE_DIR, and email it when
    # notifications are enabled.
    #
    # Arguments:
    #   $1 - start epoch (seconds)   $2 - end epoch (seconds)
    #   $3 - database backup path    $4 - application files backup path
    #   $5 - system configs path     $6 - logs backup path
    # Empty or missing paths show up as blank/"Failed"/"N/A" fields.
    local start_time="$1"
    local end_time="$2"
    local db_backup="$3"
    local files_backup="$4"
    local configs_backup="$5"
    local logs_backup="$6"
    
    local duration=$((end_time - start_time))
    local report_file="$BACKUP_BASE_DIR/backup_report_$(date +%Y%m%d_%H%M%S).txt"
    
    # Unquoted EOF delimiter: every $var and $(cmd) inside the here-doc
    # is expanded at the moment the report is written.
    cat > "$report_file" << EOF
ArXiv Subscription Platform Backup Report
=========================================

Date: $(date)
Hostname: $(hostname)
Duration: ${duration} seconds

Backup Files Created:
--------------------
Database: $(basename "$db_backup" 2>/dev/null || echo "Failed")
  Size: $(du -h "$db_backup" 2>/dev/null | cut -f1 || echo "N/A")

Application Files: $(basename "$files_backup" 2>/dev/null || echo "Failed") 
  Size: $(du -h "$files_backup" 2>/dev/null | cut -f1 || echo "N/A")

System Configs: $(basename "$configs_backup" 2>/dev/null || echo "Failed")
  Size: $(du -h "$configs_backup" 2>/dev/null | cut -f1 || echo "N/A")

Logs: $(basename "$logs_backup" 2>/dev/null || echo "Failed")
  Size: $(du -h "$logs_backup" 2>/dev/null | cut -f1 || echo "N/A")

Backup Settings:
---------------
Compression: $COMPRESS_BACKUPS
Encryption: ${ENCRYPT_BACKUPS:-false}
S3 Upload: $([ -n "$S3_BUCKET" ] && echo "Enabled ($S3_BUCKET)" || echo "Disabled")
Retention: $RETENTION_DAYS days

System Information:
------------------
Disk Usage: $(df -h "$BACKUP_BASE_DIR" | tail -1)
Memory Usage: $(free -h | grep Mem)
Load Average: $(uptime | cut -d'load' -f2)

EOF
    
    info "Backup report generated: $report_file"
    
    # Send report via email
    if [[ "$EMAIL_NOTIFICATIONS" == "true" ]]; then
        send_notification "Backup Completed Successfully" "$(cat "$report_file")"
    fi
}

# Main backup function
run_backup() {
    # Orchestrate one backup run: prepare the environment, take the
    # requested backup(s), verify the produced artifacts, prune old
    # backups and emit a report.  $1 selects the backup type
    # (default: full).
    local backup_type="${1:-full}"
    local start_time
    start_time=$(date +%s)

    log "Starting $backup_type backup for ArXiv Subscription Platform..."

    setup_backup_environment

    local db_backup=""
    local files_backup=""
    local configs_backup=""
    local logs_backup=""

    case "$backup_type" in
        "full")
            db_backup=$(backup_database)
            backup_docker_volumes
            files_backup=$(backup_application_files)
            configs_backup=$(backup_system_configs)
            logs_backup=$(backup_logs)
            ;;
        "database")
            db_backup=$(backup_database)
            ;;
        "files")
            files_backup=$(backup_application_files)
            backup_docker_volumes
            ;;
        "configs")
            configs_backup=$(backup_system_configs)
            ;;
        "logs")
            logs_backup=$(backup_logs)
            ;;
        *)
            error "Invalid backup type: $backup_type. Use: full, database, files, configs, logs"
            ;;
    esac

    # Verify whichever artifacts were actually produced.
    if [[ -n "$db_backup" ]]; then verify_backup "$db_backup" "database"; fi
    if [[ -n "$files_backup" ]]; then verify_backup "$files_backup" "files"; fi
    if [[ -n "$configs_backup" ]]; then verify_backup "$configs_backup" "configs"; fi
    if [[ -n "$logs_backup" ]]; then verify_backup "$logs_backup" "logs"; fi

    cleanup_old_backups

    local end_time
    end_time=$(date +%s)
    generate_backup_report "$start_time" "$end_time" "$db_backup" "$files_backup" "$configs_backup" "$logs_backup"

    log "Backup completed successfully in $((end_time - start_time)) seconds"
}

# Restore function
restore_backup() {
    # Restore from a backup artifact.  $1 = backup file, $2 = restore
    # type (only "database" is implemented).  Encrypted (.enc) and
    # gzipped (.gz) dumps are unwrapped in place before restoring.
    local backup_file="$1"
    local restore_type="$2"

    if [[ ! -f "$backup_file" ]]; then
        error "Backup file not found: $backup_file"
    fi

    log "Starting restore from: $backup_file"

    case "$restore_type" in
        "database")
            log "Restoring database..."

            # Decrypt if needed (parameters must mirror encrypt_file's).
            if [[ "$backup_file" =~ \.enc$ ]]; then
                local decrypted_file="${backup_file%.enc}"
                openssl enc -aes-256-cbc -d -in "$backup_file" -out "$decrypted_file" -pass file:"$ENCRYPTION_KEY_FILE"
                backup_file="$decrypted_file"
            fi

            # Decompress if needed
            if [[ "$backup_file" =~ \.gz$ ]]; then
                gunzip "$backup_file"
                backup_file="${backup_file%.gz}"
            fi

            # Fix: with --clean --create, pg_restore drops and recreates
            # the target database, so it must connect to a maintenance
            # database rather than the one being dropped.  Connecting to
            # $DB_NAME made DROP DATABASE fail with "cannot drop the
            # currently open database".  The database named in the dump
            # is still the one recreated and restored into.
            pg_restore -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d postgres \
                --clean --create --verbose "$backup_file"

            log "Database restore completed"
            ;;
        *)
            error "Restore type not implemented: $restore_type"
            ;;
    esac
}

# Set up log file.  Generalized: LOG_FILE may now be overridden via the
# environment; the dated default is unchanged.
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
LOG_FILE="${LOG_FILE:-/var/log/arxiv-platform/backup_${TIMESTAMP}.log}"
mkdir -p "$(dirname "$LOG_FILE")"

# Main execution: dispatch on the first CLI argument (default: full).
case "${1:-full}" in
    "full"|"database"|"files"|"configs"|"logs")
        run_backup "${1:-full}"
        ;;
    "restore")
        # restore <backup-file> <restore-type>
        # ${2:-}/${3:-} keep this safe if `set -u` is ever enabled.
        restore_backup "${2:-}" "${3:-}"
        ;;
    "cleanup")
        cleanup_old_backups
        ;;
    *)
        echo "Usage: $0 {full|database|files|configs|logs|restore|cleanup}"
        echo "  full     - Complete backup (default)"
        echo "  database - Database only"
        echo "  files    - Application files and Docker volumes"
        echo "  configs  - System configuration files" 
        echo "  logs     - Log files (last 7 days)"
        echo "  restore  - Restore from backup file"
        echo "  cleanup  - Remove old backups"
        exit 1
        ;;
esac