#!/bin/bash
# {{ ansible_managed }}
# This file was updated by update_file tool
# =============================================================================
# 数据备份脚本 - Data Backup Script
# 监控基础设施的应用数据备份脚本
# 
# 此脚本由Ansible自动生成，专门负责备份应用数据目录
# 包括Web应用数据、监控系统数据等重要业务数据
# 
# 功能特性：
# - 备份重要应用数据目录
# - 支持压缩和加密
# - 支持远程同步
# - 自动清理过期备份
# - 详细的日志记录
# 
# 使用方法：
#   ./backup_data.sh
# =============================================================================

# Strict error handling: -e aborts on unhandled failures, -u on unset
# variables, pipefail propagates pipeline failures. -E is required so the
# ERR trap installed at the bottom of this script also fires for errors
# inside functions (ERR traps are NOT inherited by functions otherwise).
set -Eeuo pipefail

# =============================================================================
# 配置参数 - Configuration Parameters
# =============================================================================

# Root directory for all backups (rendered by Ansible).
BACKUP_BASE_DIR="{{ backup_base_dir }}"

# Scratch space for exclude lists and other temporary files.
BACKUP_TEMP_DIR="{{ backup_temp_dir }}"

# Directory that receives the per-run log files.
BACKUP_LOG_DIR="{{ backup_log_dir }}"

# Destination directory for application-data archives.
DATA_BACKUP_DIR="${BACKUP_BASE_DIR}/data"

# Timestamp used to name this run's archives and log file.
TIMESTAMP=$(date +"%Y%m%d_%H%M%S")

# Log file for this run.
LOG_FILE="${BACKUP_LOG_DIR}/data_backup_${TIMESTAMP}.log"

# Compression method (informational: compress_backup currently runs gzip).
COMPRESSION_METHOD="gzip"

# Freeze the configuration so accidental reassignment fails loudly.
readonly BACKUP_BASE_DIR BACKUP_TEMP_DIR BACKUP_LOG_DIR DATA_BACKUP_DIR \
    TIMESTAMP LOG_FILE COMPRESSION_METHOD

# =============================================================================
# 日志函数 - Logging Functions
# =============================================================================

# Append a timestamped, level-tagged message to $LOG_FILE and echo it.
# Globals:   LOG_FILE (read) - destination log file path
# Arguments: $1 - level tag (INFO/WARN/ERROR); remaining args - message
log() {
    local level="$1"
    shift
    # Ensure the log directory exists: main() logs BEFORE calling
    # create_backup_dirs(), so on a fresh host 'tee -a' would otherwise
    # fail and abort the whole script under 'set -euo pipefail'.
    mkdir -p "$(dirname "$LOG_FILE")"
    printf '[%s] [%s] %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$level" "$*" | tee -a "$LOG_FILE"
}

# Log a message at INFO severity via the shared log() helper.
log_info() { log INFO "$@"; }

# Log a message at ERROR severity via the shared log() helper.
log_error() { log ERROR "$@"; }

# Log a message at WARN severity via the shared log() helper.
log_warn() { log WARN "$@"; }

# =============================================================================
# 备份函数 - Backup Functions
# =============================================================================

# Ensure the backup, temp, and log directory trees all exist.
# mkdir -p is idempotent, so repeated runs are harmless.
create_backup_dirs() {
    log_info "创建备份目录结构..."
    mkdir -p "$DATA_BACKUP_DIR" "$BACKUP_TEMP_DIR" "$BACKUP_LOG_DIR"
}

# Back up every configured application data directory.
# For each entry in backup_data_dirs: build a per-directory exclude list,
# create a timestamped tar archive, compress it, and (when enabled) encrypt
# the compressed result. A failure for one directory is logged and the
# remaining directories are still processed.
backup_app_data() {
    log_info "开始备份应用数据..."
    
{% for data_dir in backup_data_dirs %}
    local data_archive_{{ loop.index }}="${DATA_BACKUP_DIR}/{{ data_dir.name }}_${TIMESTAMP}.tar"
    log_info "备份 {{ data_dir.name }} 数据: {{ data_dir.path }}"
    
    # Exclude-pattern file consumed by tar's -X option below.
    local exclude_file_{{ loop.index }}="${BACKUP_TEMP_DIR}/exclude_{{ data_dir.name }}_${TIMESTAMP}.txt"
    touch "$exclude_file_{{ loop.index }}"
    
{% if data_dir.exclude is defined %}
{# BUGFIX: inside the inner for-loop, 'loop.index' refers to the INNER
   loop, so exclude patterns were appended to the wrong exclude_file_N
   variable whenever the inner and outer indices differed. Capture the
   outer index before entering the inner loop and use it instead. #}
{% set outer_index = loop.index %}
{% for exclude_pattern in data_dir.exclude %}
    echo "{{ exclude_pattern }}" >> "$exclude_file_{{ outer_index }}"
{% endfor %}
{% endif %}
    
    # Create the archive. tar's stderr is deliberately discarded;
    # NOTE(review): this also hides real errors — the exit status alone
    # drives success/failure handling here. Confirm this is acceptable.
    if tar -X "$exclude_file_{{ loop.index }}" -cf "$data_archive_{{ loop.index }}" "{{ data_dir.path }}" 2>/dev/null; then
        log_info "{{ data_dir.name }} 数据备份创建成功: $data_archive_{{ loop.index }}"
        
        # gzip the archive in place (produces <archive>.gz).
        compress_backup "$data_archive_{{ loop.index }}"
        
{% if backup_encryption %}
        # Encrypt the compressed archive.
        encrypt_backup "${data_archive_{{ loop.index }}}.gz"
{% endif %}
    else
        log_error "{{ data_dir.name }} 数据备份创建失败"
    fi
    
    # Remove the per-directory exclude list.
    rm -f "$exclude_file_{{ loop.index }}"
{% endfor %}
}

# Compress a backup file in place with gzip (produces "$1.gz" and removes
# the original, per gzip's default behavior).
# Arguments: $1 - path of the file to compress.
# Returns:   0 on success, 1 on failure (logged explicitly — without
#            'set -E' the script-level ERR trap does not fire inside
#            functions, so a silent gzip failure would go unrecorded).
compress_backup() {
    local file="$1"
    log_info "压缩备份文件: $file"
    
    if gzip "$file"; then
        log_info "压缩完成: ${file}.gz"
    else
        log_error "压缩失败: $file"
        return 1
    fi
}

{% if backup_encryption %}
# Encrypt a backup file using the Ansible-configured method.
# Arguments: $1 - path to the (already compressed) backup file.
# Produces "$1.gpg" (gpg branch) or "$1.enc" (openssl branch) and removes
# the plaintext input afterwards.
# NOTE(review): both branches are interactive as written — gpg --symmetric
# and openssl enc without -pass prompt for a passphrase on a TTY and will
# fail under cron; confirm how the passphrase is supplied (e.g.
# gpg --batch --passphrase-file, or openssl -pass file:...).
encrypt_backup() {
    local file="$1"
    log_info "加密备份文件: $file"
    
{% if backup_encryption_config.method == 'gpg' %}
    # Symmetric AES-256 encryption; writes the ciphertext next to the input.
    gpg --cipher-algo AES256 --compress-algo 1 --symmetric --output "${file}.gpg" "$file"
    rm -f "$file"
{% elif backup_encryption_config.method == 'openssl' %}
    # AES-256-CBC with a random salt; writes the ciphertext as .enc.
    openssl enc -aes-256-cbc -salt -in "$file" -out "${file}.enc"
    rm -f "$file"
{% endif %}
}
{% endif %}

# Delete data-backup archives older than the configured retention window.
# Matches all archive stages (.tar, .tar.gz, .tar.gz.gpg, .tar.gz.enc).
cleanup_old_backups() {
    log_info "清理过期的数据备份文件..."
    # -type f restricts deletion to regular files, so a directory whose
    # name happens to match "*.tar*" can never be removed by -delete.
    find "$DATA_BACKUP_DIR" -type f -name "*.tar*" -mtime +{{ backup_retention_days }} -delete
    log_info "过期备份清理完成"
}

{% if backup_remote_enabled %}
# Mirror the local data-backup directory to the configured remote target
# (rsync over SSH, S3, or FTP, selected by backup_remote_type at render time).
sync_to_remote() {
    log_info "同步备份到远程存储..."
    
{% if backup_remote_type == 'rsync' %}
    # NOTE(review): --delete removes remote files that are absent locally,
    # so remote retention exactly mirrors local retention — confirm intended.
    rsync -avz --delete \
        -e "ssh -i {{ backup_remote_config.rsync.ssh_key }}" \
        "$DATA_BACKUP_DIR/" \
        "{{ backup_remote_config.rsync.user }}@{{ backup_remote_config.rsync.host }}:{{ backup_remote_config.rsync.path }}/data/"
{% elif backup_remote_type == 's3' %}
    aws s3 sync "$DATA_BACKUP_DIR/" "s3://{{ backup_remote_config.s3.bucket }}/{{ backup_remote_config.s3.prefix }}data/" \
        --storage-class {{ backup_remote_config.s3.storage_class }} \
        --region {{ backup_remote_config.s3.region }}
{% elif backup_remote_type == 'ftp' %}
    # Upload archives modified within the last day. NUL-delimited find plus
    # 'IFS= read -r -d ""' handles filenames with spaces, newlines, and
    # backslashes; the original unquoted 'while read file' did not.
    # NOTE(review): the FTP password appears in curl's argv and is visible
    # in the process list — consider curl --netrc-file or a config file.
    find "$DATA_BACKUP_DIR" -type f -name "*.tar*" -mtime -1 -print0 | while IFS= read -r -d '' file; do
        curl -T "$file" \
            -u "{{ backup_remote_config.ftp.username }}:{{ backup_remote_config.ftp.password }}" \
            "ftp://{{ backup_remote_config.ftp.host }}:{{ backup_remote_config.ftp.port }}{{ backup_remote_config.ftp.path }}/data/$(basename "$file")"
    done
{% endif %}
    
    log_info "远程同步完成"
}
{% endif %}

# =============================================================================
# 主函数 - Main Function
# =============================================================================

# Orchestrate one full data-backup run: create the directory tree, archive
# the configured data directories, prune expired archives, and (when
# enabled at render time) push the results to remote storage.
main() {
    log_info "开始应用数据备份任务..."
    
    # Create required directories first so later steps can write freely.
    create_backup_dirs
    
    # Archive, compress (and optionally encrypt) each configured data dir.
    backup_app_data
    
    # Remove archives older than the retention window.
    cleanup_old_backups
    
{% if backup_remote_enabled %}
    # Mirror the local backup directory to the configured remote target.
    sync_to_remote
{% endif %}
    
    log_info "应用数据备份任务完成"
}

# =============================================================================
# 脚本入口 - Script Entry Point
# =============================================================================

# Install an error handler that logs the failing exit code on any
# unhandled error.
# NOTE(review): ERR traps are not inherited by functions unless 'set -E'
# is in effect — errors inside functions still abort via errexit but would
# skip this message; confirm whether strict mode should include -E.
trap 'log_error "数据备份脚本执行失败，退出码: $?"' ERR

# Run the backup, forwarding any command-line arguments.
main "$@"

log_info "数据备份脚本执行完成"