#!/usr/bin/env bash

# Base path configuration.
readonly BASE_DIR="/root/ports_workflow"
# Fail fast when the working directory is unavailable; report the error on
# stderr (bug fix: the message previously went to stdout, where it could be
# swallowed by output redirection, e.g. under cron).
cd "${BASE_DIR}" || { echo "Error: Cannot change to ${BASE_DIR}" >&2; exit 1; }

# Detect whether stdout is attached to a terminal (interactive run).
# Used later to decide whether messages are echoed to the screen in
# addition to the log file.
IS_INTERACTIVE=false
if [ -t 1 ]; then
    IS_INTERACTIVE=true
fi
readonly IS_INTERACTIVE

# Global configuration table (associative array), keyed by setting name.
declare -A CONFIG=(
    # Alert/report email sender and recipient.
    [EMAIL_FROM]="xxxxxxx@xxxxxxx.cn"
    [EMAIL_TO]="xxxxxxx@xxxxxxx.cn"
    # Data locations (SQLite DB, reports, backups, scratch, logs).
    [DB_NAME]="${BASE_DIR}/traffic.db"
    [REPORTS_DIR]="${BASE_DIR}/reports"
    [BACKUP_DIR]="${BASE_DIR}/backups"
    [TEMP_DIR]="/tmp"
    [LOGS_DIR]="${BASE_DIR}/logs"
    # Alert when input/output utilization exceeds this percentage.
    [TRAFFIC_ALERT_THRESHOLD]=90
    # Max minutes a report run waits for a running collector to finish.
    [MAX_WAIT_MINUTES]=50
    # Seconds between checks while waiting for a collector to finish.
    [PROCESS_CHECK_INTERVAL]=10
    # Days to keep old logs, DB backups, and report directories.
    [RETENTION_DAYS]=90
    # Lock files recording the last generated daily/monthly report period.
    [DAILY_LOCK_FILE]="${BASE_DIR}/.daily_report_lock"
    [MONTHLY_LOCK_FILE]="${BASE_DIR}/.monthly_report_lock"
    # Concurrency settings for the collection fan-out.
    [MAX_CONCURRENT_JOBS]=5
    [INTER_JOB_DELAY]=0.2
    # Master list of devices/ports to poll.
    [MASTER_PORT_LIST]="${BASE_DIR}/ports_master_list.txt"
)

# Initialize the logging system
init_logging() {
    # Ensure the log directory exists before any writes (mkdir -p is a
    # no-op when it already does).
    mkdir -p "${CONFIG[LOGS_DIR]}"

    # Pick the log-file prefix from the invocation mode.
    local log_prefix
    case "$1" in
        --daily)       log_prefix="daily_report";;
        --monthly)     log_prefix="monthly_report";;
        --clean-locks) log_prefix="clean_locks";;
        --cleanup)     log_prefix="system_cleanup";;
        *)             log_prefix="traffic_collect";;
    esac

    # One log file per mode per day; readonly so it cannot be reassigned.
    readonly LOG_FILE="${CONFIG[LOGS_DIR]}/${log_prefix}_$(date +%Y%m%d).log"

    # Write a session header only when creating a brand-new log file.
    if [ ! -f "$LOG_FILE" ]; then
        {
            echo "=== Log started at $(date '+%Y-%m-%d %H:%M:%S') ==="
            echo "Script: $0"
            echo "Arguments: $*"
            echo "PID: $$"
            echo "User: $(whoami)"
            echo "==========================================="
        } >> "$LOG_FILE"
    fi
}

log_message() {
    # Append a timestamped line to the log file and, when running
    # interactively, echo the plain message to the screen as well.
    local stamp
    stamp=$(date '+%Y-%m-%d %H:%M:%S.%3N')

    # Always write to the log file (when one has been initialized).
    [ -n "$LOG_FILE" ] && echo "[${stamp}] $1" >> "$LOG_FILE"

    # Interactive runs also get a concise copy on stdout.
    [[ "$IS_INTERACTIVE" == true ]] && echo "$1"
    return 0
}

log_error() {
    # Append a timestamped ERROR line to the log file; when running
    # interactively, echo it to stderr as well.
    local stamp
    stamp=$(date '+%Y-%m-%d %H:%M:%S.%3N')

    [ -n "$LOG_FILE" ] && echo "[${stamp}] ERROR: $1" >> "$LOG_FILE"

    # Screen copies of errors always go to stderr.
    [[ "$IS_INTERACTIVE" == true ]] && echo "ERROR: $1" >&2
    return 0
}

# Forward declarations: no-op placeholders so every helper name exists
# before any of them is referenced. The real definitions below override
# these. (Bash resolves function names at call time, so these are
# belt-and-braces rather than strictly required.)
perform_system_cleanup(){ :; }
write_utf8_bom(){ :; }
cleanup_lock_files(){ :; }
check_running_process(){ :; }
wait_for_process_completion(){ :; }
check_report_generated(){ :; }
mark_report_generated(){ :; }
read_acc_file(){ :; }
send_alert(){ :; }
send_status_alert(){ :; }
ensure_column_exists(){ :; }
init_database(){ :; }
check_files(){ :; }
collect_data(){ :; }
sanitize_filename(){ :; }
generate_detail_report(){ :; }
generate_full_summary_report(){ :; }
generate_stats_report(){ :; }
generate_ip_summary(){ :; }
generate_port_reports(){ :; }
create_archive(){ :; }
send_report_email(){ :; }
generate_report(){ :; }
generate_daily_report(){ :; }
generate_monthly_report(){ :; }
backup_database(){ :; }
cleanup_on_exit(){ :; }
show_usage(){ :; }
main(){ :; }

perform_system_cleanup() {
    # Delete logs, DB backups, and report directories older than the
    # configured retention window, logging how many of each were removed.
    local retention_days=${CONFIG[RETENTION_DAYS]}
    local count
    log_message "=== Starting Unified System Cleanup (retention: ${retention_days} days) ==="

    # Bug fix: the previous implementation piped the counts into
    # `xargs -I {} log_message ...`, but xargs can only execute external
    # programs, not shell functions, so the counts were never logged and
    # xargs failed with "command not found". Capture the count instead.
    count=$(find "${CONFIG[LOGS_DIR]}" -type f -name "*.log" -mtime "+${retention_days}" -print -delete | wc -l)
    log_message "Cleaned up ${count} log files."

    count=$(find "${CONFIG[BACKUP_DIR]}" -type f -name "traffic_*.db" -mtime "+${retention_days}" -print -delete | wc -l)
    log_message "Cleaned up ${count} database backups."

    # Report dirs are named YYYY-MM-DD; -exec rm -rf removes them whole.
    count=$(find "${CONFIG[REPORTS_DIR]}" -maxdepth 1 -type d -name "????-??-??" -mtime "+${retention_days}" -print -exec rm -rf {} \; | wc -l)
    log_message "Cleaned up ${count} report directories."

    log_message "=== Unified System Cleanup Completed ==="
}
write_utf8_bom() { printf '\xEF\xBB\xBF' > "$1"; }
cleanup_lock_files() {
    # Remove lock files left over from a previous day/month so they do
    # not block today's report generation. A lock is "stale" when its
    # recorded period no longer matches the current one.
    local today current_month
    today=$(date +%Y-%m-%d)
    current_month=$(date +%Y-%m)

    if [ -f "${CONFIG[DAILY_LOCK_FILE]}" ] && [ "$(cat "${CONFIG[DAILY_LOCK_FILE]}")" != "$today" ]; then
        log_message "Cleaning up stale daily lock file."
        rm -f "${CONFIG[DAILY_LOCK_FILE]}"
    fi

    if [ -f "${CONFIG[MONTHLY_LOCK_FILE]}" ] && [ "$(cat "${CONFIG[MONTHLY_LOCK_FILE]}")" != "$current_month" ]; then
        log_message "Cleaning up stale monthly lock file."
        rm -f "${CONFIG[MONTHLY_LOCK_FILE]}"
    fi
}
check_running_process() { pgrep -f "collect_traffic.sh$" | grep -v "^$$\$" > /dev/null; }
wait_for_process_completion() {
    # Block until no other collector process is running, polling every
    # PROCESS_CHECK_INTERVAL seconds, up to MAX_WAIT_MINUTES total.
    local max_wait_seconds=$(( CONFIG[MAX_WAIT_MINUTES] * 60 ))
    local elapsed=0

    if ! check_running_process; then
        log_message "No other data collection process running. Proceeding."
        return 0
    fi

    log_message "Waiting for other data collection processes to complete..."
    while check_running_process; do
        sleep "${CONFIG[PROCESS_CHECK_INTERVAL]}"
        elapsed=$(( elapsed + CONFIG[PROCESS_CHECK_INTERVAL] ))
        log_message "Waited ${elapsed} seconds..."
        # Give up waiting (but proceed anyway) once the cap is reached.
        if [ "$elapsed" -ge "$max_wait_seconds" ]; then
            log_error "Wait time exceeded ${CONFIG[MAX_WAIT_MINUTES]} minutes. Proceeding anyway."
            break
        fi
    done
}
check_report_generated() { [ -f "$1" ] && [ "$(cat "$1")" = "$2" ]; }
mark_report_generated() { echo "$2" > "$1"; chmod 644 "$1"; log_message "Marked $3 report as generated for $2."; }
read_acc_file() {
    # Decrypt the credentials store via ./encrypt_acc.sh and emit a flat,
    # space-separated stream of (ip user pass protocol conn_port) tuples
    # on stdout. Returns 1 when nothing valid could be decrypted.
    # NOTE(review): the flat `echo "${result[@]}"` joins all fields with
    # single spaces, so no field (including passwords) may contain
    # whitespace — verify this matches the encrypted file's format.
    local result=()
    # Keep only fully-populated 5-field lines from the decrypted output.
    while IFS=' ' read -r ip user pass protocol conn_port; do if [ -n "$ip" ] && [ -n "$user" ] && [ -n "$pass" ] && [ -n "$protocol" ] && [ -n "$conn_port" ]; then result+=("$ip" "$user" "$pass" "$protocol" "$conn_port"); fi; done < <(./encrypt_acc.sh decrypt)
    if [ ${#result[@]} -eq 0 ]; then log_error "Failed to read or decrypt any valid credentials."; return 1; fi; echo "${result[@]}"
}
send_alert() {
    # Email a high-traffic-utilization alert for one port.
    # $1=check time, $2=ip, $3=port, $4=description,
    # $5/$6=input/output rate (bits/sec), $7/$8=input/output utilization (%).
    local check_time="$1" ip="$2" port="$3" desc="$4"
    local input_bits="$5" output_bits="$6" input_util="$7" output_util="$8"

    local subject="[ALERT] High Traffic Utilization - ${ip}:${port}"
    local body="High traffic utilization detected!\n\nCheck Time: ${check_time}\nIP: ${ip}\nPort: ${port}\nDescription: ${desc}\nInput Rate: ${input_bits} bits/sec\nOutput Rate: ${output_bits} bits/sec\nInput Utilization: ${input_util}%\nOutput Utilization: ${output_util}%"

    # echo -e expands the \n escapes in the body into real newlines.
    if echo -e "$body" | mail -A xxxxxxx -s "$subject" -r "${CONFIG[EMAIL_FROM]}" "${CONFIG[EMAIL_TO]}"; then
        log_message "Traffic alert email sent successfully for ${ip}:${port}"
    else
        log_error "Failed to send traffic alert email for ${ip}:${port}"
    fi
}
send_status_alert() {
    # Email an abnormal-port-status alert.
    # $1=check time, $2=ip, $3=port, $4=description, $5=current status.
    local check_time="$1" ip="$2" port="$3" desc="$4" status="$5"

    local subject="[ALERT] Port Status Abnormal - ${ip}:${port}"
    local body="Abnormal port status detected!\n\nCheck Time: ${check_time}\nIP: ${ip}\nPort: ${port}\nDescription: ${desc}\nCurrent Status: ${status}"

    if echo -e "$body" | mail -A xxxxxxx -s "$subject" -r "${CONFIG[EMAIL_FROM]}" "${CONFIG[EMAIL_TO]}"; then
        log_message "Status alert email sent successfully for ${ip}:${port}"
    else
        log_error "Failed to send status alert email for ${ip}:${port}"
    fi
}
ensure_column_exists() {
    # Idempotent additive migration: add column $3 (definition $4) to
    # table $2 of SQLite db $1 only if it is not already present.
    local db_file="$1" table_name="$2" column_name="$3" column_definition="$4"
    # PRAGMA table_info emits one '|'-separated row per column; field 2
    # is the column name. The awk program exits 0 only when a match is found.
    if ! sqlite3 "$db_file" "PRAGMA table_info(${table_name});" | awk -F'|' -v col="$column_name" '$2 == col {found=1; exit} END {exit !found}'; then
        # Identifiers are interpolated into the SQL, but all call sites
        # pass hard-coded names — never external input.
        log_message "Schema update: Column '${column_name}' not found in table '${table_name}'. Adding it..."; sqlite3 "$db_file" "ALTER TABLE ${table_name} ADD COLUMN ${column_name} ${column_definition};" || { log_error "FATAL: Failed to add column '${column_name}' to table '${table_name}'."; exit 1; }; log_message "Column '${column_name}' added successfully."
    fi
}
init_database() {
    # Create the traffic table and its time index if absent, then make
    # sure every later-added column exists (additive schema migration).
    local db="${CONFIG[DB_NAME]}"
    sqlite3 "$db" "CREATE TABLE IF NOT EXISTS traffic (time TEXT, ip TEXT, port TEXT, desc TEXT, input INTEGER, output INTEGER, input_util REAL, output_util REAL); CREATE INDEX IF NOT EXISTS idx_traffic_time ON traffic(time);" || { log_error "Database base table creation failed."; exit 1; }

    # All optics/status columns share the same TEXT definition.
    local col
    for col in port_status rx_power rx_high_thresh rx_low_thresh tx_power tx_high_thresh tx_low_thresh; do
        ensure_column_exists "$db" "traffic" "$col" "TEXT"
    done
    log_message "Database schema check complete and is up-to-date."
}
check_files() {
    # Verify that every required helper file exists, then lock down
    # permissions (secrets read-only to owner, scripts executable).
    local required
    for required in "${BASE_DIR}/acc" "${BASE_DIR}/get_port_traffic.exp" "${BASE_DIR}/encrypt_acc.sh" "${BASE_DIR}/key.txt"; do
        if [ ! -f "$required" ]; then
            log_error "$required not found."
            exit 1
        fi
    done

    if [ ! -f "${CONFIG[MASTER_PORT_LIST]}" ]; then
        log_error "Master port list file not found: ${CONFIG[MASTER_PORT_LIST]}"
        exit 1
    fi

    chmod 600 "${BASE_DIR}/acc" "${BASE_DIR}/key.txt"
    chmod +x "${BASE_DIR}/get_port_traffic.exp" "${BASE_DIR}/encrypt_acc.sh"
    log_message "All required files checked and permissions set."
}
collect_data() {
    # Fan out one expect job per port (bounded concurrency), collect each
    # job's output into a temp file, then process the results sequentially
    # into SQLite, emailing alerts on high utilization or abnormal status.
    # Load decrypted credentials as a flat 5-field-per-entry word list.
    local acc_data; acc_data=($(read_acc_file)); [ $? -ne 0 ] && return 1; declare -A credentials; local i=0
    # Index credentials by IP; the value is "user pass protocol conn_port".
    while [ $i -lt ${#acc_data[@]} ]; do local ip="${acc_data[i]}"; credentials["$ip"]="${acc_data[i+1]} ${acc_data[i+2]} ${acc_data[i+3]} ${acc_data[i+4]}"; i=$((i+5)); done
    # Per-job outputs go to a temp dir removed by the RETURN trap, even on
    # early return.
    local max_jobs=${CONFIG[MAX_CONCURRENT_JOBS]}; local job_delay=${CONFIG[INTER_JOB_DELAY]}; local output_dir; output_dir=$(mktemp -d -p "${CONFIG[TEMP_DIR]}" "collect_data_outputs_XXXXXX"); trap 'log_message "Cleaning up temp dir: $output_dir"; rm -rf "$output_dir"' RETURN
    log_message "=== Starting Concurrent Data Collection (max ${max_jobs} jobs, ${job_delay}s delay) ==="; log_message "Using master port list: ${CONFIG[MASTER_PORT_LIST]}"; log_message "Temporary output will be stored in ${output_dir}"; local current_model=""
    # Master list format: "!model" lines switch the active device model;
    # data lines are "<ip> <port> [description]"; '#' and blanks ignored.
    while IFS= read -r line || [[ -n "$line" ]]; do
        if [[ -z "$line" || "$line" =~ ^\s*# ]]; then continue; fi
        if [[ "$line" =~ ^! ]]; then if [[ "$line" =~ ^!([a-zA-Z0-9_]+)$ ]]; then current_model="${BASH_REMATCH[1]}"; log_message "Switched to device model: ${current_model}"; else log_error "Invalid model definition found: '$line'. Model names must contain only letters, numbers, and underscores. This definition will be ignored."; fi; continue; fi
        if [[ -z "$current_model" ]]; then log_error "Skipping line, no device model specified yet: '$line'"; continue; fi
        local target_ip device_port desc; read -r target_ip device_port desc <<< "$line"
        if [[ -z "$target_ip" || -z "$device_port" ]]; then log_error "Skipping malformed data line: '$line'"; continue; fi
        local creds=${credentials[$target_ip]}; if [[ -z "$creds" ]]; then log_error "No credentials found for IP ${target_ip} in 'acc' file. Skipping port ${device_port}."; continue; fi
        local user pass protocol conn_port; read -r user pass protocol conn_port <<< "$creds"
        # Throttle: busy-wait while the background job count is at the cap.
        while (( $(jobs -p | wc -l) >= max_jobs )); do sleep 1; done
        log_message "-> Launching collection for ${target_ip}:${device_port} (Model: ${current_model})"
        # Password travels via the environment, never on the command line;
        # '/' in port names is mapped to '_' for the output filename.
        ( export DEVICE_PASS="$pass"; raw_output=$("${BASE_DIR}/get_port_traffic.exp" "$target_ip" "$user" "$protocol" "$conn_port" "$device_port" "$current_model" 2>&1); echo -e "$target_ip\t$device_port\t$desc\t$current_model\t$raw_output" > "${output_dir}/${target_ip}-${device_port//\//_}.out"; ) &
        sleep "$job_delay"; done < "${CONFIG[MASTER_PORT_LIST]}"; log_message "All collection jobs have been launched. Waiting for all remaining jobs to complete..."; wait; log_message "All collection jobs have finished."
    log_message "=== Starting Sequential Data Processing and Database Insertion ==="; local success_count=0; local error_count=0
    for result_file in "$output_dir"/*.out; do
        # Only the first line of each result file is read; payloads are
        # tab-separated: ip, port, desc, model, raw expect output.
        [ -f "$result_file" ] || continue; local ip device_port desc device_model output; IFS=$'\t' read -r ip device_port desc device_model output < "$result_file"
        # Expected payload shape: "DATA:<in>:<out>:<in_util>:<out_util>:
        # <status>:<rx...>:<tx...>" — TODO confirm against
        # get_port_traffic.exp's actual output format.
        if [[ "$output" =~ ^DATA: ]]; then
            ((success_count++)); log_message "  [SUCCESS] Processing result for ${ip}:${device_port}."; local input_bits output_bits input_util output_util port_status; local rx_power rx_high_thresh rx_low_thresh tx_power tx_high_thresh tx_low_thresh
            IFS=':' read -r _ input_bits output_bits input_util output_util port_status rx_power rx_high_thresh rx_low_thresh tx_power tx_high_thresh tx_low_thresh <<< "$output"; tx_low_thresh=${tx_low_thresh//$'\r'/}
            # Alert when the reported status is anything other than "up".
            local status_lower; status_lower=$(echo "$port_status" | tr '[:upper:]' '[:lower:]'); if [[ -n "$status_lower" && "$status_lower" != "up" ]]; then send_status_alert "$(date '+%Y-%m-%d %H:%M:%S')" "$ip" "$device_port" "$desc" "$port_status"; fi
            # Alert when either direction crosses the utilization threshold
            # (bc handles the floating-point comparison).
            if (( $(echo "$input_util > ${CONFIG[TRAFFIC_ALERT_THRESHOLD]}" | bc -l) )) || (( $(echo "$output_util > ${CONFIG[TRAFFIC_ALERT_THRESHOLD]}" | bc -l) )); then send_alert "$(date '+%Y-%m-%d %H:%M:%S')" "$ip" "$device_port" "$desc" "$input_bits" "$output_bits" "$input_util" "$output_util"; fi
            # Map the sentinel "N/A" to SQL empty strings for optics fields.
            [[ "$rx_power" == "N/A" ]] && sql_rx_power="''" || sql_rx_power="'$rx_power'"; [[ "$rx_high_thresh" == "N/A" ]] && sql_rx_high="''" || sql_rx_high="'$rx_high_thresh'"; [[ "$rx_low_thresh" == "N/A" ]] && sql_rx_low="''" || sql_rx_low="'$rx_low_thresh'"; [[ "$tx_power" == "N/A" ]] && sql_tx_power="''" || sql_tx_power="'$tx_power'"; [[ "$tx_high_thresh" == "N/A" ]] && sql_tx_high="''" || sql_tx_high="'$tx_high_thresh'"; [[ "$tx_low_thresh" == "N/A" ]] && sql_tx_low="''" || sql_tx_low="'$tx_low_thresh'"
            # NOTE(review): values are interpolated into the SQL; safe only
            # while device output never contains quotes — verify upstream.
            sqlite3 "${CONFIG[DB_NAME]}" "INSERT INTO traffic (time, ip, port, desc, input, output, input_util, output_util, port_status, rx_power, rx_high_thresh, rx_low_thresh, tx_power, tx_high_thresh, tx_low_thresh) VALUES (datetime('now','localtime'), '$ip', '$device_port', '$desc', ${input_bits:-0}, ${output_bits:-0}, ${input_util:-0}, ${output_util:-0}, '$port_status', ${sql_rx_power}, ${sql_rx_high}, ${sql_rx_low}, ${sql_tx_power}, ${sql_tx_high}, ${sql_tx_low});"
        elif [[ -n "$output" ]]; then ((error_count++)); log_error "  [ERROR] Data collection error for $ip:$device_port (Model: $device_model): $output"; else ((error_count++)); log_error "  [ERROR] Data collection failed for $ip:$device_port (Model: $device_model). No output was generated."; fi
    done; log_message "Data processing complete. Success: ${success_count}, Errors: ${error_count}."; unset acc_data credentials
}
sanitize_filename() { echo "$1" | sed -e 's/[^A-Za-z0-9._-]/_/g'; }
generate_detail_report() {
    # Dump every raw sample for one ip/port over the period into a
    # per-port CSV (UTF-8 BOM + header + rows) under <output_dir>/raw_data.
    local ip="$1" port="$2" desc="$3" start_date="$4" end_date="$5" report_date="$6" output_dir="$7"
    local safe_desc
    safe_desc=$(sanitize_filename "$desc")
    local report_file="${output_dir}/raw_data/${ip}_${safe_desc}_${report_date}.csv"

    write_utf8_bom "$report_file"
    echo "IP,Port,Description,Time,Input Rate (bits/sec),Output Rate (bits/sec),Input Utilization (%),Output Utilization (%),PortStatus,Current RX Power (dBm),RX Power High Thresh (dBm),RX Power Low Thresh (dBm),Current TX Power (dBm),TX Power High Thresh (dBm),TX Power Low Thresh (dBm)" >> "$report_file"
    sqlite3 -csv "${CONFIG[DB_NAME]}" "SELECT ip,port,desc,time,input,output,input_util,output_util,port_status, rx_power,rx_high_thresh,rx_low_thresh,tx_power,tx_high_thresh,tx_low_thresh FROM traffic WHERE port='$port' AND ip='$ip' AND date(time) BETWEEN '$start_date' AND '$end_date' ORDER BY time ASC" >> "$report_file"
}
generate_full_summary_report() {
    # Export every sample for all devices in the period into a single
    # summary CSV (UTF-8 BOM + header + rows ordered by ip, port, time).
    local start_date="$1" end_date="$2" output_file="$3"
    log_message "Generating full summary report from $start_date to $end_date"

    write_utf8_bom "$output_file"
    echo "IP,Port,Description,Time,Input Rate (bits/sec),Output Rate (bits/sec),Input Utilization (%),Output Utilization (%),PortStatus,Current RX Power (dBm),RX Power High Thresh (dBm),RX Power Low Thresh (dBm),Current TX Power (dBm),TX Power High Thresh (dBm),TX Power Low Thresh (dBm)" >> "$output_file"
    sqlite3 -csv "${CONFIG[DB_NAME]}" "SELECT ip,port,desc,time,input,output,input_util,output_util,port_status, rx_power,rx_high_thresh,rx_low_thresh,tx_power,tx_high_thresh,tx_low_thresh FROM traffic WHERE date(time) BETWEEN '$start_date' AND '$end_date' ORDER BY ip, port, time ASC" >> "$output_file"
}
generate_stats_report() {
    # Write a one-row statistics CSV for one ip/port over the period:
    # sample count, averages/maxima for rates and utilization, plus the
    # values captured at the traffic peak.
    local ip="$1" port="$2" desc="$3" start_date="$4" end_date="$5" report_date="$6" output_dir="$7"; local sanitized_desc; sanitized_desc=$(sanitize_filename "$desc"); local stats_file="${output_dir}/statistics/${ip}_${sanitized_desc}_${report_date}_statistics.csv"; write_utf8_bom "$stats_file"
    echo "Peak Time,IP,Port,Description,Period,Samples,Avg Input (bits/sec),Max Input (bits/sec),Avg Output (bits/sec),Max Output (bits/sec),Avg In Util (%),Max In Util (%),Avg Out Util (%),Max Out Util (%),PortStatus at Peak,RX Power at Peak,RX High Thresh at Peak,RX Low Thresh at Peak,TX Power at Peak,TX High Thresh at Peak,TX Low Thresh at Peak" >> "$stats_file"
    # "Peak" = the single sample with the highest input+output sum in the
    # period; the same subquery fragment is reused for every peak column.
    local peak_subquery="FROM traffic WHERE port='$port' AND ip='$ip' AND date(time) BETWEEN '$start_date' AND '$end_date' ORDER BY input + output DESC LIMIT 1"
    sqlite3 -csv "${CONFIG[DB_NAME]}" "SELECT (SELECT time $peak_subquery),'$ip','$port','$desc','$report_date', COUNT(*),ROUND(AVG(input)),MAX(input),ROUND(AVG(output)),MAX(output), ROUND(AVG(input_util),2),ROUND(MAX(input_util),2), ROUND(AVG(output_util),2),ROUND(MAX(output_util),2), (SELECT port_status $peak_subquery), (SELECT rx_power $peak_subquery), (SELECT rx_high_thresh $peak_subquery), (SELECT rx_low_thresh $peak_subquery), (SELECT tx_power $peak_subquery), (SELECT tx_high_thresh $peak_subquery), (SELECT tx_low_thresh $peak_subquery) FROM traffic WHERE port='$port' AND ip='$ip' AND date(time) BETWEEN '$start_date' AND '$end_date'" >> "$stats_file"
}
generate_ip_summary() {
    # Build one summary CSV per IP: a statistics row (with values at the
    # traffic peak) for every distinct port seen in the period.
    local ip="$1" start_date="$2" end_date="$3" report_date="$4" output_dir="$5"; local summary_file="${output_dir}/${ip}_summary_${report_date}.csv"; write_utf8_bom "$summary_file"
    echo "Peak Time,IP,Port,Description,Samples,Avg Input (bits/sec),Max Input (bits/sec),Avg Output (bits/sec),Max Output (bits/sec),Avg In Util (%),Max In Util (%),Avg Out Util (%),Max Out Util (%),PortStatus at Peak,RX Power at Peak,RX High Thresh at Peak,RX Low Thresh at Peak,TX Power at Peak,TX High Thresh at Peak,TX Low Thresh at Peak" >> "$summary_file"
    # NOTE: the while loop runs in a pipeline subshell, so variables set
    # inside it are not visible after `done`. The space separator means a
    # desc containing spaces is re-joined into the `desc` variable by read.
    sqlite3 -separator ' ' "${CONFIG[DB_NAME]}" "SELECT DISTINCT port, desc FROM traffic WHERE ip='$ip' AND date(time) BETWEEN '$start_date' AND '$end_date'" | \
    while read -r port desc; do
        # "Peak" = sample with the highest input+output sum for this port.
        local peak_subquery="FROM traffic WHERE port='$port' AND ip='$ip' AND date(time) BETWEEN '$start_date' AND '$end_date' ORDER BY input + output DESC LIMIT 1"
        sqlite3 -csv "${CONFIG[DB_NAME]}" "SELECT (SELECT time $peak_subquery),'$ip','$port','$desc', COUNT(*),ROUND(AVG(input)),MAX(input),ROUND(AVG(output)),MAX(output), ROUND(AVG(input_util),2),ROUND(MAX(input_util),2), ROUND(AVG(output_util),2),ROUND(MAX(output_util),2), (SELECT port_status $peak_subquery), (SELECT rx_power $peak_subquery), (SELECT rx_high_thresh $peak_subquery), (SELECT rx_low_thresh $peak_subquery), (SELECT tx_power $peak_subquery), (SELECT tx_high_thresh $peak_subquery), (SELECT tx_low_thresh $peak_subquery) FROM traffic WHERE port='$port' AND ip='$ip' AND date(time) BETWEEN '$start_date' AND '$end_date'" >> "$summary_file"
    done
}
generate_port_reports() {
    # For every distinct port of this IP seen in the period, emit both the
    # raw-data CSV and the statistics CSV.
    local ip="$1" start_date="$2" end_date="$3" report_date="$4" output_dir="$5"
    sqlite3 -separator ' ' "${CONFIG[DB_NAME]}" "SELECT DISTINCT port, desc FROM traffic WHERE ip='$ip' AND date(time) BETWEEN '$start_date' AND '$end_date'" | \
    while read -r port desc; do
        log_message "Processing port reports for $ip:$port"
        generate_detail_report "$ip" "$port" "$desc" "$start_date" "$end_date" "$report_date" "$output_dir"
        generate_stats_report "$ip" "$port" "$desc" "$start_date" "$end_date" "$report_date" "$output_dir"
    done
}
create_archive() {
    # Pack one IP's report directory ($2) into a tar.gz inside $3.
    # $1=ip, $2=source dir, $3=output dir, $4=report type, $5=report date.
    local ip="$1" source_dir="$2" output_dir="$3" report_type="$4" report_date="$5"
    local archive_name="${ip}_${report_type}_traffic_report_${report_date}.tar.gz"
    log_message "Creating archive for IP $ip"
    # Bug fix: success was previously logged unconditionally, even when cd
    # or tar failed inside the subshell. Check the subshell's status.
    if ( cd "$source_dir" || exit 1; tar -czf "${output_dir}/${archive_name}" . ); then
        log_message "Archive created: ${archive_name}"
    else
        log_error "Failed to create archive: ${archive_name}"
    fi
}
send_report_email() {
    # Email the generated report archives and summary CSVs for a period.
    # $1=report dir, $2=subject, $3=report date, $4=start date, $5=end date.
    local report_dir="$1" subject="$2" report_date="$3" start_date="$4" end_date="$5"
    log_message "Preparing to send professional report email for period ${start_date} to ${end_date}"

    # Build a comma-separated, de-duplicated list of device IPs from the
    # archive filenames. Bug fix: the old `tr '\n' ', '` mapped each
    # newline to ',' only (extra SET2 chars are ignored), so the
    # `sed 's/, $//'` cleanup never matched and the list always ended with
    # a stray trailing comma. paste joins without a trailing delimiter.
    local processed_ips
    processed_ips=$(ls -1 "${report_dir}"/*.tar.gz 2>/dev/null | sed -E 's/.*\/([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)_.*/\1/' | sort -u | paste -sd ',' -)
    [ -z "$processed_ips" ] && processed_ips="无"

    local attachment_list
    attachment_list=$(find "$report_dir" -maxdepth 1 -type f \( -name "*.tar.gz" -o -name "*.csv" \) -exec basename {} \;)

    # read -r -d '' returns non-zero at EOF; that is expected and harmless.
    local email_body
    read -r -d '' email_body << EOM
这是为您自动生成的**${subject}**。
**报告摘要:**
  - **报告周期:** ${start_date} 至 ${end_date}
  - **报告生成时间:** $(date '+%Y-%m-%d %H:%M:%S')
  - **监控设备IP:** ${processed_ips}
  - **报告内容:** 本邮件附件包含了详细的流量数据、统计摘要以及全量汇总。报告中已包含各端口的状态和光衰信息。
**附件说明:**
${attachment_list}
  - **tar.gz 压缩包:** 每个IP一个，内含该设备所有监控端口的原始数据(raw_data)和统计数据(statistics)。
  - **full_summary...csv:** 周期内所有设备所有端口的流量与光衰数据汇总。
请查收附件以获取详细信息。
这是自动生成的报告，请勿回复此邮件。
EOM

    # Assemble the mail command as an array so attachment paths with
    # unusual characters survive word splitting.
    local -a mail_args=("mail" "-A" "xxxxxxx" "-s" "$subject" "-r" "${CONFIG[EMAIL_FROM]}")
    local attachment_count=0
    local report
    for report in "${report_dir}"/*_traffic_report_*.tar.gz "${report_dir}"/full_summary_report_*.csv; do
        [ -f "$report" ] && mail_args+=("-a" "$report") && ((attachment_count++))
    done

    if [ "$attachment_count" -gt 0 ]; then
        mail_args+=("${CONFIG[EMAIL_TO]}")
        log_message "Sending email with $attachment_count attachments to ${CONFIG[EMAIL_TO]}"
        if echo -e "$email_body" | "${mail_args[@]}"; then
            log_message "Reports sent successfully."
        else
            log_error "Failed to send reports."
        fi
    else
        log_message "No report attachments found to send."
    fi
}
generate_report() {
    # Orchestrate a full report run: per-IP CSVs and archives, a global
    # summary CSV, the report email, and a persisted copy of everything
    # under REPORTS_DIR/<report_date>.
    local report_type="$1" start_date="$2" end_date="$3" report_date="$4" report_title="$5"; log_message "Generating ${report_type} report for period: ${start_date} to ${end_date}"
    # Temp workspace is removed by the RETURN trap even on early return.
    local temp_zip_dir; temp_zip_dir=$(mktemp -d -p "${CONFIG[TEMP_DIR]}" "traffic_reports_${report_date}_XXXXXX"); trap 'rm -rf "$temp_zip_dir"' RETURN
    # NOTE: the while loop runs in a pipeline subshell, so variables set
    # inside it do not survive past `done`.
    sqlite3 "${CONFIG[DB_NAME]}" "SELECT DISTINCT ip FROM traffic WHERE date(time) BETWEEN '$start_date' AND '$end_date'" | \
    while read -r ip; do
        [ -z "$ip" ] && continue; log_message "Processing ${report_type} reports for IP: $ip"; local ip_temp_dir="${temp_zip_dir}/${ip}"; mkdir -p "${ip_temp_dir}/raw_data" "${ip_temp_dir}/statistics"
        generate_port_reports "$ip" "$start_date" "$end_date" "$report_date" "$ip_temp_dir"; generate_ip_summary "$ip" "$start_date" "$end_date" "$report_date" "$ip_temp_dir"; create_archive "$ip" "$ip_temp_dir" "$temp_zip_dir" "$report_type" "$report_date"
    done
    generate_full_summary_report "$start_date" "$end_date" "${temp_zip_dir}/full_summary_report_${report_date}.csv"; send_report_email "$temp_zip_dir" "$report_title" "$report_date" "$start_date" "$end_date"
    # Persist top-level "<name>.<ext>" artifacts (archives + summary CSV);
    # `|| true` tolerates a run that produced no matching files.
    local final_report_dir="${CONFIG[REPORTS_DIR]}/${report_date}"; mkdir -p "$final_report_dir"; cp -p "${temp_zip_dir}"/*.* "${final_report_dir}/" 2>/dev/null || true; log_message "All report files saved to ${final_report_dir}"
}
backup_database() {
    # Copy the SQLite database into the backup directory, one file per day
    # (re-running on the same day overwrites that day's backup).
    log_message "Starting database backup"
    mkdir -p "${CONFIG[BACKUP_DIR]}"

    local backup_file="${CONFIG[BACKUP_DIR]}/traffic_$(date +%Y%m%d).db"
    if cp "${CONFIG[DB_NAME]}" "$backup_file"; then
        log_message "Database backup created: $backup_file"
    else
        log_error "Failed to create database backup"
    fi
}
cleanup_on_exit() { log_message "Script finished with exit code: $?. Log ended at $(date '+%Y-%m-%d %H:%M:%S')."; echo "" >> "$LOG_FILE"; }
show_usage() {
    # Print CLI usage and the effective configuration to stdout.
    # The unquoted EOF heredoc intentionally expands $0 and ${CONFIG[...]}.
    cat << EOF
Usage: $0 [option]
Options:
    (no option)     Run normal data collection task.
    --daily         Generate and email daily report.
    --monthly       Generate and email monthly report for the previous month.
    --cleanup       Perform system cleanup (delete old logs, backups, reports).
    --clean-locks   Forcefully remove all lock files.
    --help, -h      Display this help message.
Configuration:
  - Log Directory: ${CONFIG[LOGS_DIR]}
  - Retention Days: ${CONFIG[RETENTION_DAYS]}
  - Max Concurrent Jobs: ${CONFIG[MAX_CONCURRENT_JOBS]}
  - Inter-Job Delay: ${CONFIG[INTER_JOB_DELAY]}s
EOF
}

# Generate the daily report
generate_daily_report() {
    # Build and email today's report unless the daily lock file says it
    # was already generated for today.
    log_message "=== Daily report generation process STARTED ==="
    if ! check_report_generated "${CONFIG[DAILY_LOCK_FILE]}" "$(date +%Y-%m-%d)"; then
        wait_for_process_completion
        # Re-read the date only after the (potentially long) wait above.
        local today
        today=$(date +%Y-%m-%d)
        log_message "Report period START date: ${today}, END date: ${today}"
        generate_report "daily" "$today" "$today" "$today" "核心交换端口流量检查日报"
        mark_report_generated "${CONFIG[DAILY_LOCK_FILE]}" "$today" "daily"
    else
        log_message "Today's daily report already generated. Skipping."
    fi
    log_message "=== Daily report generation process FINISHED ==="
}

# Generate the monthly report
generate_monthly_report() {
    # Build and email last month's report unless the monthly lock file
    # says it was already generated for that month.
    log_message "=== Monthly report generation process STARTED ==="
    local last_month_date
    last_month_date=$(date -d "last month" '+%Y-%m')
    if ! check_report_generated "${CONFIG[MONTHLY_LOCK_FILE]}" "$last_month_date"; then
        wait_for_process_completion
        # Period = first through last day of the previous month.
        local month_start month_end
        month_start=$(date -d "last month" '+%Y-%m-01')
        month_end=$(date -d "$month_start +1 month -1 day" '+%Y-%m-%d')
        log_message "Report period START date: ${month_start}, END date: ${month_end}"
        generate_report "monthly" "$month_start" "$month_end" "$last_month_date" "核心交换端口流量检查月报"
        mark_report_generated "${CONFIG[MONTHLY_LOCK_FILE]}" "$last_month_date" "monthly"
    else
        log_message "This month's report already generated. Skipping."
    fi
    log_message "=== Monthly report generation process FINISHED ==="
}

# Main entry point
main() {
    # Help is answered before any logging side effects are set up.
    case "$1" in
        --help|-h)
            show_usage
            exit 0
            ;;
    esac

    init_logging "$@"
    trap cleanup_on_exit EXIT

    log_message "Script started with arguments: $*"
    cleanup_lock_files

    # Dispatch on the invocation mode.
    case "$1" in
        --daily)   generate_daily_report; backup_database ;;
        --monthly) generate_monthly_report; backup_database ;;
        --cleanup) perform_system_cleanup ;;
        --clean-locks)
            rm -f "${CONFIG[DAILY_LOCK_FILE]}" "${CONFIG[MONTHLY_LOCK_FILE]}"
            log_message "All lock files have been removed."
            ;;
        "")
            # Default mode: collect a fresh round of traffic samples.
            wait_for_process_completion
            check_files
            init_database
            collect_data
            ;;
        *)
            log_error "Unknown parameter: '$1'"
            show_usage
            exit 1
            ;;
    esac
}

main "$@"