import calendar
import logging
import os
import re
import time
from datetime import datetime, timedelta

import allure
from api.wf_upload_api import WfUploadApi
from config.config import ppa_prefix_to_api
from util.db_util import DBUtil
from util.file_util import update_sys_biz_date

@allure.step("Upload file with date update: {file_path}")
def upload_file_with_date_update(file_path):
    """
    Derive the business date from the filename, update sys_biz_date,
    then upload the file to the queue matched by its filename prefix.

    The queue is resolved via ppa_prefix_to_api (case-insensitive prefix
    match). Daily queues expect a yyyymmdd date in the filename, monthly
    queues yyyymm (mapped to the last day of that month), yearly queues
    yyyy (mapped to December 31st). Historical queues skip the date update.

    Args:
        file_path: Path of the file to upload.

    Returns:
        dict: Upload API response on success,
            {"status": "skip", ...} when no prefix matches,
            or None on error (missing file, unparseable date,
            unknown queue type).
    """
    if not os.path.exists(file_path):
        logging.error(f"[ERROR] File not found: {file_path}")
        return None

    filename = os.path.basename(file_path)
    logging.info(f"[INFO] Processing file: {filename}")

    # Resolve the target queue by filename prefix (case-insensitive).
    queue_name = None
    for prefix, api_name in ppa_prefix_to_api.items():
        if filename.lower().startswith(prefix):
            queue_name = api_name
            logging.info(f"[INFO] Matched prefix '{prefix}' to queue '{queue_name}'")
            break

    if not queue_name:
        logging.info(f"[INFO] File '{filename}' does not match any prefix in ppa_prefix_to_api, skipping")
        return {"status": "skip", "file": filename, "reason": "No matching prefix"}

    # Derive the business date from the filename based on the queue type.
    biz_date = None
    wf_upload_api = WfUploadApi()

    try:
        if queue_name.startswith("ppa_daily_load_"):
            # Daily files carry a yyyymmdd date in the filename.
            m = re.search(r"(\d{8})", filename)
            if m:
                dt = datetime.strptime(m.group(1), "%Y%m%d")
                biz_date = dt.strftime("%Y-%m-%d")
                logging.info(f"[INFO] Extracted daily date: {biz_date}")
            else:
                logging.error(f"[ERROR] No yyyymmdd date found in filename: {filename}")
                return None

        elif queue_name.startswith("ppa_monthly_load_"):
            # Monthly files carry a yyyymm date; use the last day of that month.
            m = re.search(r"(\d{6})", filename)
            if m:
                year = int(m.group(1)[:4])
                month = int(m.group(1)[4:6])
                # monthrange handles December and leap years without special-casing.
                last_day = calendar.monthrange(year, month)[1]
                dt = datetime(year, month, last_day)
                biz_date = dt.strftime("%Y-%m-%d")
                logging.info(f"[INFO] Extracted monthly date: {biz_date}")
            else:
                logging.error(f"[ERROR] No yyyymm date found in filename: {filename}")
                return None

        elif queue_name.startswith("ppa_yearly_load_"):
            # Yearly files carry a yyyy date; use December 31st of that year.
            m = re.search(r"(\d{4})", filename)
            if m:
                year = int(m.group(1))
                dt = datetime(year, 12, 31)
                biz_date = dt.strftime("%Y-%m-%d")
                logging.info(f"[INFO] Extracted yearly date: {biz_date}")
            else:
                logging.error(f"[ERROR] No yyyy date found in filename: {filename}")
                return None

        elif queue_name.startswith("ppa_load_historical_"):
            # Historical loads do not need a sys_biz_date update; upload directly.
            logging.info(f"[INFO] Historical load queue, skip date update")
            return wf_upload_api.upload_file_to_queue(queue_name, file_path)
        else:
            logging.error(f"[ERROR] Unknown queue type: {queue_name}")
            return None

    except Exception as e:
        # Invalid month/day values (e.g. "202413") land here via datetime/monthrange.
        logging.error(f"[ERROR] Failed to extract date from filename: {e}")
        return None

    # Update sys_biz_date first so the load runs against the right date.
    if biz_date:
        logging.info(f"[INFO] Updating sys_biz_date to: {biz_date}")
        update_sys_biz_date(biz_date)

    # Upload the file to the matched queue and return the raw response.
    logging.info(f"[INFO] Uploading file to queue: {queue_name}")
    resp_json = wf_upload_api.upload_file_to_queue(queue_name, file_path)
    return resp_json


@allure.step("Batch upload files with wait: {folder_path}")
def batch_upload_files_with_wait(folder_path):
    """
    Upload every file under a folder (recursively), ordered by the date
    embedded in each filename, waiting for each file's processing to
    complete before moving on to the next.

    Args:
        folder_path: Root folder to scan for files.

    Returns:
        list: One result dict per file, each with a "status" of
            "skipped", "failed", "completed", or "timeout".
    """
    if not os.path.exists(folder_path):
        logging.error(f"[ERROR] Folder not found: {folder_path}")
        return []

    # Recursively collect files, extracting a sortable date from each name.
    files = []
    for root, dirs, filenames in os.walk(folder_path):
        for filename in filenames:
            file_path = os.path.join(root, filename)
            if os.path.isfile(file_path):
                # Longest pattern wins: yyyymmdd, then yyyymm, then yyyy.
                date_match = re.search(r"(\d{8}|\d{6}|\d{4})", filename)
                if date_match:
                    date_str = date_match.group(1)
                    if len(date_str) == 8:  # yyyymmdd
                        sort_date = datetime.strptime(date_str, "%Y%m%d")
                    elif len(date_str) == 6:  # yyyymm
                        year, month = int(date_str[:4]), int(date_str[4:6])
                        sort_date = datetime(year, month, 1)
                    else:  # yyyy
                        year = int(date_str)
                        sort_date = datetime(year, 1, 1)
                    files.append((sort_date, file_path, filename))
                else:
                    # Files without date info sort first (datetime.min),
                    # then alphabetically via the tie-break below.
                    files.append((datetime.min, file_path, filename))

    # Sort by date, breaking ties by filename for a deterministic order.
    files.sort(key=lambda x: (x[0], x[2]))

    logging.info(f"[INFO] Found {len(files)} files to process")

    results = []

    for i, (sort_date, file_path, filename) in enumerate(files, 1):
        logging.info(f"[INFO] Processing file {i}/{len(files)}: {filename} (date: {sort_date.strftime('%Y-%m-%d') if sort_date != datetime.min else 'No date'})")

        # Upload (this also updates sys_biz_date when applicable).
        resp_json = upload_file_with_date_update(file_path)

        # Files whose prefix matched nothing are skipped, not failed.
        if resp_json and isinstance(resp_json, dict) and resp_json.get("status") == "skip":
            logging.info(f"[INFO] File skipped: {filename}")
            results.append({"file": filename, "status": "skipped", "reason": resp_json.get("reason")})
            continue

        # A falsy response means the upload itself failed.
        if not resp_json:
            logging.error(f"[ERROR] Upload failed for file: {filename}")
            results.append({"file": filename, "status": "failed", "error": "Upload failed"})
            continue

        # Extract the fileId from the full upload response.
        files_info = resp_json.get("files", [])
        if not files_info:
            logging.error(f"[ERROR] No file info in response for: {filename}")
            results.append({"file": filename, "status": "failed", "error": "No file info"})
            continue

        file_id = files_info[0].get("fileId")
        if not file_id:
            logging.error(f"[ERROR] No fileId in response for: {filename}")
            results.append({"file": filename, "status": "failed", "error": "No fileId"})
            continue

        logging.info(f"[INFO] File uploaded successfully, fileId: {file_id}")

        # Block until processing finishes (or times out) before the next file.
        status = wait_for_file_processing(file_id)
        if status:
            logging.info(f"[INFO] File processing completed, status: {status}")
            results.append({"file": filename, "status": "completed", "fileId": file_id, "final_status": status})
        else:
            logging.error(f"[ERROR] File processing timeout or error for: {filename}")
            results.append({"file": filename, "status": "timeout", "fileId": file_id})

    logging.info(f"[INFO] Batch upload completed. Total files: {len(files)}, Results: {len(results)}")
    return results


def wait_for_file_processing(file_id, timeout=600, interval=1):
    """
    Poll the PPA process-audit table until the file leaves the
    Pending/Processing states or the timeout elapses.

    Args:
        file_id: File ID to poll (SUP_FILE_ID in the audit table).
        timeout: Maximum time to wait, in seconds.
        interval: Delay between polls, in seconds.

    Returns:
        str: The final status once the file is no longer
            Pending/Processing, or None if the timeout elapses.
    """
    start_time = time.time()

    while True:
        try:
            # Parameterized query against the audit table for this file.
            sql = "SELECT STATUS FROM [PPA].[TB_PPA_PROCESS_AUDIT] WHERE SUP_FILE_ID = ?"
            row = DBUtil.select_one(sql, (file_id,))

            if row:
                status = row[0]
                logging.info(f"[STATUS] fileId={file_id} status: {status}")
                if status not in ("Pending", "Processing"):
                    return status
            else:
                logging.info(f"[INFO] No record found for fileId={file_id}, waiting...")

        except Exception as e:
            # Transient DB errors are logged and retried on the next poll.
            logging.error(f"[ERROR] Database query failed: {e}")

        # Deadline check is OUTSIDE the try: previously it sat inside,
        # so a persistently failing query skipped it and looped forever.
        if time.time() - start_time > timeout:
            logging.warning(f"[WARN] Timeout waiting for fileId={file_id} after {timeout}s")
            return None

        time.sleep(interval)