"""
S1手环数据解析API路由
专门为data-parse.html页面提供后端API支持
包括S1手环数据文件上传、解析、下载等功能
"""

import os
import uuid
import zipfile
import logging
from datetime import datetime
from pathlib import Path
from typing import List, Dict, Any
from flask import Blueprint, request, jsonify, send_file
from werkzeug.utils import secure_filename
from werkzeug.exceptions import RequestEntityTooLarge

from src.sensor_extractor import SensorExtractor

# 创建蓝图 - 专门为S1手环数据解析页面服务
file_bp = Blueprint('file_operations', __name__, url_prefix='/files')

# 配置日志
logger = logging.getLogger(__name__)

# 配置目录
UPLOAD_FOLDER = Path('uploads')
OUTPUT_FOLDER = Path('output')
TEMP_FOLDER = Path('temp')

# Allowed upload extensions -- only the S1 wristband .txt data dumps.
ALLOWED_EXTENSIONS = {'txt'}

def allowed_file(filename: str) -> bool:
    """Return True when *filename* carries an extension listed in ALLOWED_EXTENSIONS."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1]
    return extension.lower() in ALLOWED_EXTENSIONS

def generate_task_id() -> str:
    """Create a unique task identifier (random UUID4 rendered as a string)."""
    return f"{uuid.uuid4()}"

@file_bp.route('/upload', methods=['POST'])
def upload_files():
    """Upload endpoint for S1 wristband data files.

    Accepts a multipart 'files' field, stores every valid .txt file under
    a freshly generated task folder, and returns the task_id together with
    per-file metadata (filename, size, stored path).

    Returns:
        JSON with task_id/files/message on success; JSON error with
        HTTP 400 (no valid files), 413 (too large) or 500 otherwise.
    """
    try:
        # Reject requests that carry no file part at all.
        if 'files' not in request.files:
            return jsonify({'error': '没有选择文件'}), 400

        files = request.files.getlist('files')
        if not files or all(file.filename == '' for file in files):
            return jsonify({'error': '没有选择文件'}), 400

        # Allocate a unique per-task directory for this upload batch.
        task_id = generate_task_id()
        task_folder = UPLOAD_FOLDER / task_id
        # parents=True so a missing top-level uploads/ directory is created
        # too (matches convert_files; a plain mkdir raised FileNotFoundError
        # when uploads/ did not exist yet).
        task_folder.mkdir(parents=True, exist_ok=True)

        uploaded_files = []

        # Persist each acceptable file and record its metadata.
        for file in files:
            if file and file.filename and allowed_file(file.filename):
                filename = secure_filename(file.filename)
                # secure_filename can return '' for names with no safe chars.
                if filename:
                    file_path = task_folder / filename
                    file.save(str(file_path))
                    uploaded_files.append({
                        'filename': filename,
                        'size': file_path.stat().st_size,
                        'path': str(file_path)
                    })
                    # Bug fix: this message previously logged the literal
                    # placeholder "(unknown)" instead of the stored path.
                    logger.info(f"文件上传成功: {file_path}")

        if not uploaded_files:
            return jsonify({'error': '没有有效的文件被上传'}), 400

        return jsonify({
            'task_id': task_id,
            'files': uploaded_files,
            'message': f'成功上传 {len(uploaded_files)} 个文件'
        })

    except RequestEntityTooLarge:
        return jsonify({'error': '文件太大，最大支持100MB'}), 413
    except Exception as e:
        logger.error(f"文件上传错误: {e}")
        return jsonify({'error': f'上传失败: {str(e)}'}), 500

@file_bp.route('/convert', methods=['POST'])
def convert_files():
    """One-shot convert endpoint: upload S1 wristband files and parse them.

    Form fields:
        files            -- one or more .txt data files (multipart).
        include_timestamp -- 'true'/'false' string; forwarded to the extractor.
        output_formats   -- JSON-encoded list of formats; only 'csv' is
                            honored here (defaults to ["csv"] on bad input).

    Returns:
        JSON with task_id, per-file output descriptors and an errors list
        for files that failed; HTTP 400/500 on request-level failures.
    """
    try:
        # Reject requests that carry no file part at all.
        if 'files' not in request.files:
            return jsonify({'error': '没有选择文件'}), 400

        files = request.files.getlist('files')
        if not files or all(file.filename == '' for file in files):
            return jsonify({'error': '没有选择文件'}), 400

        # Parse the optional form parameters.
        include_timestamp = request.form.get('include_timestamp', 'false').lower() == 'true'
        output_formats = request.form.get('output_formats', '["csv"]')

        import json
        try:
            output_formats = json.loads(output_formats)
        except (json.JSONDecodeError, TypeError):
            # Malformed client input: fall back to CSV instead of failing.
            # (Previously a bare `except:` which also masked real bugs.)
            output_formats = ['csv']

        # Allocate a unique per-task directory for this batch.
        task_id = generate_task_id()
        task_folder = UPLOAD_FOLDER / task_id
        task_folder.mkdir(parents=True, exist_ok=True)

        # Persist the uploaded files before conversion.
        uploaded_files = []
        for file in files:
            if file and file.filename and allowed_file(file.filename):
                filename = secure_filename(file.filename)
                file_path = task_folder / filename
                file.save(str(file_path))
                uploaded_files.append(file_path)
                logger.info(f"文件已保存: {file_path}")

        if not uploaded_files:
            return jsonify({'error': '没有有效的文件'}), 400

        # Output directory mirrors the task layout; parents=True so a
        # missing top-level output/ directory is created as well.
        output_folder = OUTPUT_FOLDER / task_id
        output_folder.mkdir(parents=True, exist_ok=True)

        extractor = SensorExtractor()

        results = []
        errors = []

        # Convert each saved file; a failure in one file must not abort
        # the rest of the batch.
        for file_path in uploaded_files:
            try:
                logger.info(f"开始解析文件: {file_path.name}")

                file_results = {
                    'filename': file_path.name,
                    'outputs': []
                }

                # Only CSV output is implemented on this endpoint.
                if 'csv' in output_formats:
                    # EDA channel -> CSV
                    eda_file = extractor.save_eda_csv(
                        file_path,
                        output_dir=output_folder,
                        include_timestamp=include_timestamp
                    )
                    file_results['outputs'].append({
                        'type': 'EDA',
                        'path': eda_file,
                        'filename': Path(eda_file).name
                    })

                    # PPG channel -> CSV
                    ppg_file = extractor.save_ppg_csv(
                        file_path,
                        output_dir=output_folder,
                        include_timestamp=include_timestamp
                    )
                    file_results['outputs'].append({
                        'type': 'PPG',
                        'path': ppg_file,
                        'filename': Path(ppg_file).name
                    })

                results.append(file_results)
                logger.info(f"文件解析完成: {file_path.name}")

            except Exception as e:
                error_msg = f"解析文件 {file_path.name} 时出错: {str(e)}"
                logger.error(error_msg)
                errors.append(error_msg)

        # Build the response; partial failures are reported alongside
        # the successful results.
        response_data = {
            'success': True,
            'task_id': task_id,
            'results': results,
            'message': f'成功处理 {len(results)} 个文件'
        }

        if errors:
            response_data['errors'] = errors
            response_data['message'] += f'，{len(errors)} 个文件处理失败'

        return jsonify(response_data)

    except Exception as e:
        logger.error(f"文件转换错误: {e}")
        return jsonify({'error': f'文件转换失败: {str(e)}'}), 500

@file_bp.route('/parse', methods=['POST'])
def parse_files():
    """Parse previously uploaded S1 wristband files for a given task.

    JSON body:
        task_id           -- id returned by the upload endpoint (required).
        include_timestamp -- bool forwarded to the extractor (default False).
        formats           -- list of output formats; 'csv' and 'json'
                             are supported (default ['csv']).

    Returns:
        JSON with results/errors; HTTP 400 for a bad request, 404 for an
        unknown task, 500 when every file fails or an unexpected error occurs.
    """
    try:
        data = request.get_json()
        if not data or 'task_id' not in data:
            return jsonify({'error': '缺少任务ID'}), 400

        task_id = data['task_id']
        # Security: task_id arrives from the request body and is joined
        # directly into filesystem paths. Reject anything that could
        # escape the upload root (legitimate ids are plain UUID strings).
        if not isinstance(task_id, str) or any(tok in task_id for tok in ('/', '\\', '..')):
            return jsonify({'error': '任务不存在'}), 404

        include_timestamp = data.get('include_timestamp', False)
        output_formats = data.get('formats', ['csv'])  # default to CSV

        task_folder = UPLOAD_FOLDER / task_id
        if not task_folder.exists():
            return jsonify({'error': '任务不存在'}), 404

        # Output directory mirrors the task layout; parents=True so a
        # missing top-level output/ directory is created as well.
        output_folder = OUTPUT_FOLDER / task_id
        output_folder.mkdir(parents=True, exist_ok=True)

        extractor = SensorExtractor()

        results = []
        errors = []

        # Parse every uploaded .txt file; one failure must not abort
        # the rest of the batch.
        for file_path in task_folder.glob('*.txt'):
            try:
                logger.info(f"开始解析文件: {file_path.name}")

                file_results = {
                    'filename': file_path.name,
                    'outputs': []
                }

                if 'csv' in output_formats:
                    # EDA channel -> CSV
                    eda_file = extractor.save_eda_csv(
                        file_path,
                        output_dir=output_folder,
                        include_timestamp=include_timestamp
                    )
                    file_results['outputs'].append({
                        'type': 'EDA',
                        'path': eda_file,
                        'filename': Path(eda_file).name
                    })

                    # PPG channel -> CSV
                    ppg_file = extractor.save_ppg_csv(
                        file_path,
                        output_dir=output_folder,
                        include_timestamp=include_timestamp
                    )
                    file_results['outputs'].append({
                        'type': 'PPG',
                        'path': ppg_file,
                        'filename': Path(ppg_file).name
                    })

                if 'json' in output_formats:
                    # EDA channel -> JSON
                    eda_json_file = extractor.save_eda_json(
                        file_path,
                        output_dir=output_folder,
                        include_timestamp=include_timestamp
                    )
                    file_results['outputs'].append({
                        'type': 'EDA',
                        'path': eda_json_file,
                        'filename': Path(eda_json_file).name
                    })

                    # PPG channel -> JSON
                    ppg_json_file = extractor.save_ppg_json(
                        file_path,
                        output_dir=output_folder,
                        include_timestamp=include_timestamp
                    )
                    file_results['outputs'].append({
                        'type': 'PPG',
                        'path': ppg_json_file,
                        'filename': Path(ppg_json_file).name
                    })

                # TODO: add HDF5 format support

                results.append(file_results)
                logger.info(f"文件解析完成: {file_path.name}")

            except Exception as e:
                error_msg = f"解析文件 {file_path.name} 时出错: {str(e)}"
                logger.error(error_msg)
                errors.append(error_msg)

        # When nothing succeeded, report the batch as failed.
        if not results and errors:
            return jsonify({'error': '所有文件解析失败', 'details': errors}), 500

        return jsonify({
            'task_id': task_id,
            'results': results,
            'errors': errors,
            'message': f'成功解析 {len(results)} 个文件'
        })

    except Exception as e:
        logger.error(f"解析错误: {e}")
        return jsonify({'error': f'解析失败: {str(e)}'}), 500

@file_bp.route('/download/<task_id>')
def download_results(task_id: str):
    """Download all parse results for a task.

    Sends a single file directly, or bundles multiple output files into
    a ZIP archive under TEMP_FOLDER first.

    Returns:
        A file/attachment response; JSON error with 404 when the task or
        its outputs are missing, 500 on unexpected failures.
    """
    try:
        logger.info(f"开始下载任务结果: {task_id}")
        output_folder = OUTPUT_FOLDER / task_id

        if not output_folder.exists():
            logger.error(f"任务目录不存在: {output_folder}")
            return jsonify({'error': '任务不存在'}), 404

        # Collect every generated output file for this task.
        output_files = list(output_folder.glob('*'))
        logger.info(f"找到输出文件: {[f.name for f in output_files]}")

        if not output_files:
            logger.error(f"任务 {task_id} 没有找到输出文件")
            return jsonify({'error': '没有找到输出文件'}), 404

        # Single file: send it directly. resolve() makes the path absolute;
        # send_file interprets relative paths against the process CWD,
        # which may differ from the folder these Paths are relative to.
        if len(output_files) == 1:
            logger.info(f"下载单个文件: {output_files[0].name}")
            return send_file(output_files[0].resolve(), as_attachment=True)

        # Multiple files: bundle them into one ZIP archive.
        zip_path = TEMP_FOLDER / f"{task_id}_results.zip"
        logger.info(f"创建ZIP文件: {zip_path}")

        # Make sure the temp directory (and any missing parents) exists.
        TEMP_FOLDER.mkdir(parents=True, exist_ok=True)

        with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
            for file_path in output_files:
                logger.info(f"添加文件到ZIP: {file_path.name}")
                # Store each file at the archive root under its bare name.
                zipf.write(file_path, file_path.name)

        logger.info(f"ZIP文件创建完成: {zip_path}")
        return send_file(zip_path.resolve(), as_attachment=True, download_name=f"results_{task_id}.zip")

    except Exception as e:
        logger.error(f"下载错误: {e}", exc_info=True)
        return jsonify({'error': f'下载失败: {str(e)}'}), 500

@file_bp.route('/download/<task_id>/<filename>')
def download_single_file(task_id: str, filename: str):
    """Download one named output file belonging to a task.

    Returns:
        The file as an attachment; JSON error with 404 when missing or
        when the requested path would escape the task's output folder,
        500 on unexpected failures.
    """
    try:
        # Security: resolve both paths and require the target to live
        # inside the task's output folder, so crafted task_id/filename
        # values cannot reach files outside OUTPUT_FOLDER.
        base = (OUTPUT_FOLDER / task_id).resolve()
        file_path = (base / filename).resolve()

        if base not in file_path.parents or not file_path.exists():
            return jsonify({'error': '文件不存在'}), 404

        return send_file(file_path, as_attachment=True)

    except Exception as e:
        logger.error(f"下载单个文件错误: {e}")
        return jsonify({'error': f'下载失败: {str(e)}'}), 500

@file_bp.route('/tasks/<task_id>/status')
def get_task_status(task_id: str):
    """Report which upload/output artifacts currently exist for a task.

    Returns:
        JSON with existence flags and the file names found in the task's
        upload and output folders; JSON error with 500 on failure.
    """
    try:
        upload_dir = UPLOAD_FOLDER / task_id
        output_dir = OUTPUT_FOLDER / task_id

        def _entry_names(folder):
            # File names inside *folder*, or [] when it does not exist.
            return [entry.name for entry in folder.glob('*')] if folder.exists() else []

        return jsonify({
            'task_id': task_id,
            'upload_exists': upload_dir.exists(),
            'output_exists': output_dir.exists(),
            'uploaded_files': _entry_names(upload_dir),
            'output_files': _entry_names(output_dir),
        })

    except Exception as e:
        logger.error(f"获取任务状态错误: {e}")
        return jsonify({'error': f'获取状态失败: {str(e)}'}), 500