import os
import uuid
import zipfile
import json
from flask import Flask, request, jsonify, send_file
from flask_cors import CORS
from werkzeug.utils import secure_filename
import shutil
from analyzer import create_analyzer

app = Flask(__name__)
CORS(app)  # allow cross-origin requests from the front-end

# All working directories live next to this source file.
_BASE_DIR = os.path.dirname(os.path.abspath(__file__))
UPLOAD_FOLDER = os.path.join(_BASE_DIR, 'uploads')          # raw uploaded archives
EXTRACT_FOLDER = os.path.join(_BASE_DIR, 'temp_extracted')  # per-task extracted trees
MERGED_FOLDER = os.path.join(_BASE_DIR, 'merged_files')     # per-task merged output files

for _folder in (UPLOAD_FOLDER, EXTRACT_FOLDER, MERGED_FOLDER):
    os.makedirs(_folder, exist_ok=True)

app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
app.config['MAX_CONTENT_LENGTH'] = 100 * 1024 * 1024  # cap request bodies at 100 MB

ALLOWED_EXTENSIONS = {'zip', 'rar', '7z', 'tar', 'gz'}

def allowed_file(filename):
    """Return True when *filename* has an extension listed in ALLOWED_EXTENSIONS.

    The extension is everything after the last dot, compared case-insensitively;
    a name without any dot is rejected.
    """
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[-1]
    return extension.lower() in ALLOWED_EXTENSIONS

@app.route('/api/upload', methods=['POST'])
def upload_file():
    """Accept an uploaded archive, extract it into a fresh task folder.

    Returns JSON with a generated ``task_id``, the flat file list and a
    rendered tree text. Errors return a JSON ``error`` with 400/500.
    """
    if 'file' not in request.files:
        return jsonify({'error': '没有文件'}), 400

    file = request.files['file']

    if file.filename == '':
        return jsonify({'error': '没有选择文件'}), 400

    if not allowed_file(file.filename):
        return jsonify({'error': '不支持的文件类型'}), 400

    task_id = str(uuid.uuid4())
    task_folder = os.path.join(EXTRACT_FOLDER, task_id)
    os.makedirs(task_folder, exist_ok=True)

    # secure_filename may strip everything (e.g. fully non-ASCII names), so fall
    # back to a fixed name; prefix with task_id so concurrent uploads of files
    # with the same name cannot overwrite each other in the shared folder.
    filename = secure_filename(file.filename) or 'upload.bin'
    file_path = os.path.join(UPLOAD_FOLDER, f"{task_id}_{filename}")
    file.save(file_path)

    # ALLOWED_EXTENSIONS admits rar/7z/tar/gz, but only zip extraction is
    # implemented below — reject other formats with a clear 400 instead of
    # letting zipfile.BadZipFile surface as an opaque 500.
    if not zipfile.is_zipfile(file_path):
        shutil.rmtree(task_folder, ignore_errors=True)
        return jsonify({'error': '当前仅支持zip格式的压缩包'}), 400

    try:
        # Extract the archive, refusing entries that would escape the task
        # folder (zip-slip / path traversal via '../' or absolute members).
        with zipfile.ZipFile(file_path, 'r') as zip_ref:
            root = os.path.realpath(task_folder)
            for member in zip_ref.namelist():
                target = os.path.realpath(os.path.join(task_folder, member))
                if target != root and not target.startswith(root + os.sep):
                    raise ValueError(f'压缩包包含非法路径: {member}')
            zip_ref.extractall(task_folder)

        # Render the tree once and persist it for the /api/tree endpoint.
        file_tree = get_file_tree(task_folder)
        tree_file_path = os.path.join(task_folder, "file_tree.txt")
        with open(tree_file_path, 'w', encoding='utf-8') as f:
            f.write(file_tree)

        # Flat list is what the analyzer consumes later.
        flat_files = get_flat_files(task_folder)

        return jsonify({
            'success': True,
            'task_id': task_id,
            'file_tree': flat_files,
            'tree_text': file_tree
        })

    except Exception as e:
        # Extraction failed part-way: drop the partial task folder.
        if os.path.exists(task_folder):
            shutil.rmtree(task_folder)
        return jsonify({'error': str(e)}), 500

def get_file_tree(directory):
    """Render *directory* as an ASCII tree using box-drawing connectors.

    At every level subdirectories are listed before files, each group sorted
    alphabetically. The root directory name itself is not printed — only its
    contents. Returns the tree as a single newline-terminated string.
    """
    def _generate_tree(dir_path, prefix_string="", is_last=True, is_root=False):
        base_name = os.path.basename(dir_path)
        current_line = ""

        if not is_root:
            # Connector for this directory: corner when it is the last entry
            # of its parent, tee otherwise.
            connector = "└── " if is_last else "├── "
            current_line = prefix_string + connector + base_name + "\n"

        # Split the directory contents into subdirectories and files.
        items = os.listdir(dir_path)
        dirs = []
        files = []

        for item in items:
            item_path = os.path.join(dir_path, item)
            if os.path.isdir(item_path):
                dirs.append(item)
            else:
                files.append(item)

        # Deterministic ordering regardless of filesystem listing order.
        dirs.sort()
        files.sort()

        # Recurse into subdirectories (rendered before files).
        for i, d in enumerate(dirs):
            # A directory is "last" only when no files follow it at this level.
            is_last_dir = (i == len(dirs) - 1) and len(files) == 0
            next_prefix = prefix_string

            if not is_root:
                # Extend the guide rail: blank when this branch is closed,
                # vertical bar when siblings still follow below.
                next_prefix += "    " if is_last else "│   "

            current_line += _generate_tree(
                os.path.join(dir_path, d),
                next_prefix,
                is_last_dir,
                False
            )

        # Render files at this level.
        for i, f in enumerate(files):
            is_last_file = i == len(files) - 1
            file_connector = "└── " if is_last_file else "├── "
            next_prefix = prefix_string + ("    " if is_last else "│   ")

            current_line += next_prefix + file_connector + f + "\n"

        return current_line

    # Start from the root (suppresses the root directory's own line).
    return _generate_tree(directory, "", True, True)

def get_flat_files(directory):
    """Return the paths of every file under *directory*, relative to it.

    Directories themselves are not listed; files directly under the root
    appear as bare names (no leading './').
    """
    collected = []

    for root, _dirs, filenames in os.walk(directory):
        # Path of the current folder relative to the extraction root;
        # the root itself maps to the empty string.
        rel_root = os.path.relpath(root, directory)
        if rel_root == '.':
            rel_root = ''

        collected.extend(os.path.join(rel_root, name) for name in filenames)

    return collected

def _append_log(log_file, message):
    """Append a single line to a task's analysis log file."""
    with open(log_file, 'a', encoding='utf-8') as f:
        f.write(message + "\n")

@app.route('/api/analyze', methods=['POST'])
def analyze_files():
    """Run the AI analyzer over an uploaded task's file list and merge results.

    Expects a JSON body with ``task_id`` and optional ``model_id``. Progress
    and errors are appended to ``analysis_log.txt`` inside the task folder.
    """
    # silent=True: a non-JSON body yields None and hits the 400 below instead
    # of Flask raising 415/400 before we can answer with our own error shape.
    data = request.get_json(silent=True)
    if not data:
        return jsonify({'error': '没有提供数据'}), 400

    task_id = data.get('task_id')
    model_id = data.get('model_id', 'deepseek')  # default to the deepseek model

    if not task_id:
        return jsonify({'error': '没有提供task_id'}), 400

    task_folder = os.path.join(EXTRACT_FOLDER, task_id)
    if not os.path.exists(task_folder):
        return jsonify({'error': f'找不到任务文件夹: {task_id}'}), 404

    # File list the analyzer will work on.
    flat_files = get_flat_files(task_folder)

    # Start a fresh log for this analysis run.
    log_file = os.path.join(task_folder, "analysis_log.txt")
    with open(log_file, 'w', encoding='utf-8') as f:
        f.write(f"开始分析任务: {task_id}\n")
        f.write(f"使用模型: {model_id}\n")
        f.write(f"文件数量: {len(flat_files)}\n")
        f.write("=" * 50 + "\n")

    try:
        # Build the model-specific analyzer and run it.
        analyzer = create_analyzer(model_id)
        result = analyzer.analyze_files(flat_files, task_id, EXTRACT_FOLDER)

        # The analyzer reports failures in-band via an "error" key.
        if "error" in result:
            error_msg = f"分析过程中出现错误: {result['error']}"
            _append_log(log_file, error_msg)
            return jsonify({'error': error_msg}), 500

        # An empty module list means the model response could not be parsed.
        if not result.get("modules", []):
            error_msg = "模型返回结果解析错误，未识别到任何模块"
            _append_log(log_file, error_msg)
            return jsonify({'error': error_msg}), 500

        # Record a summary of the parsed modules.
        with open(log_file, 'a', encoding='utf-8') as f:
            f.write(f"解析成功，共识别出 {len(result.get('modules', []))} 个模块\n")
            for i, module in enumerate(result.get('modules', [])):
                f.write(f"模块 {i+1}: {module.get('name', '未命名')}, 文件数: {len(module.get('files', []))}\n")

        # Produce the merged text files used by /api/download.
        try:
            merge_files(result, task_id)
            _append_log(log_file, "文件合并成功")
        except Exception as e:
            error_msg = f"合并文件时出错: {str(e)}"
            _append_log(log_file, error_msg)
            return jsonify({'error': error_msg}), 500

        return jsonify({
            'success': True,
            'task_id': task_id,
            'modules': result.get('modules', [])
        })

    except Exception as e:
        error_msg = f"处理请求时出现未预期错误: {str(e)}"
        _append_log(log_file, error_msg)
        return jsonify({'error': error_msg}), 500

def _strip_task_prefix(file_path, task_id):
    """Drop the analyzer's 'temp_extracted/<task_id>/' prefix, if present."""
    prefix = f"temp_extracted/{task_id}/"
    return file_path[len(prefix):] if file_path.startswith(prefix) else file_path

def _merge_file_list(file_paths, task_id, task_folder, output_path):
    """Concatenate the given files, each under a '--- path ---' header, into output_path.

    Unreadable files are skipped with a console message (best-effort, matching
    the original behavior).
    """
    sections = []
    for file_path in file_paths:
        abs_path = os.path.join(task_folder, _strip_task_prefix(file_path, task_id))
        try:
            with open(abs_path, 'r', encoding='utf-8', errors='ignore') as f:
                content = f.read()
            sections.append(f"--- {file_path} ---\n\n{content}\n\n")
        except Exception as e:
            print(f"读取文件 {file_path} 时出错: {str(e)}")

    with open(output_path, 'w', encoding='utf-8') as f:
        f.write('\n'.join(sections))

def merge_files(analysis_result, task_id):
    """Write merged text files for a task: one global file plus one per module.

    ``analysis_result`` is the analyzer output dict ('all_files', 'modules').
    Output goes to MERGED_FOLDER/<task_id>/.
    """
    task_folder = os.path.join(EXTRACT_FOLDER, task_id)
    merged_task_folder = os.path.join(MERGED_FOLDER, task_id)
    os.makedirs(merged_task_folder, exist_ok=True)

    # One combined file covering everything the analyzer saw.
    all_files = analysis_result.get('all_files', [])
    if all_files:
        _merge_file_list(all_files, task_id, task_folder,
                         os.path.join(merged_task_folder, 'all_merged.txt'))

    # One file per identified module.
    for module in analysis_result.get('modules', []):
        module_name = module.get('name', '未命名模块')
        module_files = module.get('files', [])

        if module_files:
            # Sanitize the module name so it is a safe file name; must stay in
            # sync with the same expression in the /api/download handler.
            safe_module_name = ''.join(c if c.isalnum() or c in [' ', '_', '-'] else '_' for c in module_name)
            _merge_file_list(module_files, task_id, task_folder,
                             os.path.join(merged_task_folder, f"{safe_module_name}.txt"))

@app.route('/api/tree', methods=['GET'])
def get_file_tree_text():
    """Return the rendered file-tree text previously saved for a task."""
    task_id = request.args.get('task_id')
    if not task_id:
        return jsonify({'error': '没有提供task_id'}), 400

    tree_path = os.path.join(EXTRACT_FOLDER, task_id, "file_tree.txt")
    if not os.path.exists(tree_path):
        return jsonify({'error': '找不到文件树文件'}), 404

    try:
        with open(tree_path, 'r', encoding='utf-8') as handle:
            content = handle.read()
    except Exception as e:
        return jsonify({'error': str(e)}), 500

    return jsonify({
        'success': True,
        'task_id': task_id,
        'tree_text': content
    })

@app.route('/api/modules', methods=['GET'])
def get_task_modules():
    """Return the module list from a task's persisted analysis result."""
    task_id = request.args.get('task_id')
    if not task_id:
        return jsonify({'error': '没有提供task_id'}), 400

    result_path = os.path.join(EXTRACT_FOLDER, task_id, "analysis_result.json")
    if not os.path.exists(result_path):
        return jsonify({'error': '找不到分析结果文件'}), 404

    try:
        with open(result_path, 'r', encoding='utf-8') as handle:
            analysis = json.load(handle)
    except Exception as e:
        return jsonify({'error': str(e)}), 500

    return jsonify({
        'success': True,
        'task_id': task_id,
        'modules': analysis.get('modules', [])
    })

@app.route('/api/download', methods=['GET'])
def download_file():
    """Serve a merged text file: a single module's file, or the combined one.

    Query params: ``task_id`` (required), ``module`` (optional module name).
    """
    task_id = request.args.get('task_id')
    module_name = request.args.get('module')

    if not task_id:
        return jsonify({'error': '没有提供task_id'}), 400

    # task_id comes from the client and is joined into a path handed to
    # send_file — reject anything secure_filename would change (e.g. '../')
    # to prevent path traversal / arbitrary file reads. Legitimate UUID task
    # ids pass through secure_filename unchanged.
    if task_id != secure_filename(task_id):
        return jsonify({'error': '非法的task_id'}), 400

    merged_task_folder = os.path.join(MERGED_FOLDER, task_id)

    if not os.path.exists(merged_task_folder):
        return jsonify({'error': '找不到合并文件文件夹'}), 404

    try:
        if module_name:
            # Sanitize exactly as merge_files() does when writing the file.
            safe_module_name = ''.join(c if c.isalnum() or c in [' ', '_', '-'] else '_' for c in module_name)
            file_name = f"{safe_module_name}.txt"
            download_name = file_name
            missing_error = f'找不到模块文件: {module_name}'
        else:
            file_name = 'all_merged.txt'
            download_name = f"all_merged_{task_id}.txt"
            missing_error = '找不到合并文件'

        file_path = os.path.join(merged_task_folder, file_name)
        if not os.path.exists(file_path):
            return jsonify({'error': missing_error}), 404

        return send_file(
            file_path,
            as_attachment=True,
            download_name=download_name,
            mimetype='text/plain'
        )

    except Exception as e:
        return jsonify({'error': str(e)}), 500

@app.route('/api/models', methods=['GET'])
def get_models():
    """Return the selectable AI models as a JSON list of {id, name} objects."""
    available = [
        ("deepseek", "Deepseek Reasoner"),
        ("qwen", "Qwen Max"),
        ("gemini", "Gemini 2.5"),
    ]
    return jsonify([{"id": model_id, "name": label} for model_id, label in available])

if __name__ == '__main__':
    # Development entry point: Flask's built-in server with the debugger
    # enabled on port 5000. NOTE(review): debug=True must not ship to
    # production — it exposes the Werkzeug interactive debugger.
    app.run(debug=True, port=5000) 