from flask import Flask, render_template, request, jsonify, send_file
import json
import os
import tempfile
import threading
import time
from src.dedup_factory import DedupFactory
from dedup_mul_union_with_progress import filter_len  # 保持向后兼容

app = Flask(__name__)

# Global registry: task_id -> ProgressTracker, polled by /status/<task_id>.
processing_status = {}
processing_tasks = {}  # task_id -> worker thread; NOTE(review): never written in this file — confirm intended use

class ProgressTracker:
    """Tracks the lifecycle of one background deduplication task.

    Instances re-register themselves in the module-level
    ``processing_status`` dict on every state change so HTTP pollers
    always observe the latest snapshot.
    """

    def __init__(self, task_id):
        self.task_id = task_id
        self.status = "started"
        self.progress = 0
        self.message = "开始处理..."
        self.result = None
        self.error = None

    def _publish(self):
        # Re-register so status pollers see the newest state.
        processing_status[self.task_id] = self

    def update(self, progress, message):
        """Record intermediate progress (0-100) and a human-readable message."""
        self.progress = progress
        self.message = message
        self._publish()

    def complete(self, result):
        """Mark the task finished and attach its result payload."""
        self.progress = 100
        self.status = "completed"
        self.message = "处理完成"
        self.result = result
        self._publish()

    def fail(self, error):
        """Mark the task failed, keeping the stringified error."""
        self.status = "failed"
        self.error = str(error)
        self.message = f"处理失败: {error}"
        self._publish()

@app.route('/')
def index():
    """Render the landing page with the list of available dedup methods."""
    available = DedupFactory.get_available_methods()
    return render_template('index.html', methods=available)

@app.route('/api/methods')
def get_methods():
    """Return every registered deduplication method as JSON."""
    return jsonify(DedupFactory.get_available_methods())

@app.route('/api/methods/<method_name>')
def get_method_info(method_name):
    """Return details for one dedup method: 404 if unknown, 500 on lookup failure."""
    try:
        info = DedupFactory.get_method_info(method_name)
        if not info:
            return jsonify({'error': '方法不存在'}), 404
        return jsonify(info)
    except Exception as e:
        return jsonify({'error': str(e)}), 500

@app.route('/upload', methods=['POST'])
def upload_file():
    """Accept a JSONL upload, validate dedup parameters, and launch a
    background processing task.

    Returns JSON ``{'task_id': ...}`` on success, or ``{'error': ...}``
    with a 4xx/5xx status on failure.
    """
    try:
        if 'file' not in request.files:
            return jsonify({'error': '没有文件上传'}), 400

        file = request.files['file']
        if file.filename == '':
            return jsonify({'error': '没有选择文件'}), 400

        # Parameters ('method' kept as an alias for frontend compatibility).
        dedup_method = request.form.get('dedup_method') or request.form.get('method') or 'minhash'
        try:
            min_length = int(request.form.get('min_length', 50))
        except ValueError:
            # Previously a malformed min_length bubbled up as a generic 500;
            # report it as a client error instead.
            return jsonify({'error': '参数验证失败: min_length 必须是整数'}), 400

        # Collect method-specific parameters declared by the factory.
        method_params = {}
        method_info = DedupFactory.get_method_info(dedup_method)
        if method_info:
            for param_name in method_info.get('parameters', {}).keys():
                # Simple alias compatibility (band_size <-> bands_size).
                param_value = request.form.get(param_name)
                if param_value is None and param_name == 'band_size':
                    param_value = request.form.get('bands_size')
                if param_value is not None:
                    method_params[param_name] = param_value

        # Validate before spending any time on the upload.
        try:
            validated_params = DedupFactory.validate_parameters(dedup_method, method_params)
        except ValueError as e:
            return jsonify({'error': f'参数验证失败: {str(e)}'}), 400

        # Millisecond timestamp doubles as the task id.
        task_id = str(int(time.time() * 1000))
        tracker = ProgressTracker(task_id)
        processing_status[task_id] = tracker

        # Persist the upload before handing off to the worker thread.
        temp_dir = tempfile.mkdtemp()
        input_path = os.path.join(temp_dir, 'input.jsonl')
        file.save(input_path)

        thread = threading.Thread(
            target=process_file,
            args=(input_path, task_id, dedup_method, validated_params, min_length),
            daemon=True,  # don't block interpreter shutdown on a running task
        )
        # Register the worker thread (processing_tasks existed but was never
        # populated, so tasks could not be inspected later).
        processing_tasks[task_id] = thread
        thread.start()

        return jsonify({'task_id': task_id})

    except Exception as e:
        return jsonify({'error': str(e)}), 500

def _write_jsonl(path, records):
    """Write an iterable of JSON-serializable records to *path*, one per line."""
    with open(path, 'w', encoding='utf-8') as f:
        for record in records:
            f.write(json.dumps(record, ensure_ascii=False) + '\n')


def process_file(input_path, task_id, dedup_method, method_params, min_length):
    """Background worker: read a JSONL file, length-filter, deduplicate, and
    write the output/filtered/pairs files next to the input.

    Progress and the final result (or error) are published through the
    task's ProgressTracker in ``processing_status``.  A user-initiated stop
    (status set to 'stopped' by /stop/<task_id>) is honored at each phase
    boundary.
    """
    # Look the tracker up BEFORE the try block: the old code did the lookup
    # inside it, so a missing task id raised NameError in the except handler.
    tracker = processing_status.get(task_id)
    if tracker is None:
        return

    try:
        # ---- phase 1: read (10-30%) ----
        tracker.update(10, "读取文件内容...")
        raw_entries = []
        texts = []
        with open(input_path, 'r', encoding='utf-8') as f:
            lines = f.readlines()

        total_lines = len(lines)
        for i, line in enumerate(lines):
            if not line.strip():
                continue
            try:
                entry = json.loads(line)
                if 'text' in entry:
                    raw_entries.append(entry)
                    texts.append(entry['text'])
            except json.JSONDecodeError:
                pass  # best-effort: skip malformed lines

            if total_lines > 0 and i % max(1, total_lines // 20) == 0:
                tracker.update(int(10 + (i / total_lines) * 20),
                               f"读取文件内容... ({i+1}/{total_lines})")

        if tracker.status == 'stopped':  # /stop was previously ignored
            return

        # ---- phase 2: length filter (30-40%) ----
        tracker.update(30, "过滤无效文本...")
        filtered_texts = []
        filtered_out = []
        total_texts = len(texts)
        for i, text in enumerate(texts):
            is_valid, reason = filter_len(text, min_length)
            if is_valid:
                filtered_texts.append(text)
            else:
                filtered_out.append((text, reason))

            if total_texts > 0 and i % max(1, total_texts // 10) == 0:
                tracker.update(int(30 + (i / total_texts) * 10),
                               f"过滤文本... ({i+1}/{total_texts})")

        if tracker.status == 'stopped':
            return

        # ---- phase 3: dedup (40-90%) ----
        tracker.update(40, f"执行{dedup_method}去重处理...")
        try:
            deduper = DedupFactory.create_deduper(dedup_method, filtered_texts, **method_params)

            def progress_callback(current, total, phase):
                # Map the dedup algorithm's progress onto the 40-90% band.
                if total > 0:
                    tracker.update(int(40 + (current / total) * 50),
                                   f"{phase}... ({current}/{total})")

            deduper.set_progress_callback(progress_callback)
            result = deduper.execute()
            duplicate_pairs = deduper.get_duplicate_pairs()
        except Exception as e:
            raise Exception(f"去重算法执行失败: {str(e)}")

        if tracker.status == 'stopped':
            return

        # ---- phase 4: build outputs (90-100%) ----
        tracker.update(90, "构建输出结果...")
        # Keep exactly ONE entry per surviving text.  The previous version
        # kept every raw entry whose text was in the survivor set, so exact
        # duplicates were all retained despite deduplication.
        remaining = set(result)
        deduped_entries = []
        for entry in raw_entries:
            text = entry.get('text', '')
            if text in remaining:
                deduped_entries.append(entry)
                remaining.discard(text)

        # Outputs go next to the input file (its private temp directory).
        output_dir = os.path.dirname(input_path)

        output_path = os.path.join(output_dir, 'output.jsonl')
        _write_jsonl(output_path, deduped_entries)

        filtered_path = os.path.join(output_dir, 'filtered.jsonl')
        _write_jsonl(
            filtered_path,
            ({"reason": reason, "text": text} for text, reason in filtered_out),
        )

        pairs_path = os.path.join(output_dir, 'duplicate_pairs.jsonl')
        _write_jsonl(pairs_path, duplicate_pairs)

        result_info = {
            'output_path': output_path,
            'filtered_path': filtered_path,
            'pairs_path': pairs_path,
            'stats': {
                'original_count': len(texts),
                'filtered_count': len(filtered_texts),
                'deduped_count': len(deduped_entries),
                # Duplicates removed during dedup only; the old formula also
                # counted length-filtered texts as "duplicates".
                'duplicate_count': len(filtered_texts) - len(deduped_entries),
                'duplicate_pairs_count': len(duplicate_pairs),
                'method': dedup_method
            }
        }

        # Don't clobber a user-initiated stop with "completed".
        if tracker.status != 'stopped':
            tracker.complete(result_info)

    except Exception as e:
        tracker.fail(e)

@app.route('/status/<task_id>')
def get_status(task_id):
    """Report the current state of a processing task as JSON (404 if unknown)."""
    tracker = processing_status.get(task_id)
    if tracker is None:
        return jsonify({'status': 'not_found'}), 404

    return jsonify({
        'status': tracker.status,
        'progress': tracker.progress,
        'message': tracker.message,
        'error': tracker.error,
        'result': tracker.result
    })

@app.route('/download/<task_id>/<file_type>')
def download_file(task_id, file_type):
    """Stream one of a completed task's output files as an attachment."""
    if task_id not in processing_status:
        return jsonify({'error': '任务不存在'}), 404

    tracker = processing_status[task_id]
    if tracker.status != 'completed':
        return jsonify({'error': '任务未完成'}), 400
    if not tracker.result:
        return jsonify({'error': '没有结果'}), 400

    # Public file-type name -> key of the path recorded by the worker.
    result_key_by_type = {
        'result': 'output_path',
        'filtered': 'filtered_path',
        'pairs': 'pairs_path',
    }
    if file_type not in result_key_by_type:
        return jsonify({'error': '文件类型不存在'}), 404

    file_path = tracker.result[result_key_by_type[file_type]]
    if not os.path.exists(file_path):
        return jsonify({'error': '文件不存在'}), 404

    return send_file(file_path, as_attachment=True)

@app.route('/stop/<task_id>', methods=['POST'])
def stop_processing(task_id):
    """Flag a running task as stopped at the user's request."""
    tracker = processing_status.get(task_id)
    if tracker is None:
        return jsonify({'error': '任务不存在'}), 404

    if tracker.status in ('completed', 'failed'):
        return jsonify({'error': '任务已完成或失败'}), 400

    # Only the status flag is flipped here; the worker thread is expected
    # to observe it and wind down.
    tracker.status = 'stopped'
    tracker.message = '用户手动停止处理'
    tracker.error = '用户手动停止处理'

    return jsonify({'status': 'stopped'})

if __name__ == '__main__':
    # NOTE(review): debug=True combined with host='0.0.0.0' exposes the
    # Werkzeug interactive debugger to the whole network — disable debug
    # (or bind to 127.0.0.1) before deploying.
    app.run(debug=True, host='0.0.0.0', port=5000)
