import os
import json
import csv
import threading
from datetime import datetime
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import requests
from flask import Flask, jsonify
from collections import defaultdict
import pandas as pd
import time
from utils.logger import setup_logging, app_logger, log_function
import hashlib
from .statistics import StatisticsService

class FileHandler(FileSystemEventHandler):
    """Watchdog handler that forwards new or changed ``.json`` files to the service.

    Keeps an MD5 hash per processed path so identical content is never processed
    twice, while a file whose content changed is processed again.
    """

    def __init__(self, service):
        self.service = service
        # Maps file path -> MD5 hash of the content that was last processed.
        self.processed_files = {}

    def get_file_hash(self, file_path):
        """Return the MD5 hex digest of the file, read in 4 KiB chunks."""
        hash_md5 = hashlib.md5()
        with open(file_path, "rb") as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hash_md5.update(chunk)
        return hash_md5.hexdigest()

    def process_if_needed(self, file_path):
        """Process a ``.json`` file if it is new or its content changed since last seen."""
        if file_path.endswith('.json'):
            current_hash = self.get_file_hash(file_path)
            if (file_path not in self.processed_files or
                self.processed_files[file_path] != current_hash):
                self.service.process_file(file_path)
                self.processed_files[file_path] = current_hash

    def on_created(self, event):
        if not event.is_directory:
            self.process_if_needed(event.src_path)

    def on_modified(self, event):
        # Fix: the hash bookkeeping exists so changed files get re-processed,
        # but only on_created was wired up, so in-place updates to an existing
        # file were silently ignored. The hash check in process_if_needed keeps
        # this callback idempotent for unchanged content.
        if not event.is_directory:
            self.process_if_needed(event.src_path)

class TextDetectionService:
    """Text-detection service.

    Watches an input directory for JSON-lines files, sends each record's text
    to a detection API, classifies it as LLM/HUMAN against a confidence
    threshold, and writes results to timestamped CSV files. Also hosts a small
    Flask endpoint for manually triggering statistics, which are delegated to
    StatisticsService.
    """

    def __init__(self, config):
        self.config = config
        self.text_config = config['text_detection']
        self.api_config = config['api']
        self.flask_app = Flask(__name__)
        self.observer = None
        self.running = False
        self.stats_timer = None

        # Set up the per-service loggers (info/warn/error/debug dict).
        self.loggers = setup_logging(
            log_dir=config['global']['log_dir'],
            service_name='text_detection'
        )
        # Attach service context to the shared application logger.
        app_logger.set_context(service='text_detection')

        # Logger used by the log_function decorator below.
        self.process_logger = app_logger.get_logger(
            name='text_detection.info',
            log_dir=config['global']['log_dir']
        )

        # Wrap process_file with the logging decorator on this instance.
        self.process_file = log_function(logger=self.process_logger)(self.process_file)

        @self.flask_app.route('/statistics/generate', methods=['POST'])
        def generate_statistics():
            self.generate_statistics_manual()
            return jsonify({"status": "ok", "message": "统计任务已触发"}), 200

        self.statistics_service = StatisticsService(config, self.loggers)

    def process_file(self, file_path):
        """Process a single JSON-lines file and return processing statistics.

        Returns:
            dict with keys ``processed_lines``, ``success_count`` and
            ``file_count`` on success, ``None`` on failure (error is logged).
        """
        try:
            # The file must live inside the configured input directory.
            if not file_path.startswith(self.text_config['input_dir']):
                self.loggers['error'].error(f"文件不在输入目录中: {file_path}")
                return

            # Skip anything sitting in an output or statistics directory.
            if any(x in file_path for x in ['output', 'statistics']):
                self.loggers['warn'].warning(f"跳过输出或统计目录中的文件: {file_path}")
                return

            # Count total lines first. Fix: use a context manager so the
            # counting file handle is closed instead of leaked.
            with open(file_path, 'r', encoding='utf-8') as count_f:
                total_lines = sum(1 for _ in count_f)
            file_name = os.path.basename(file_path)
            self.loggers['info'].info(
                f"开始处理任务:\n"
                f"- 文件总数: 1\n"
                f"- 总行数: {total_lines}"
            )

            results = []
            processed_lines = 0
            success_count = 0
            file_count = 0
            start_time = time.time()

            def print_progress():
                """Log a textual progress bar with elapsed / estimated remaining time."""
                bar_length = 50
                # Fix: an empty input file (total_lines == 0) previously
                # raised ZeroDivisionError here, turning a no-op file into a
                # logged failure.
                if total_lines > 0:
                    progress = processed_lines / total_lines * 100
                    filled_length = int(bar_length * processed_lines / total_lines)
                else:
                    progress = 100.0
                    filled_length = bar_length
                bar = '=' * filled_length + '-' * (bar_length - filled_length)

                # Elapsed time, plus a linear extrapolation of remaining time.
                elapsed_time = time.time() - start_time
                minutes = int(elapsed_time // 60)
                seconds = int(elapsed_time % 60)

                if progress > 0:
                    total_time = elapsed_time / (progress / 100)
                    remaining_time = total_time - elapsed_time
                    remaining_minutes = int(remaining_time // 60)
                    remaining_seconds = int(remaining_time % 60)
                    time_info = f"用时: {minutes}分{seconds}秒 预计剩余: {remaining_minutes}分{remaining_seconds}秒"
                else:
                    time_info = f"用时: {minutes}分{seconds}秒"

                self.loggers['info'].info(
                    f"总进度: [{bar}] {progress:.1f}% ({processed_lines}/{total_lines}行) "
                    f"成功: {success_count} 文件: {file_count}/1 {time_info}\n"
                    f"当前处理: {file_name}"
                )

            def save_results(results_to_save, current_file_count):
                """Write accumulated results to a timestamped CSV file."""
                timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
                output_file = os.path.join(
                    output_dir,
                    f"检测结果_{timestamp}_{current_file_count}.csv"
                )

                os.makedirs(os.path.dirname(output_file), exist_ok=True)
                with open(output_file, 'w', encoding='utf-8', newline='') as f:
                    writer = csv.DictWriter(f, fieldnames=self.text_config['output_fields'])
                    writer.writeheader()
                    writer.writerows(results_to_save)

                self.loggers['info'].info(f"保存结果文件: {output_file}")
                return []  # empty list so the caller can reset its accumulator

            # Build the output directory (optionally a per-day subdirectory).
            output_dir = self.text_config['output_dir']
            if self.text_config['daily_dir']:
                output_dir = os.path.join(output_dir, datetime.now().strftime('%Y-%m-%d'))
            os.makedirs(output_dir, exist_ok=True)

            # Read the file as JSON-lines, one record per line.
            with open(file_path, 'r', encoding='utf-8') as f:
                batch = []
                for line in f:
                    processed_lines += 1
                    try:
                        item = json.loads(line.strip())
                        # Extract the text field to be checked.
                        text = item.get(self.text_config['input_fields'][0], '')
                        if not text:
                            self.loggers['warn'].warning(f"跳过空文本: {item}")
                            continue

                        if len(text) < self.text_config['length']:
                            self.loggers['info'].info(f"跳过短文本: {text}")
                            continue

                        batch.append((item, text))

                        # Flush the batch through the API once it is full.
                        if len(batch) >= self.text_config['batch_size']:
                            processed = self._process_batch(batch)
                            if processed:
                                results.extend(processed)
                                success_count += len(processed)

                                # Persist results once enough have accumulated.
                                if len(results) >= self.text_config['batch_size']:
                                    file_count += 1
                                    results = save_results(results, file_count)

                            batch = []

                        # Report progress every 1000 lines.
                        if processed_lines % 1000 == 0:
                            print_progress()

                    except json.JSONDecodeError as e:
                        self.loggers['error'].error(f"JSON解析失败: 行 {str(e)}")
                        continue
                    except Exception as e:
                        self.loggers['error'].error(f"处理行失败: {str(e)}")
                        continue

                # Flush the final partial batch.
                if batch:
                    processed = self._process_batch(batch)
                    if processed:
                        results.extend(processed)
                        success_count += len(processed)

                # Persist any remaining results.
                if results:
                    file_count += 1
                    save_results(results, file_count)

                # Compute total processing time.
                end_time = time.time()
                elapsed_time = end_time - start_time
                minutes = int(elapsed_time // 60)
                seconds = int(elapsed_time % 60)

                # Final progress line and summary. Fix: guard the average-speed
                # division against a zero elapsed time.
                print_progress()
                avg_speed = processed_lines / elapsed_time if elapsed_time > 0 else 0.0
                self.loggers['info'].info(
                    f"任务完成:\n"
                    f"- 总文件数: 1\n"
                    f"- 总行数: {total_lines}\n"
                    f"- 成功处理: {success_count}\n"
                    f"- 总用时: {minutes}分{seconds}秒\n"
                    f"- 平均速度: {avg_speed:.1f}行/秒"
                )

            # Return the run statistics to the caller.
            return {
                'processed_lines': processed_lines,
                'success_count': success_count,
                'file_count': file_count
            }

        except Exception as e:
            self.loggers['error'].error(f"处理文件失败: {file_path}, 错误: {str(e)}")
            return None

    def start(self):
        """Start the service: Flask endpoint, directory watcher, backlog processing."""
        self.running = True

        # Make sure the output and statistics directories exist.
        os.makedirs(self.text_config['output_dir'], exist_ok=True)
        os.makedirs(self.text_config['statistics_dir'], exist_ok=True)

        if self.text_config['enabled']:
            self.loggers['info'].info("启动文本检测服务...")

            # Run Flask in a daemon thread so it does not block the watcher.
            flask_thread = threading.Thread(
                target=lambda: self.flask_app.run(
                    host=self.config['flask']['host'],
                    port=self.config['flask']['port']
                )
            )
            flask_thread.daemon = True
            flask_thread.start()
            self.loggers['info'].info("Flask 服务已启动")

            # Watch the input directory for new files.
            event_handler = FileHandler(self)
            self.observer = Observer()
            self.observer.schedule(
                event_handler,
                self.text_config['input_dir'],
                recursive=False
            )
            self.observer.start()
            self.loggers['info'].info(f"开始监控目录: {self.text_config['input_dir']}")

            # Process files that already exist in the input directory.
            existing_files = [f for f in os.listdir(self.text_config['input_dir'])
                             if f.endswith('.json')]
            if existing_files:
                self.loggers['info'].info(f"发现 {len(existing_files)} 个待处理文件")
                for file_name in existing_files:
                    file_path = os.path.join(self.text_config['input_dir'], file_name)
                    event_handler.process_if_needed(file_path)

                # NOTE(review): process_if_needed is synchronous, so this wait
                # is normally already satisfied; it only guards against watcher
                # callbacks still running concurrently.
                while len(event_handler.processed_files) < len(existing_files):
                    time.sleep(1)

                self.loggers['info'].info("所有文件处理完成")

                # Run statistics once the backlog has been processed.
                if self.text_config['statistics']['enabled']:
                    self.loggers['info'].info("文本检测完成，开始执行统计任务...")
                    self.statistics_service.generate_statistics()
        else:
            self.loggers['info'].info("文本检测服务未启用")

        # Start the statistics service regardless of detection being enabled.
        self.statistics_service.start()

    def stop(self):
        """Stop the directory watcher and the statistics service."""
        self.running = False
        if self.observer:
            self.observer.stop()
            self.observer.join()
        self.statistics_service.stop()

    def call_api(self, text):
        """Call the detection API for one text, retrying on transport errors.

        Returns the parsed JSON response (must contain ``data.fake_conf``).
        Raises the last RequestException after max_retries failed attempts, or
        ValueError on a malformed response.
        """
        for attempt in range(self.api_config['max_retries']):
            try:
                self.loggers['debug'].debug(f"调用API, 尝试数: {attempt + 1}")
                response = requests.post(
                    self.api_config['url'],
                    json={"text": text},
                    timeout=self.api_config['timeout']
                )
                response.raise_for_status()

                # Validate the response envelope.
                data = response.json()
                if not data or 'data' not in data or 'fake_conf' not in data['data']:
                    raise ValueError(f"API响应格式错误: {data}")

                self.loggers['debug'].debug(f"API调用成功: {data}")
                return data

            except requests.exceptions.RequestException as e:
                self.loggers['error'].error(f"API调用失败: {str(e)}")
                if attempt == self.api_config['max_retries'] - 1:
                    raise
                time.sleep(1 * (attempt + 1))  # linear backoff: 1s, 2s, 3s, ...
            except Exception as e:
                self.loggers['error'].error(f"API调用异常: {str(e)}")
                raise

        # Fix: message previously read "超过大重试次数" (missing 最).
        raise Exception("API调用超过最大重试次数")

    def generate_statistics_manual(self):
        """Manually trigger statistics generation (only in manual mode)."""
        if not self.text_config['statistics']['enabled']:
            self.loggers['warn'].warning("统计功能未启用")
            return

        if self.text_config['statistics']['mode'] != 'manual':
            # Fix: this branch fires when the mode is NOT manual, but the old
            # message claimed the opposite ("当前是手动统计模式").
            self.loggers['warn'].warning("当前不是手动统计模式")
            return

        self.loggers['info'].info("=== 开始手动统计 ===")
        self.statistics_service.generate_statistics()

    def _process_batch(self, batch):
        """Run one batch of (item, text) pairs through the API.

        Returns a list of result dicts (uuid, text, category, confidence);
        records that fail are logged and skipped.
        """
        results = []
        for item, text in batch:
            try:
                # Call the detection API.
                response = self.call_api(text)
                if not response or 'data' not in response or 'fake_conf' not in response['data']:
                    self.loggers['error'].error(f"API响应格式错误: {response}")
                    continue

                fake_conf = response['data']['fake_conf']

                # Classify against the configured threshold; confidence is
                # expressed relative to the chosen category.
                if fake_conf >= self.text_config['fake_conf_threshold']:
                    category = "LLM"
                    confidence = fake_conf * 100
                else:
                    category = "HUMAN"
                    confidence = (1 - fake_conf) * 100

                result = {
                    'uuid': item.get('uuid', ''),
                    '文本': text,
                    '检测结果': category,
                    '概率': f"{confidence:.2f}%"
                }
                results.append(result)

            except Exception as e:
                # Fix: message previously read "处理记录失" (missing 败).
                self.loggers['error'].error(f"处理记录失败: {str(e)}, 文本: {text[:100]}...")

        return results