import os
import glob
import time
import queue
import threading
import traceback
from datetime import datetime
from sinotrans.utils import Logger
from dao.email_dao import EmailDao
from dao.field_map_dao import FieldMapDao
from concurrent.futures import ThreadPoolExecutor
from infrastructure.db_pool_manager import DatabasePoolManager
from concurrent.futures import TimeoutError as FutureTimeoutError

import streamlit as st
from service.email_analyse import EmlAidata
from sinotrans.core import EmailClient, FileProcessor, ExcelProcessor
# Lightweight memory monitor, instantiated in EmailProcessor.__init__
from service.simple_memory_monitor import SimpleMemoryMonitor

class EmailProcessor:
    def __init__(self, dao_config_path="conf/dao_config.toml", max_workers=3, heartbeat_interval=20, processed_uids_max_size=20, max_results_memory_limit=10, max_deleted_email_limit=10):
        """Concurrent e-mail processor.

        Pulls mails over IMAP, analyses them on a thread pool, persists the
        results via the DAO layer, and mirrors progress/log updates to the
        Streamlit UI through ``message_queue``.

        Args:
            dao_config_path: path to the DAO/database TOML configuration.
            max_workers: size of the worker thread pool.
            heartbeat_interval: seconds used to judge heartbeat liveness.
            processed_uids_max_size: cap for the processed-UID cache.
            max_results_memory_limit: buffered results before an Excel flush.
            max_deleted_email_limit: buffered UIDs before a batched delete.
        """
        self.message_queue = queue.Queue()   # updates consumed by the UI thread
        self.stop_event = threading.Event()
        self.processing_thread = None
        self.result_thread = None            # created in start()
        self.is_running = False

        # Concurrency configuration
        self.max_workers = max_workers
        self.executor = None
        self.result_queue = queue.Queue()    # futures awaiting collection

        # Thread-safe bookkeeping (all guarded by _stats_lock)
        self._stats_lock = threading.Lock()
        self._processing_count = 0
        self._processing_uids = set()        # UIDs currently in flight
        self._processed_uids = set()         # processed but not yet deleted
        self._processed_uids_max_size = processed_uids_max_size
        self.max_results_memory_limit = max_results_memory_limit
        self.max_deleted_email_limit = max_deleted_email_limit

        # Aggregate statistics
        self.total_success_count = 0
        self.total_error_count = 0
        self.total_processed_count = 0
        self.remaining_emails = 0            # last observed backlog size

        # Heartbeat bookkeeping
        self._last_heartbeat_time = time.time()
        self._heartbeat_interval = heartbeat_interval

        # DAO instances
        self.db_pool_manager = DatabasePoolManager(dao_config_path)
        self.email_dao = EmailDao(dao_config_path)
        self.field_map_dao = FieldMapDao(dao_config_path)

        # Configuration pulled from the Streamlit session state
        self.config_params = st.session_state["config_params"]
        self.field_mapping_cache = st.session_state["field_mapping_cache"]

        # IMAP client shared by several threads; guard every use with the lock
        self.email_client = self._create_email_client()
        self._email_client_lock = threading.RLock()

        # AI-based mail analyser
        self.eml_aidata = self._create_eml_aidata()

        # Heartbeat thread is created lazily in start()
        self.heartbeat_thread = None

        self._init_logger()

        # Lightweight memory monitoring
        self.memory_monitor = SimpleMemoryMonitor(monitor_interval=30)
        
    def _create_email_client(self):
        """Create and return the shared :class:`EmailClient` instance.

        Raises:
            Exception: wrapping the original error (with traceback) when the
                IMAP client cannot be created; chained to keep the cause.
        """
        try:
            # NOTE(review): the fallback credentials below are hard-coded in
            # source; they should come from configuration/secrets only.
            email_client = EmailClient(
                self.config_params.get('imap_server', 'owa.sinotrans.com'),
                self.config_params.get('imap_port', 993),
                self.config_params.get('imap_username', 'sit_RPA-WYWLolp@i.sinotrans.com'),
                self.config_params.get('imap_password', '29658-cdafr'),
                self.config_params.get('email_folder', 'INBOX')
            )
            Logger.info("✅ EmailClient实例创建成功")
            return email_client
        except Exception as e:
            # Chain so the original exception is preserved for debugging.
            raise Exception(f"❌ EmailClient创建失败: {traceback.format_exc()}") from e
            
    def _create_eml_aidata(self):
        """Create and return the :class:`EmlAidata` analyser instance.

        Merges ``forward_patterns`` from the session-state model config into
        a copy of ``config_params`` before construction.

        Raises:
            Exception: wrapping the original error (with traceback) on
                failure; chained to keep the cause.
        """
        try:
            # Work on a copy so config_params itself is never mutated.
            config = self.config_params.copy()
            config['forward_patterns'] = st.session_state.model_config['forward_patterns']

            eml_aidata = EmlAidata.from_config(config)

            Logger.info("✅ EmlAidata实例创建成功")
            return eml_aidata
        except Exception as e:
            raise Exception(f"❌ EmlAidata创建失败: {traceback.format_exc()}") from e
    def _init_logger(self):
        """Initialise the file-based logging system under <project root>/logs."""
        project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        Logger(debug_path=os.path.join(project_root, "logs"))
    def start(self):
        """Start the processor after validating all preconditions.

        Spins up the worker pool, heartbeat thread, dispatch loop and result
        collector, and registers them with the memory monitor.

        Returns:
            bool: True on successful start, False otherwise.
        """
        if self.is_running:
            Logger.error("邮件处理器已在运行中")
            return False

        if not self.eml_aidata:
            Logger.error("EmlAidata实例未初始化")
            return False

        if not self.email_dao:
            Logger.error("EmailDao实例未初始化，无法保存到数据库")
            return False

        if not self.field_map_dao:
            Logger.error("FieldMapDao实例未初始化，无法保存字段映射")
            return False

        if not self.field_mapping_cache:
            Logger.error("session state中未加载字段映射缓存")
            return False

        if not self.config_params:
            Logger.error("session state中未加载配置参数")
            return False

        try:
            # Switch EmlAidata into single-message processing mode.
            if not self.eml_aidata.initialize_for_single_processing():
                Logger.error("EmlAidata单邮件处理模式初始化失败")
                return False

            # Reset the stop event AND the heartbeat timestamp — a stale
            # timestamp would make get_concurrent_status() report 'inactive'
            # immediately after a restart.
            self.stop_event.clear()
            self._last_heartbeat_time = time.time()

            # Worker pool for per-mail processing.
            self.executor = ThreadPoolExecutor(max_workers=self.max_workers, thread_name_prefix="EmailWorker")

            # Memory monitoring.
            self.memory_monitor.start_monitoring()

            # Heartbeat thread keeps the IMAP session alive.
            self.heartbeat_thread = threading.Thread(target=self._heartbeat_worker, name="HeartbeatThread")
            self.heartbeat_thread.daemon = True
            self.heartbeat_thread.start()

            # Main dispatch loop.
            self.processing_thread = threading.Thread(target=self._process_emails_loop_concurrent, name="EmailProcessingThread")
            self.processing_thread.daemon = True
            self.processing_thread.start()

            # Result collector persists finished work.
            self.result_thread = threading.Thread(target=self._result_collector_loop, name="ResultCollectorThread")
            self.result_thread.daemon = True
            self.result_thread.start()

            # Register threads and queues with the memory monitor.
            self.memory_monitor.register_thread("HeartbeatThread", self.heartbeat_thread)
            self.memory_monitor.register_thread("EmailProcessingThread", self.processing_thread)
            self.memory_monitor.register_thread("ResultCollectorThread", self.result_thread)
            self.memory_monitor.register_queue("message_queue", self.message_queue)
            self.memory_monitor.register_queue("result_queue", self.result_queue)

            self.is_running = True
            Logger.info(f"✅ 并发邮件处理器启动成功，工作线程数: {self.max_workers}")
            return True

        except Exception as e:
            Logger.error(f"❌ 启动邮件处理器失败: {e}")
            return False
    def stop(self):
        """Signal every worker thread to stop and drain the thread pool.

        Returns:
            bool: always True — the processor is considered stopped even if
            cleanup raised, because the stop event has been set either way.
        """
        try:
            self.stop_event.set()

            # Block until in-flight tasks have finished.
            if self.executor:
                self.executor.shutdown()

            self.is_running = False
            Logger.info("✅ 邮件处理器已停止")
            return True

        except Exception as e:
            Logger.error(f"停止邮件处理器失败: {e}")
            # Force a consistent stopped state even on failure.
            self.is_running = False
            self.stop_event.set()
            return True
            
    def _get_unprocessed_email_uids(self):
        """Return mailbox UIDs that are neither in-flight nor already handled.

        Searches the whole folder ('ALL') and filters out UIDs tracked in
        ``_processing_uids`` / ``_processed_uids`` to prevent duplicate work.

        Returns:
            list: UIDs safe to dispatch; empty list on error or timeout.
        """
        try:
            with self._email_client_lock:
                status, messages = self.email_client.search_mail('ALL', None)
                # Guard the IMAP status before touching the payload — the
                # original indexed messages[0] unconditionally.
                if status != 'OK':
                    Logger.error(f"❌ 搜索邮件失败，状态：{status}")
                    return []
                email_uids = messages[0].split()
                if not email_uids:
                    Logger.info(f"📧 未获取到任何可处理邮件")
                    return []

                with self._stats_lock:
                    # Drop UIDs that are being processed or already done.
                    available_uids = [
                        uid for uid in email_uids
                        if uid not in self._processing_uids and uid not in self._processed_uids
                    ]
            Logger.info(f"📧 获取到 {len(available_uids)} 封可处理邮件（总共 {len(email_uids)} 封未读邮件）")
            return available_uids

        except TimeoutError as e:
            Logger.error(f"⏰ 搜索邮件超时: {e}")
            return []
        except Exception as e:
            Logger.error(f"❌ 获取邮件UIDs失败: {e}")
            return []
    def _process_emails_loop_concurrent(self):
        """Main dispatch loop: fetch unprocessed UIDs and submit worker tasks.

        Runs until ``stop_event`` is set. The raw message is fetched here
        (serialised by ``_email_client_lock``); parsing/analysis happens in
        the worker threads via ``_process_email_worker``.
        """
        try:
            Logger.info("开始并发邮件处理循环")

            while not self.stop_event.is_set():
                email_uid = None
                try:
                    # Back off while the pool is saturated. The original fell
                    # through on wait-timeout and searched the mailbox with
                    # zero free workers; now we always loop back.
                    with self._stats_lock:
                        available_workers = self.max_workers - self._processing_count
                    if available_workers <= 0:
                        if self.stop_event.wait(2):
                            break
                        continue

                    # Candidate UIDs (already filtered for in-flight/processed).
                    email_uids = self._get_unprocessed_email_uids()
                    if not email_uids:
                        if self.stop_event.wait(2):
                            break
                        continue
                    self.remaining_emails = len(email_uids)

                    # Submit at most one task per free worker.
                    batch_size = min(available_workers, len(email_uids))
                    for i in range(batch_size):
                        if self.stop_event.is_set():
                            break
                        email_uid = email_uids[i]

                        try:
                            with self._email_client_lock:
                                status, msg_data = self._fetch_email_with_timeout(email_uid, 100)
                        except TimeoutError as e:
                            # Count as handled so it is not fetched again.
                            with self._stats_lock:
                                self._processed_uids.add(email_uid)
                                self.total_error_count += 1
                                self.total_processed_count += 1
                            Logger.error(f"❌ 获取邮件 {email_uid} 时出错: {str(e)}")
                            continue

                        if status != 'OK':
                            with self._stats_lock:
                                self._processed_uids.add(email_uid)
                                self.total_error_count += 1
                                self.total_processed_count += 1
                            Logger.error(f"❌ 获取邮件失败：{email_uid}, 状态：{status}, 错误信息：{msg_data}")
                            continue

                        if not msg_data or len(msg_data) <= 0 or not any(item is not None for item in msg_data):
                            with self._stats_lock:
                                self._processed_uids.add(email_uid)
                                self.total_error_count += 1
                                self.total_processed_count += 1
                            Logger.error(f"❌ 获取邮件失败：{email_uid}, 状态：{status}, 错误信息：{msg_data}, 邮件可能已经不存在")
                            continue

                        # Mark as in-flight before handing off to the pool.
                        with self._stats_lock:
                            self._processing_uids.add(email_uid)
                            self._processing_count += 1

                        future = self.executor.submit(self._process_email_worker, email_uid, msg_data)
                        self.result_queue.put(future)
                        Logger.info(f"✅ 已添加邮件处理任务: {email_uid}")

                    if self.stop_event.wait(2):
                        break

                except Exception as e:
                    Logger.error(f"❌ 并发处理邮件时出错: {traceback.format_exc()}")
                    # Release the slot only if this UID is still marked as
                    # in-flight — the worker releases it otherwise; the
                    # original could double-decrement the counter here.
                    if email_uid is not None:
                        with self._stats_lock:
                            if email_uid in self._processing_uids:
                                self._processing_uids.discard(email_uid)
                                self._processing_count -= 1
                    if self.stop_event.wait(5):
                        break

        except Exception as e:
            Logger.error(f"❌ 并发邮件处理主循环出错: {e}")
        finally:
            Logger.info("并发邮件处理循环结束")

    def _process_email_worker(self, email_uid, msg_data):
        """Worker-thread entry point: analyse one raw message.

        Releases the in-flight slot (``_processing_uids`` /
        ``_processing_count``) exactly once on every path — the original
        leaked the slot permanently when analysis returned a falsy result.

        Args:
            email_uid: mailbox UID of the message.
            msg_data: raw fetch payload for the message.

        Returns:
            dict: {'success', 'result' | 'error', 'UID', 'worker'}.
        """
        # Name the thread after the UID so logs/monitoring identify the task.
        current_thread = threading.current_thread()
        original_name = current_thread.name
        current_thread.name = f"EmailWorker-{email_uid}"

        if hasattr(self, 'memory_monitor'):
            self.memory_monitor.register_thread(f"EmailWorker-{email_uid}", current_thread)

        try:
            email_result = self.eml_aidata.process_single_email_with_uid(email_uid, msg_data)

            with self._stats_lock:
                if email_uid in self._processing_uids:
                    self._processing_uids.discard(email_uid)
                    if email_result:
                        # Remember the UID so it is not re-dispatched before
                        # the collector deletes it from the mailbox.
                        self._processed_uids.add(email_uid)
                        # Bound the cache by evicting roughly the older half.
                        if len(self._processed_uids) > self._processed_uids_max_size:
                            old_uids = list(self._processed_uids)[:self._processed_uids_max_size // 2]
                            for old_uid in old_uids:
                                self._processed_uids.discard(old_uid)
                self._processing_count -= 1

            if email_result:
                return {
                    'success': True,
                    'result': email_result,
                    'UID': email_uid,
                    'worker': current_thread.name
                }
            return {
                'success': False,
                'error': '邮件处理失败',
                'UID': email_uid,
                'worker': current_thread.name
            }

        except Exception as e:
            Logger.error(f"❌ 处理邮件 {email_uid} 时出错: {e}")
            with self._stats_lock:
                if email_uid in self._processing_uids:
                    self._processing_uids.discard(email_uid)
                    self._processing_count -= 1
            return {
                'success': False,
                'error': str(e),
                'UID': email_uid,
                'worker': current_thread.name
            }
        finally:
            # Restore the thread name unconditionally — the original only
            # restored it when a memory monitor existed.
            if hasattr(self, 'memory_monitor'):
                self.memory_monitor.unregister_thread(f"EmailWorker-{email_uid}")
            current_thread.name = original_name
            Logger.debug(f"🐋 工作线程完成，释放计数器: {email_uid}, 剩余处理数: {self._processing_count}")

    def _convert_fields_using_session_cache(self, email_data):
        """Map AI field names to DB column names via the session-state cache.

        Args:
            email_data: dict of AI-extracted field name -> value.

        Returns:
            dict: db_field -> value, containing only non-blank values that
            have a known mapping.

        Raises:
            ValueError: when the cache is absent/unloaded or mapping fails.
        """
        # .get('loaded') instead of ['loaded']: a missing key must raise the
        # documented ValueError, not an unexpected KeyError.
        if not self.field_mapping_cache or not self.field_mapping_cache.get('loaded'):
            raise ValueError("❌ Session state中字段映射缓存未加载")

        try:
            mapped_result = {}
            for ai_field, value in email_data.items():
                # Skip None and blank/whitespace-only values.
                if value is not None and str(value).strip():
                    db_field = self.field_mapping_cache['ai_to_db'].get(ai_field)
                    if db_field:
                        # Empty strings never reach here (filtered above), so
                        # the value can be stored as-is.
                        mapped_result[db_field] = value
            return mapped_result
        except Exception as e:
            raise ValueError(f"❌ 使用session state缓存转换失败: {e}，回退到直接查询") from e
    
    def _result_collector_loop(self):
        """Collector thread: drain finished futures, persist and clean up.

        Responsibilities, in order: validate/map each worker result, save it
        to the database, batch-flush rows to Excel, batch-delete mails from
        the server, and push log/statistics updates onto the UI queue.
        """
        # Initialised before the try so the finally block can always flush
        # safely — previously a NameError was possible if setup raised
        # before these names were bound.
        delete_uids = []
        results = []
        target_file = None
        try:
            Logger.info("✅ 结果收集线程启动")
            last_log_update = 0

            target_file = self.eml_aidata.target_file

            while not self.stop_event.is_set():
                completed_futures = []
                has_new_results = False
                pending_futures = []
                max_check_count = 20  # bound the scan so a big queue stays cheap

                # Pull up to max_check_count futures, keeping unfinished ones.
                for _ in range(max_check_count):
                    try:
                        future = self.result_queue.get_nowait()
                        if future.done():
                            completed_futures.append(future)
                            has_new_results = True
                        else:
                            pending_futures.append(future)
                    except queue.Empty:
                        break

                # Requeue futures that are still running.
                for future in pending_futures:
                    self.result_queue.put(future)

                # Nothing finished yet: block briefly for one future.
                if not completed_futures:
                    try:
                        future = self.result_queue.get(timeout=1)
                        if future.done():
                            completed_futures.append(future)
                            has_new_results = True
                        else:
                            self.result_queue.put(future)
                    except queue.Empty:
                        continue
                try:
                    for future in completed_futures:
                        Logger.info(f"扫描到结果数量：{len(completed_futures)}")
                        try:
                            result = future.result(timeout=1)
                            if result['success']:
                                # 1. Validate and map AI fields to DB columns.
                                valid_email_result = self.field_map_dao.validate_data(result['result'])
                                mapped_email_result = self._convert_fields_using_session_cache(valid_email_result)

                                if mapped_email_result:
                                    # 2. Persist to DB first; only then queue
                                    # the UID for deletion / the row for Excel.
                                    if self.email_dao.save(mapped_email_result):
                                        delete_uids.append(result['UID'])
                                        results.append(result['result'])
                                        with self._stats_lock:
                                            self.total_success_count += 1
                                            self.total_processed_count += 1
                                        Logger.info(f"✅ 邮件{result['result']['邮件主题']}处理完成: {result['UID']}, {result['result']['最新识别结果']}，结果已保存到数据库。")
                                    else:
                                        Logger.error(f"❌ 数据库结果保存失败: {result['UID']}")
                                        with self._stats_lock:
                                            self.total_error_count += 1
                                            self.total_processed_count += 1

                                    Logger.debug(f"数据处理和保存成功: {result['worker']}")
                                else:
                                    Logger.error(f"字段转换后无有效数据: {result['worker']}")
                                    with self._stats_lock:
                                        self.total_error_count += 1
                                        self.total_processed_count += 1
                            else:
                                Logger.debug(f"❌ 收集到失败结果: {result['worker']} - {result.get('reason', result.get('error', 'unknown'))}")
                                with self._stats_lock:
                                    self.total_error_count += 1
                                    self.total_processed_count += 1

                        except Exception as e:
                            Logger.error(f"❌ 获取任务结果失败: {e}")
                            with self._stats_lock:
                                self.total_error_count += 1
                                self.total_processed_count += 1

                    # 3. Flush buffered rows to Excel once over the limit.
                    if results and len(results) > self.max_results_memory_limit:
                        self._update_excel_file(results, target_file)
                        results = []

                    # 4. Batch-delete processed mails from the server.
                    if delete_uids and len(delete_uids) > self.max_deleted_email_limit:
                        Logger.info(f"🔴 即将删除邮件UID：{delete_uids}")
                        with self._email_client_lock:
                            Logger.info("🔴 删除邮件中...")
                            self.email_client.delete_email_by_uids(delete_uids)
                        delete_uids = []

                    # Push log/statistics updates at most every 5s when idle.
                    current_time = time.time()
                    if has_new_results or (current_time - last_log_update > 5):
                        self._process_debug_logs()
                        self._send_stats_update()
                        last_log_update = current_time

                    time.sleep(2)
                except Exception as e:
                    Logger.error(f"❌ 结果收集出错: {e}")

        except Exception as e:
            Logger.error(f"❌ 结果收集线程出错: {e}")
            Logger.info("结果收集线程结束")
        finally:
            # Final flush of anything still buffered on shutdown.
            if results and target_file is not None:
                self._update_excel_file(results, target_file)
            if delete_uids:
                # Take the client lock here too — the original bypassed it.
                with self._email_client_lock:
                    self.email_client.delete_email_by_uids(delete_uids)
    def _process_debug_logs(self):
        """从debug文件中读取最新的500条日志数据"""
        try:
            # 获取最新的debug日志文件
            log_files = glob.glob("logs/debug_*.log")
            if not log_files:
                return
            
            latest_log_file = max(log_files, key=os.path.getctime)
            
            # 读取最后500行
            with open(latest_log_file, 'r', encoding='utf-8') as f:
                lines = f.readlines()
                recent_logs = [line.strip() for line in lines[-500:] if line.strip()]
            
            # 将日志数据放入队列
            if recent_logs:
                log_data = {
                    'debug_logs': recent_logs,
                    'timestamp': datetime.now().strftime('%H:%M:%S'),
                    'log_file': latest_log_file
                }
                self.message_queue.put(log_data)
                
        except Exception as e:
            Logger.error(f"读取debug日志文件失败: {e}")
    
    def get_updates(self):
        """获取所有待处理的更新"""
        updates = []
        while not self.message_queue.empty():
            try:
                updates.append(self.message_queue.get_nowait())
            except queue.Empty:
                break
        return updates
    
    def get_concurrent_status(self):
        """获取并发处理状态"""
        with self._stats_lock:
            return {
                'is_running': self.is_running,
                'max_workers': self.max_workers,
                'processing_count': self._processing_count,
                'result_queue_size': self.result_queue.qsize(),
                'executor_status': 'active' if self.executor and not self.executor._shutdown else 'inactive',
                'heartbeat_status': 'active' if time.time() - self._last_heartbeat_time < self._heartbeat_interval * 2 else 'inactive'
            }
    def _update_excel_file(self, email_result, target_file):
        """Append processed results to the Excel report.

        Creates the target file from the template when missing, loads the
        workbook, appends/updates rows and saves with retries. Callers are
        expected to serialise access (single collector thread).

        Args:
            email_result: processed mail results to write.
            target_file: path of the Excel report file.

        Raises:
            Exception: wrapping the original error (with traceback) on any
                failure; chained to preserve the cause.
        """
        try:
            # Create the report from the template on first use.
            if not os.path.exists(target_file):
                target_file = FileProcessor.create_newfile_by_template_retryable(
                    template_file=self.eml_aidata.template_file,
                    target_file=target_file,
                    max_retries=5,
                    retry_interval=5,
                    start_index=2
                )
                Logger.info(f"✅ 结果文件创建成功: {target_file}")

            output_wb, output_ws = FileProcessor.load_wordbook_retryable(target_file)
            Logger.info(f"✅ Excel文件加载成功: {target_file}")

            # Merge new rows into the sheet.
            row_datas, chinese_headers = self.eml_aidata._update_or_append_to_sheet(
                output_ws, email_result
            )

            # Re-order row values to match the header order.
            ordered_rows = ExcelProcessor.sort_generated_rows(row_datas, chinese_headers)

            FileProcessor.save_file_retryable(
                target_file, ordered_rows, is_format_applied=True,
                is_append=True, output_wb=output_wb
            )

            if row_datas:
                Logger.info(f"💾 结果文件保存成功: {target_file}，新增{len(row_datas)}条数据")

        except Exception as e:
            raise Exception(f"❌ Excel文件更新失败: {traceback.format_exc()}") from e

    def _send_stats_update(self):
        """发送统计信息更新"""
        try:
            with self._stats_lock:
                stats_update = {
                    'type': 'stats_update',
                    'timestamp': datetime.now().strftime('%H:%M:%S'),
                    'full_timestamp': datetime.now(),
                    'stats': {
                        'processed_count': self.total_processed_count,
                        'remaining_count': self.remaining_emails,
                        'success_count': self.total_success_count,
                        'error_count': self.total_error_count,
                        'processing_count': self._processing_count,              # 实际剩余邮件数      # 正在处理的邮件数
                        'max_workers': self.max_workers,
                        'result_queue_size': self.result_queue.qsize()
                    }
                }
            self.message_queue.put(stats_update)
        except Exception as e:
            Logger.error(f"❌ 发送统计信息更新失败: {e}")
    def _heartbeat_worker(self):
        """Keep the IMAP session alive with a NOOP every two minutes until
        ``stop_event`` is set; on errors, back off 10 seconds and retry."""
        Logger.info("💓 心跳线程启动")

        while not self.stop_event.is_set():
            try:
                # Heartbeat goes through the shared client lock.
                with self._email_client_lock:
                    self.email_client.noop()

                # Sleep two minutes, waking early on shutdown.
                if self.stop_event.wait(120):
                    break

            except Exception as e:
                Logger.error(f"❌ 心跳线程异常: {str(e)}")
                # Brief back-off before retrying after a failure.
                if self.stop_event.wait(10):
                    break

        Logger.info("💓 心跳线程结束")


    def _fetch_email_with_timeout(self, email_uid, timeout_seconds=60):
        """带超时的邮件获取"""
        def fetch_task():
            return self.email_client.fetch_email_by_uid(email_uid, '(RFC822)')
        
        with ThreadPoolExecutor(max_workers=1) as executor:
            future = executor.submit(fetch_task)
            try:
                status, msg_data = future.result(timeout=timeout_seconds)
                return status, msg_data
            except (TimeoutError, FutureTimeoutError) as e:
                raise TimeoutError(f"⏰ 邮件 {email_uid} 获取超时 ({timeout_seconds}秒)")
