import os
import threading
import time
import pandas as pd
from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer
import requests
import logging
from logging.handlers import TimedRotatingFileHandler
from collections import defaultdict
import uuid
import yaml
from queue import Queue, Empty
import tkinter as tk
from tkinter import messagebox

# Logging setup: everything at DEBUG, mirrored to a daily-rotating
# log file (7 days of backups kept) and to the console.
log_formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
log_file = "CSV_Processor.log"

file_handler = TimedRotatingFileHandler(log_file, when='midnight', interval=1, backupCount=7)
console_handler = logging.StreamHandler()

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

# Both handlers share the same format and level.
for _handler in (file_handler, console_handler):
    _handler.setFormatter(log_formatter)
    _handler.setLevel(logging.DEBUG)
    logger.addHandler(_handler)


class CSVFileHandler(FileSystemEventHandler):
    """Watch a directory for CSV exports, parse them with pandas, and POST
    the extracted data to a backend HTTP API.

    A small tkinter window collects the work-order metadata (work order,
    material code, face position) that is attached to every payload.
    File events arrive on watchdog's observer thread; parsing and
    uploading happen on a dedicated daemon worker thread fed by a queue.
    """

    def __init__(self, config):
        """Build the handler from a parsed configuration mapping.

        Required keys: config['data_columns'],
        config['directories']['csv_dir'], config['api']['endpoint'].
        Optional: config['api']['timeout'] (seconds, default 30).
        """
        self.config = config
        self.data_columns = config['data_columns']
        self.CSV_DIR = config['directories']['csv_dir']

        # API configuration
        self.API_ENDPOINT = config['api']['endpoint']
        self.API_TIMEOUT = config['api'].get('timeout', 30)
        self.API_HEADERS = {'Content-Type': 'application/json'}
        self.last_request_time = 0
        self.MIN_REQUEST_INTERVAL = 0.5  # minimum gap between API requests (seconds)

        # File-processing state.
        # NOTE(review): these containers are touched from both the observer
        # thread and the worker thread without a lock; the races are benign
        # for CPython built-ins but worth confirming if requirements change.
        self.processed_files = set()             # absolute paths already uploaded
        self.file_queue = Queue()                # paths awaiting processing
        self.file_retries = {}                   # path -> number of failed attempts
        self.last_modified = defaultdict(float)  # path -> last handled mtime
        self.max_retries = 5

        # Operator-supplied work-order metadata (set via the GUI).
        self.work_order = None
        self.material_code = None
        self.face_position = None

        self.create_gui()
        self.start_processing_thread()

    def create_gui(self):
        """Create a minimal tkinter form for entering work-order info."""
        self.root = tk.Tk()
        self.root.title("CSV文件处理器")

        tk.Label(self.root, text="工单号:").grid(row=0, sticky='e')
        self.order_entry = tk.Entry(self.root)
        self.order_entry.grid(row=0, column=1)

        tk.Label(self.root, text="物料编码:").grid(row=1, sticky='e')
        self.material_entry = tk.Entry(self.root)
        self.material_entry.grid(row=1, column=1)

        tk.Label(self.root, text="面位置(A/B):").grid(row=2, sticky='e')
        self.face_entry = tk.Entry(self.root)
        self.face_entry.grid(row=2, column=1)

        submit_btn = tk.Button(self.root, text="提交", command=self.submit_input)
        submit_btn.grid(row=3, columnspan=2)

        self.root.protocol("WM_DELETE_WINDOW", self.on_closing)

    def submit_input(self):
        """Validate and store the operator-entered work-order fields.

        BUGFIX: validation now happens on local variables BEFORE assigning
        to self.* — previously a rejected submission (e.g. face position
        'C') still left work_order/material_code populated, so
        process_file's completeness check could pass with invalid data.
        """
        work_order = self.order_entry.get().strip()
        material_code = self.material_entry.get().strip()
        face_position = self.face_entry.get().strip().upper()

        if not all([work_order, material_code, face_position]):
            messagebox.showwarning("输入错误", "请填写所有字段")
            return

        if face_position not in ['A', 'B']:
            messagebox.showwarning("输入错误", "面位置必须是A或B")
            return

        # All fields valid — commit them atomically.
        self.work_order = work_order
        self.material_code = material_code
        self.face_position = face_position

        logger.info(
            f"工单信息设置 - 工单号: {self.work_order}, 物料编码: {self.material_code}, 面位置: {self.face_position}")
        messagebox.showinfo("成功", "工单信息已设置")

    def on_closing(self):
        """Ask for confirmation before closing the main window."""
        if messagebox.askokcancel("退出", "确定要退出程序吗？"):
            self.root.destroy()

    def start_processing_thread(self):
        """Start the daemon worker thread that drains the file queue."""
        self.processing_thread = threading.Thread(target=self.process_queue, daemon=True)
        self.processing_thread.start()

    def on_modified(self, event):
        """watchdog callback: enqueue a CSV file when it changes.

        Runs on the observer thread. Debounces repeated events for the
        same file (mtime must advance by more than 1s) and skips files
        that were already processed successfully.
        """
        # Case-insensitive extension match so both '.CSV' and '.csv' are
        # picked up (previously only upper-case '.CSV' matched).
        if event.is_directory or not event.src_path.lower().endswith('.csv'):
            return

        file_path = os.path.abspath(event.src_path)
        try:
            mod_time = os.path.getmtime(file_path)
        except OSError:
            # File vanished between the event and the stat call.
            return

        if (mod_time - self.last_modified[file_path] > 1) and (file_path not in self.processed_files):
            self.last_modified[file_path] = mod_time
            self.file_queue.put(file_path)
            logger.info(f"检测到新文件: {file_path}")

    def parse_csv(self, file_path):
        """Parse one CSV export into its two data sections.

        Returns (part1, part2): part1 is a Series taken from row 1 at the
        configured 'part1' columns; part2 is a DataFrame starting at the
        first row with a non-empty first column at or after
        data_columns['part2']['start_row'], or None if no such row exists.
        NaNs are replaced with "". Re-raises read/parse errors after
        logging them.
        """
        try:
            # GB2312 encoding — presumably matches the instrument's export;
            # TODO confirm. 31 dummy column names keep ragged rows from
            # shifting column positions.
            df = pd.read_csv(file_path, encoding='GB2312', header=None, names=range(31))
            logger.info(f"成功读取文件: {file_path}, 行数: {len(df)}")

            # Section 1: a single fixed row (row index 1).
            part1_config = self.data_columns['part1']
            part1 = df.iloc[1, part1_config['columns']].fillna("")

            # Section 2: starts at a configured row; skip leading blank rows.
            part2_config = self.data_columns['part2']
            part2_start = part2_config['start_row']
            while part2_start < len(df) and pd.isna(df.iloc[part2_start, 0]):
                part2_start += 1

            if part2_start >= len(df):
                logger.warning("未找到第二部分数据起始行")
                return part1, None

            part2 = df.iloc[part2_start:, part2_config['columns']].fillna("")
            return part1, part2

        except Exception as e:
            logger.error(f"解析CSV文件失败: {file_path}, 错误: {e}")
            raise

    def prepare_api_data(self, part1, part2):
        """Assemble the JSON-serializable payload sent to the API."""
        return {
            "metadata": {
                "work_order": self.work_order,
                "material_code": self.material_code,
                "face_position": self.face_position,
                "timestamp": time.strftime("%Y-%m-%d %H:%M:%S"),
                "source": "CSV_Processor"
            },
            "part1": part1.tolist() if hasattr(part1, 'tolist') else list(part1),
            "part2": part2.values.tolist() if part2 is not None else None
        }

    def send_to_api(self, data, max_retries=3):
        """POST `data` as JSON to the configured endpoint.

        Applies simple client-side throttling (MIN_REQUEST_INTERVAL) and
        retries failures with exponential backoff — BUGFIX: the old delay
        (attempt + 1) * 2 was linear (2s, 4s, 6s, ...) despite the comment
        claiming exponential; it is now 2 ** (attempt + 1) (2s, 4s, 8s, ...),
        with the first retry delay unchanged.

        Returns the decoded JSON response; re-raises the last
        RequestException after max_retries failed attempts.
        """
        request_id = str(uuid.uuid4())[:8]
        logger.info(f"[{request_id}] 准备发送数据到API端点: {self.API_ENDPOINT}")

        # Throttle: respect the minimum interval since the last success.
        elapsed = time.time() - self.last_request_time
        if elapsed < self.MIN_REQUEST_INTERVAL:
            time.sleep(self.MIN_REQUEST_INTERVAL - elapsed)

        for attempt in range(max_retries):
            try:
                start_time = time.time()
                response = requests.post(
                    self.API_ENDPOINT,
                    json=data,
                    headers=self.API_HEADERS,
                    timeout=self.API_TIMEOUT
                )
                elapsed = time.time() - start_time

                logger.info(
                    f"[{request_id}] API响应 - 状态码: {response.status_code}, "
                    f"耗时: {elapsed:.2f}s, 尝试: {attempt + 1}/{max_retries}"
                )

                response.raise_for_status()
                self.last_request_time = time.time()
                return response.json()

            except requests.exceptions.RequestException as e:
                logger.error(f"[{request_id}] 请求失败: {str(e)}")
                if attempt == max_retries - 1:
                    raise
                time.sleep(2 ** (attempt + 1))  # exponential backoff: 2s, 4s, 8s, ...

    def process_file(self, file_path):
        """Parse one CSV file and upload it; return True on success.

        Refuses to run (returns False) until the operator has submitted
        the work-order metadata through the GUI.
        """
        logger.info(f"开始处理文件: {file_path}")

        if not all([self.work_order, self.material_code, self.face_position]):
            logger.error("工单信息未设置，跳过文件处理")
            return False

        try:
            part1, part2 = self.parse_csv(file_path)
            api_data = self.prepare_api_data(part1, part2)
            self.send_to_api(api_data)
            self.processed_files.add(file_path)
            # Drop any stale retry count so a future re-export of the same
            # file starts with a clean slate.
            self.file_retries.pop(file_path, None)
            logger.info(f"文件处理完成: {file_path}")
            return True

        except Exception as e:
            logger.error(f"处理文件失败: {file_path}, 错误: {e}")
            return False

    def process_queue(self):
        """Worker loop: drain the file queue, retrying failures.

        Runs forever on the daemon worker thread. A failed file is
        re-queued until it has failed max_retries times, after which it
        is dropped with an error log.
        """
        while True:
            try:
                file_path = self.file_queue.get(timeout=1)
                retry_count = self.file_retries.get(file_path, 0)

                if retry_count >= self.max_retries:
                    logger.error(f"文件达到最大重试次数: {file_path}")
                    self.file_queue.task_done()
                    continue

                success = self.process_file(file_path)
                if not success:
                    self.file_retries[file_path] = retry_count + 1
                    if retry_count < self.max_retries - 1:
                        self.file_queue.put(file_path)
                        logger.info(f"重新排队文件: {file_path}, 重试: {retry_count + 1}")

                self.file_queue.task_done()

            except Empty:
                # Queue idle — keep polling so the daemon thread stays alive.
                continue

    def start_monitoring(self):
        """Start the watchdog observer on CSV_DIR and block until interrupted."""
        self.observer = Observer()
        self.observer.schedule(self, self.CSV_DIR, recursive=False)
        self.observer.start()
        logger.info(f"开始监控目录: {self.CSV_DIR}")

        try:
            while True:
                time.sleep(1)
        except KeyboardInterrupt:
            self.observer.stop()
        self.observer.join()

    def run(self):
        """Show the window and enter the tkinter main loop (blocks)."""
        self.root.deiconify()
        self.root.mainloop()


if __name__ == "__main__":
    # Load the YAML configuration (binary mode lets yaml detect the encoding).
    with open('sendBack.yml', 'rb') as f:
        config = yaml.safe_load(f)

    # Fail fast on missing required configuration instead of crashing later
    # with an obscure KeyError inside CSVFileHandler.__init__, which reads
    # api.endpoint, directories.csv_dir and data_columns.
    if 'endpoint' not in config.get('api', {}):
        raise ValueError("缺少必要的API配置")
    if 'csv_dir' not in config.get('directories', {}):
        raise ValueError("缺少必要的目录配置: directories.csv_dir")
    if 'data_columns' not in config:
        raise ValueError("缺少必要的数据列配置: data_columns")

    # Create the handler (builds the GUI and starts the worker thread).
    handler = CSVFileHandler(config)

    # Run the directory watcher on a daemon thread so the GUI owns the
    # main thread.
    monitor_thread = threading.Thread(target=handler.start_monitoring, daemon=True)
    monitor_thread.start()

    # Enter the tkinter main loop (blocks until the window closes).
    handler.run()