import os
import time
import pandas as pd
import pymysql
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import boto3
import yaml
from queue import Queue
import datetime


class CSVFileHandler(FileSystemEventHandler):
    """Polls a directory for machine-exported CSV files, parses each into a
    primary record (part1) and detail rows (part2), writes them to MySQL,
    and rewrites local image paths into MinIO URLs after uploading.

    NOTE(review): despite subclassing watchdog's FileSystemEventHandler, no
    event callbacks (on_created/...) are implemented and no Observer is
    started in this file -- files are discovered by the polling loop in
    scan_and_process().
    """

    def __init__(self, config):
        """Store configuration, open the DB connection, and snapshot files
        already present so only newly appearing CSVs get processed.

        :param config: parsed YAML configuration dict (sections: database,
            minio, directories, sql, sql_statements, data_columns,
            primary_table_fields, detail_fields).
        """
        self.config = config
        self.sql_statements = config['sql_statements']
        self.data_columns = config['data_columns']
        self.db_connection = None
        self.connect_to_db()
        self.CSV_DIR = config['directories']['csv_dir']
        self.MINIO_ENDPOINT = config['minio']['endpoint']
        self.MINIO_ACCESS_KEY = config['minio']['access_key']
        self.MINIO_SECRET_KEY = config['minio']['secret_key']
        self.BUCKET_NAME = config['minio']['bucket']
        self.select_last_primary_id = config['sql']['select_primary_id']
        self.processed_files = set()  # basenames of CSVs already handled
        self.minio_client = None  # created lazily on first upload (see upload_to_minio)
        self.file_queue = Queue()  # pending file paths found by the scan loop

        # Scan the folder at startup and add pre-existing files to
        # processed_files so the polling loop skips them.
        self.scan_initial_files()

    def connect_to_db(self):
        """Open a new PyMySQL connection from the 'database' config section."""
        self.db_connection = pymysql.connect(
            host=self.config['database']['host'],
            port=self.config['database']['port'],
            user=self.config['database']['user'],
            password=self.config['database']['password'],
            database=self.config['database']['db']
        )

    def reconnect_to_db(self):
        """Block, retrying the DB connection every 5 seconds until it succeeds."""
        while True:
            try:
                self.connect_to_db()
                print("数据库重新连接成功")
                break
            except pymysql.MySQLError as e:
                print(f"数据库连接失败，正在重试... 错误: {e}")
                time.sleep(5)  # wait 5 seconds before retrying

    def check_db_connection(self):
        """Ping the connection (with PyMySQL auto-reconnect); if even that
        raises, fall back to the blocking reconnect loop."""
        try:
            self.db_connection.ping(reconnect=True)
        except pymysql.MySQLError:
            self.reconnect_to_db()

    def get_latest_csv_files(self):
        """Return all files in CSV_DIR ending in '.CSV' (uppercase extension
        only -- lowercase '.csv' files are ignored), newest first by mtime;
        [] when none exist."""
        files = [os.path.join(self.CSV_DIR, f) for f in os.listdir(self.CSV_DIR) if f.endswith('.CSV')]
        if not files:
            return []
        return sorted(files, key=os.path.getmtime, reverse=True)  # all CSV files, newest first

    def parse_csv(self, file_path):
        """Parse one CSV (GB2312-encoded, headerless, read as 31 columns).

        Returns (part1, part2):
          * part1 -- Series from row 1 at the columns configured under
            data_columns['part1'], NaNs replaced with "".
          * part2 -- DataFrame from data_columns['part2']['start_row'] onward
            (skipping leading rows whose first cell is blank) at the
            configured columns, or None when no detail rows exist.
        """
        df = pd.read_csv(file_path, encoding='GB2312', header=None, names=range(31))
        print(f"df的长度为{len(df)}")
        # Column selections come from the config file.
        part1_config = self.data_columns['part1']
        part1 = df.iloc[1, part1_config['columns']].fillna("")
        part2_config = self.data_columns['part2']
        part2_start = part2_config['start_row']
        # Guard: the configured start row may lie beyond the end of the frame.
        if part2_start >= len(df):
            print("part2_start超出DataFrame的行数范围，part2内容为空")
            return part1, None
        while pd.isna(df.iloc[part2_start, 0]):  # skip blank rows until a non-blank one is found
            part2_start += 1
            if part2_start >= len(df):  # re-check the bound after each skip
                print("part2_start超出DataFrame的行数范围，part2内容为空")
                return part1, None
        part2 = df.iloc[part2_start:, part2_config['columns']].fillna("")
        if part2.empty:
            print("part2 内容为空，跳过主表和子表的插入操作")
            return part1, None
        return part1, part2

    def insert_into_db(self, part1, part2):
        """Insert the primary record, look its id back up by barcode, then
        insert each detail row (uploading any referenced image to MinIO and
        substituting the resulting URL).

        NOTE(review): the primary insert is committed BEFORE the detail
        inserts, so the rollback in the except clause only undoes detail
        rows -- a failed batch leaves the primary record in place.

        :param part1: Series of primary-table values keyed by source column.
        :param part2: DataFrame of detail rows, or None/empty to skip details.
        """
        self.check_db_connection()  # make sure the connection is alive
        with self.db_connection.cursor() as cursor:
            try:
                values = tuple(part1[col] for col in self.data_columns['part1']['columns'])
                print("开始执行主表插入")
                primary_table_fields = self.config['primary_table_fields']
                placeholders = ', '.join(['%s'] * len(values))
                insert_sql = self.config['sql_statements']['insert_aoi_machine_ng_records']
                sql = insert_sql.format(primary_table_fields=', '.join(primary_table_fields), placeholders=placeholders)
                cursor.execute(sql, values)
                print("主表执行成功")
                self.db_connection.commit()
                print(f"barcode为{values[3]}")
                # The 4th configured part1 column is treated as the barcode
                # for the id lookup -- presumably; verify against the config's
                # column ordering.
                barcode = values[3]
                print(f"barcode为{barcode}")
                print(self.config['sql']['select_primary_id'])
                cursor.execute(self.config['sql']['select_primary_id'], (barcode,))  # look up the just-inserted primary row by barcode
                result1 = cursor.fetchone()
                print(f"id为：{result1}")
                if result1:
                    last_primary_id = result1[0]
                    print(last_primary_id)
                    print("获取到主表id字段数据")
                else:
                    print("未在表中找到数据.")
                    return
                print("开始执行子表操作")
                if part2 is not None and not part2.empty:
                    detail_fields = self.config['detail_fields']
                    placeholders_detail = ', '.join(['%s'] * len(detail_fields))
                    insert_detail_sql = self.config['sql_statements']['insert_pcb_test_records_details']
                    sql_detail = insert_detail_sql.format(detail_fields=', '.join(detail_fields),
                                                          placeholders_detail=placeholders_detail)
                    for index, row in part2.iterrows():
                        # Build one detail row: foreign key first, then the
                        # configured part2 columns in order.
                        detail_values = [last_primary_id] + [row[col] for col in self.data_columns['part2']['columns']]

                        # Image upload: slot 16 of detail_values (after the
                        # prepended primary id) is assumed to hold a local
                        # image path -- TODO confirm against detail_fields.
                        img_path_index = 16
                        img_path = detail_values[img_path_index]
                        if img_path and os.path.exists(img_path):
                            img_url = self.upload_to_minio(img_path)
                            detail_values[img_path_index] = img_url
                        cursor.execute(sql_detail, detail_values)
                    self.db_connection.commit()
                    print("子表执行成功")
                else:
                    print("part2 内容为空，跳过子表插入操作")
            except Exception as e:
                # Roll back uncommitted work and log; the loop in
                # scan_and_process keeps running.
                self.db_connection.rollback()
                print(f'Error inserting URL into database: {e}')

    def process_file(self, file_path):
        """Parse one CSV and insert it, timing the whole operation.

        Files whose part2 is empty are skipped entirely (no primary insert
        either), matching the message printed by parse_csv.
        """
        start_time = time.time()
        print(f'开始处理文件 {file_path}，时间: {datetime.datetime.now()}')
        part1, part2 = self.parse_csv(file_path)
        if part1 is None or part2 is None:
            return
        self.check_db_connection()
        self.insert_into_db(part1, part2)
        end_time = time.time()
        print(f'处理文件 {file_path} 完成，时间: {datetime.datetime.now()}，耗时: {end_time - start_time:.2f}秒')

    def scan_and_process(self):
        """Poll CSV_DIR forever: every 5 seconds enqueue unseen files, then
        drain the queue, processing each file once and recording its basename
        in processed_files. Never returns."""
        while True:
            scan_start_time = time.time()
            print(f'开始扫描文件，时间: {datetime.datetime.now()}')
            try:
                latest_files = self.get_latest_csv_files()
                new_files = [f for f in latest_files if os.path.basename(f) not in self.processed_files]
                for file_path in new_files:
                    self.file_queue.put(file_path)  # enqueue newly discovered files

                # Drain the queue, processing each file exactly once.
                while not self.file_queue.empty():
                    file_path = self.file_queue.get()
                    file_name = os.path.basename(file_path)
                    if file_name not in self.processed_files:
                        self.process_file(file_path)
                        self.processed_files.add(file_name)
            except Exception as e:
                # Best effort: log and keep the polling loop alive.
                print(f'Error processing files: {e}')
            scan_end_time = time.time()
            print(f'扫描和处理文件完成，时间: {datetime.datetime.now()}，耗时: {scan_end_time - scan_start_time:.2f}秒')
            time.sleep(5)  # scan every 5 seconds

    def upload_to_minio(self, file_path):
        """Upload a local file to the configured MinIO bucket and return its
        public URL.

        The S3 client is created lazily on first use. Objects are keyed as
        ALD7120/SpcLocalImg/<YYYYMMDD>/<basename>.
        """
        if not self.minio_client:
            self.minio_client = boto3.client(
                's3',
                endpoint_url=self.MINIO_ENDPOINT,
                aws_access_key_id=self.MINIO_ACCESS_KEY,
                aws_secret_access_key=self.MINIO_SECRET_KEY
            )

        # Extract the file name.
        file_name = os.path.basename(file_path)

        # Current date, used to partition objects by day.
        date_str = datetime.datetime.now().strftime('%Y%m%d')

        # Build the object key.
        object_name = f"ALD7120/SpcLocalImg/{date_str}/{file_name}"

        bucket_name = self.config['minio']['bucket']
        self.minio_client.upload_file(file_path, bucket_name, object_name)
        return f'{self.MINIO_ENDPOINT}/{self.BUCKET_NAME}/{object_name}'

    def scan_initial_files(self):
        """Record every CSV already in the directory as processed so startup
        does not re-ingest historical files."""
        initial_files = self.get_latest_csv_files()
        for file_path in initial_files:
            file_name = os.path.basename(file_path)
            self.processed_files.add(file_name)


if __name__ == "__main__":
    # Load the YAML configuration that drives directories, DB, MinIO and SQL.
    with open('config2.yml', 'r') as file:
        config = yaml.safe_load(file)
    event_handler = CSVFileHandler(config)
    print("启动扫描任务")
    try:
        # scan_and_process() loops forever, so the code after it could never
        # run in the original (the close() call was unreachable). try/finally
        # guarantees the DB connection is closed on Ctrl-C or a fatal error.
        event_handler.scan_and_process()
    finally:
        event_handler.db_connection.close()
        print("数据库连接已关闭")