﻿import pymysql
import time
import logging
from datetime import datetime
from typing import Dict, Any
import json


class MySQLHourlySync:
    """One-way, hourly sync of configured tables between two MySQL databases.

    Configuration is read from a JSON file (a default file is written on
    first run).  Per-table incremental progress is persisted in
    ``sync_state.json`` so a restart resumes where the last run stopped.
    Rows are upserted, so re-syncing the same rows is idempotent.
    """

    # Single place for the state-file path (was duplicated as a literal).
    STATE_FILE = "sync_state.json"

    def __init__(self, config_file: str = 'sync_config.json'):
        """Load (or create) the config file and set up logging."""
        self.config = self.load_config(config_file)
        logging.basicConfig(
            level=logging.INFO,
            format='%(asctime)s - %(levelname)s - %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'
        )
        self.logger = logging.getLogger(__name__)
        self.logger.info("🔄 MySQL小时同步程序初始化完成")

    @staticmethod
    def _quote(identifier: str) -> str:
        """Backtick-quote a MySQL identifier (table or column name).

        Identifiers come from the config file and cannot be bound as SQL
        parameters, so quote them to be safe against reserved words and
        against injection through a tampered config.
        """
        return "`" + identifier.replace("`", "``") + "`"

    def load_state(self) -> Dict[str, Any]:
        """Return the persisted per-table sync state ({} when absent or corrupt)."""
        try:
            with open(self.STATE_FILE, "r", encoding="utf-8") as f:
                return json.load(f)
        except FileNotFoundError:
            return {}
        except json.JSONDecodeError:
            # A corrupt state file just forces a full resync, which is
            # harmless because rows are upserted.
            return {}

    def save_state(self, state: Dict[str, Any]):
        """Persist the per-table sync state as pretty-printed JSON."""
        with open(self.STATE_FILE, "w", encoding="utf-8") as f:
            json.dump(state, f, indent=2, ensure_ascii=False)

    def load_config(self, config_file: str) -> Dict[str, Any]:
        """Load the sync config; create and return a default one if missing.

        A missing file is populated with a documented default (source/target
        connection settings, table mappings, interval).  An existing but
        invalid JSON file deliberately raises, so a broken config is noticed.
        """
        try:
            with open(config_file, 'r', encoding='utf-8') as f:
                return json.load(f)
        except FileNotFoundError:
            default_config = {
                'source_db': {
                    'host': 'localhost',
                    'user': 'root',
                    'password': 'wozui666',
                    'database': '123321',
                    'port': 3306,
                    'charset': 'utf8mb4'
                },
                'target_db': {
                    'host': 'localhost',
                    'user': 'root',
                    'password': 'wozui666',
                    'database': '12',
                    'port': 3306,
                    'charset': 'utf8mb4'
                },
                "table_mappings": {
                    "videos": {
                        "source_table": "t_dynamics",
                        "incremental_key": "create_date",
                        "columns": {
                            "id": "vid",
                            "user_id": "u_id",
                            "cate_id": "category",
                            "create_date": "publish_time",
                            "likes": "likes",
                            "favorites": "favorites",
                            "fen": "reposts",
                            "tags": "labels",
                            "video": "video"
                        }
                    },
                    "t_member": {
                        "source_table": "t_member",
                        "incremental_key": "update_date",
                        "columns": {}
                    }
                },
                'sync_interval_hours': 1
            }
            with open(config_file, 'w', encoding='utf-8') as f:
                json.dump(default_config, f, indent=2, ensure_ascii=False)
            return default_config

    def test_connections(self) -> bool:
        """Open and close both configured connections; True when both work."""
        try:
            source_conn = pymysql.connect(**self.config['source_db'])
            source_conn.close()
            target_conn = pymysql.connect(**self.config['target_db'])
            target_conn.close()
            self.logger.info("✅ 数据库连接测试成功")
            return True
        except Exception as e:
            self.logger.error(f"❌ 数据库连接失败: {e}")
            return False

    def get_table_columns(self, conn, table_name: str) -> list:
        """Return the column names of *table_name*, or [] on any error."""
        try:
            with conn.cursor() as cursor:
                cursor.execute(f"SHOW COLUMNS FROM {self._quote(table_name)}")
                return [column[0] for column in cursor.fetchall()]
        except Exception as e:
            self.logger.error(f"❌ 获取表 {table_name} 字段失败: {e}")
            return []

    def convert_value(self, value, target_col: str):
        """Normalize a source value for insertion into *target_col*.

        Currently only datetimes need converting (to a MySQL-friendly
        string); everything else passes through unchanged.  *target_col*
        is unused but kept so per-column rules can be added later.
        """
        if value is None:
            return None
        if isinstance(value, datetime):
            return value.strftime('%Y-%m-%d %H:%M:%S')
        return value

    def sync_table(self, table_name: str, table_config: Dict[str, Any]):
        """Sync one source table into target table *table_name*.

        Rows are upserted via ``INSERT ... ON DUPLICATE KEY UPDATE``.  When
        the table declares an ``incremental_key``, only rows newer than the
        persisted watermark are fetched, and the watermark is advanced only
        past rows that were actually written — failed rows are retried on
        the next run.

        Raises:
            Exception: re-raised after rollback so run_sync aborts the cycle.
        """
        source_conn = None
        target_conn = None

        try:
            source_conn = pymysql.connect(**self.config['source_db'])
            target_conn = pymysql.connect(**self.config['target_db'])

            source_table = table_config['source_table']
            source_columns = self.get_table_columns(source_conn, source_table)
            target_columns = self.get_table_columns(target_conn, table_name)

            if not source_columns or not target_columns:
                self.logger.error(f"❌ 无法获取 {table_name} 的字段信息")
                return

            # Column mapping: an explicit "columns" config wins; otherwise
            # auto-map the columns both tables share by name.
            if table_config.get("columns"):
                columns_mapping = table_config["columns"]
            else:
                common_cols = list(set(source_columns) & set(target_columns))
                columns_mapping = {col: col for col in common_cols}

            self.logger.info(f"📊 {table_name} 字段映射: {columns_mapping}")

            state = self.load_state()
            last_sync_time = state.get(table_name)
            incremental_key = table_config.get("incremental_key")

            quoted_source = self._quote(source_table)
            with source_conn.cursor(pymysql.cursors.DictCursor) as cursor:
                if incremental_key and last_sync_time:
                    sql = (f"SELECT * FROM {quoted_source} "
                           f"WHERE {self._quote(incremental_key)} > %s")
                    cursor.execute(sql, (last_sync_time,))
                    self.logger.info(f"🔍 增量模式: 仅同步 {incremental_key} > {last_sync_time} 的数据")
                elif incremental_key:
                    cursor.execute(f"SELECT * FROM {quoted_source}")
                    self.logger.info(f"📥 首次同步 {source_table} 全部数据")
                else:
                    cursor.execute(f"SELECT * FROM {quoted_source}")
                    self.logger.info(f"📥 无增量字段，执行全量同步")

                source_data = cursor.fetchall()

            if not source_data:
                self.logger.info(f"📭 表 {source_table} 无新数据需要同步")
                return

            self.logger.info(f"📦 获取到 {len(source_data)} 条数据 from {source_table}")

            success_count = 0
            synced_times = []  # incremental values of rows actually written
            quoted_target = self._quote(table_name)
            with target_conn.cursor() as cursor:
                for row in source_data:
                    try:
                        valid_columns = []
                        valid_values = []

                        for source_col, target_col in columns_mapping.items():
                            if target_col in target_columns and source_col in row:
                                valid_columns.append(target_col)
                                valid_values.append(
                                    self.convert_value(row[source_col], target_col))

                        if valid_columns:
                            quoted_cols = [self._quote(c) for c in valid_columns]
                            columns_str = ', '.join(quoted_cols)
                            placeholders = ', '.join(['%s'] * len(valid_values))
                            # NOTE: VALUES(col) in the UPDATE clause is
                            # deprecated as of MySQL 8.0.20 but still works;
                            # the alias form needs a server-version check.
                            update_str = ', '.join(
                                f"{c} = VALUES({c})" for c in quoted_cols)

                            sql = (f"INSERT INTO {quoted_target} ({columns_str}) "
                                   f"VALUES ({placeholders}) "
                                   f"ON DUPLICATE KEY UPDATE {update_str}")
                            cursor.execute(sql, valid_values)
                            success_count += 1
                            if incremental_key and row.get(incremental_key) is not None:
                                synced_times.append(row[incremental_key])

                            if success_count % 100 == 0:
                                self.logger.info(f"⏳ 已处理 {success_count} 条记录")

                    except Exception as e:
                        # Best-effort per row: log the failure and keep going.
                        self.logger.error(f"❌ 记录插入失败: {e}")
                        self.logger.error(f"📝 失败数据: {row}")
                        continue

                target_conn.commit()

            # Advance the watermark only over rows that were written.  The
            # original took max() over ALL fetched rows (so failed rows were
            # never retried) and could raise ValueError on an empty list
            # when every incremental value was NULL.
            if incremental_key and synced_times:
                max_time = max(synced_times)
                if isinstance(max_time, datetime):
                    state[table_name] = max_time.strftime("%Y-%m-%d %H:%M:%S")
                else:
                    state[table_name] = str(max_time)
                self.save_state(state)
                self.logger.info(f"🕒 更新 {table_name} 增量时间戳: {state[table_name]}")

            self.logger.info(f"✅ {table_name} 同步完成: {success_count}/{len(source_data)} 条记录")

        except Exception as e:
            if target_conn:
                target_conn.rollback()
            self.logger.error(f"❌ 表 {table_name} 同步失败: {e}")
            raise
        finally:
            if source_conn:
                source_conn.close()
            if target_conn:
                target_conn.close()

    def run_sync(self):
        """Run one full sync cycle over every configured table mapping.

        Returns:
            bool: True when all tables synced, False on connection failure
            or when any table's sync raised.
        """
        self.logger.info("🚀 开始执行同步任务")
        start_time = time.time()
        if not self.test_connections():
            return False
        try:
            for table_name, table_config in self.config['table_mappings'].items():
                self.sync_table(table_name, table_config)
            elapsed_time = time.time() - start_time
            self.logger.info(f"🎉 同步完成! 耗时: {elapsed_time:.2f}秒")
            return True
        except Exception as e:
            self.logger.error(f"❌ 同步任务失败: {e}")
            return False

    def start_hourly_sync(self):
        """Run one sync immediately, then loop forever at the configured interval.

        Blocks the calling thread; Ctrl-C stops the service cleanly.
        """
        self.logger.info(f"⏰ 启动每小时同步服务，间隔: {self.config['sync_interval_hours']}小时")
        self.run_sync()
        interval_seconds = self.config['sync_interval_hours'] * 3600
        try:
            while True:
                self.logger.info(f"⏳ 下一次同步将在 {self.config['sync_interval_hours']} 小时后执行")
                time.sleep(interval_seconds)
                self.run_sync()
        except KeyboardInterrupt:
            self.logger.info("⏹️  同步服务已停止")
        except Exception as e:
            self.logger.error(f"❌ 同步服务异常: {e}")

if __name__ == "__main__":
    # Entry point: build the sync service from the default config file and
    # block in its hourly loop until interrupted.
    try:
        MySQLHourlySync('sync_config.json').start_hourly_sync()
    except Exception as e:
        print(f"❌ 程序启动失败: {e}")
