#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
核注单数据清理工具

本程序用于清理MySQL数据库中核注单表中seq_no重复的数据。
清理规则：
1. 如果入库单表已经绑定了核注单表体Id则保留该条记录
2. 删除没有生成过入库单的核注单数据
"""

import mysql.connector
from mysql.connector import Error
import logging
import sys
import os
from datetime import datetime
import importlib.util
import argparse

# Ensure console output is UTF-8 (Windows consoles often default to a legacy
# codepage, which breaks the Chinese log messages below).
# Fixes vs. original: the comparison is now case-insensitive (many platforms
# report 'UTF-8', which the old `!= 'utf-8'` check treated as non-UTF-8), it
# tolerates streams whose encoding is None, and it only calls reconfigure()
# when the stream actually supports it (wrapped/captured streams may not).
if (sys.stdout.encoding or '').lower() != 'utf-8' and hasattr(sys.stdout, 'reconfigure'):
    sys.stdout.reconfigure(encoding='utf-8')

# --- Configuration loading -------------------------------------------------
def load_config(config_path='config.py'):
    """Load a Python configuration module from *config_path*.

    Args:
        config_path: Path to a Python file expected to define module-level
            DB_CONFIG / LOG_CONFIG / CLEANUP_CONFIG attributes.

    Returns:
        The loaded module object, or None when the file does not exist or
        cannot be imported (the caller then falls back to the defaults).
    """
    try:
        if not os.path.exists(config_path):
            print(f"配置文件 {config_path} 不存在，将使用默认配置")
            return None
        spec = importlib.util.spec_from_file_location("config", config_path)
        # spec (or its loader) can be None for paths the import machinery
        # cannot handle; the original code raised AttributeError here.
        if spec is None or spec.loader is None:
            print(f"加载配置文件时出错: 无法导入 {config_path}")
            return None
        config = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(config)
        return config
    except Exception as e:
        print(f"加载配置文件时出错: {e}")
        return None

# --- Built-in defaults (used when no config.py is found) --------------------

# Keyword arguments passed straight to mysql.connector.connect().
# NOTE(review): credentials are hard-coded in source control; consider moving
# them to environment variables or a git-ignored config file.
DEFAULT_DB_CONFIG = {
    'host': '10.10.10.164',
    'port': 33066,
    'database': 'trade-service-platform',
    'user': 'root',
    'password': '2020jieguan2023'
}

# Logging behaviour consumed by setup_logging().
DEFAULT_LOG_CONFIG = {
    'level': 'INFO',          # level name resolved via getattr(logging, ...)
    'log_file': True,         # write a timestamped log file under log_file_path
    'log_console': True,      # also echo log records to stdout
    'log_file_path': 'logs',  # log directory (created on demand)
}

# Cleanup behaviour consumed by get_duplicate_seq_no()/process_duplicates().
DEFAULT_CLEANUP_CONFIG = {
    'batch_size': 0,            # max duplicate seq_no groups per run; 0 = unlimited
    'dry_run': False,           # when True, only report what would be deleted
    'include_tenant_id': [63],  # restrict the scan to these TENANT_ID values
}

# --- Logging setup ----------------------------------------------------------
def setup_logging(log_config):
    """Configure root logging from *log_config* and return a module logger.

    Args:
        log_config: Mapping with optional keys:
            'level'         - logging level name (default 'INFO'; unknown
                              names now fall back to INFO instead of raising),
            'log_file'      - write a timestamped file (default True),
            'log_console'   - echo records to stdout (default True),
            'log_file_path' - directory for log files (default 'logs').

    Returns:
        logging.Logger: logger for this module.
    """
    # Original called getattr(logging, name) unguarded, which raised
    # AttributeError for any unknown/lowercase level name.
    level_name = str(log_config.get('level', 'INFO')).upper()
    log_level = getattr(logging, level_name, logging.INFO)

    handlers = []
    log_file_path = None  # set only when file logging is enabled

    if log_config.get('log_file', True):
        log_dir = log_config.get('log_file_path', 'logs')
        # exist_ok avoids the check-then-create race of the original code.
        os.makedirs(log_dir, exist_ok=True)

        log_file_name = f"cleanup_log_{datetime.now().strftime('%Y%m%d_%H%M%S')}.log"
        log_file_path = os.path.join(log_dir, log_file_name)
        handlers.append(logging.FileHandler(log_file_path, encoding='utf-8'))

    if log_config.get('log_console', True):
        # basicConfig assigns the shared format below to handlers that have
        # no formatter, so the per-handler formatter of the original was
        # redundant.
        handlers.append(logging.StreamHandler(sys.stdout))

    logging.basicConfig(
        level=log_level,
        format='%(asctime)s - %(levelname)s - %(message)s',
        handlers=handlers
    )

    logger = logging.getLogger(__name__)
    if log_file_path is not None:
        logger.info(f"日志文件保存在: {log_file_path}")

    return logger

def connect_to_database(db_config):
    """Open a MySQL connection.

    Args:
        db_config: Mapping of mysql.connector.connect() keyword arguments
            (host, port, database, user, password).

    Returns:
        An open mysql.connector connection, or None when connecting fails
        or the session is not usable.
    """
    try:
        connection = mysql.connector.connect(**db_config)
        if connection.is_connected():
            logger.info(f"已成功连接到MySQL数据库: {db_config['database']}")
            return connection
        # connect() returned but the session is not connected; the original
        # fell off the end here and returned None silently with no log.
        logger.error(f"数据库连接不可用: {db_config['database']}")
        return None
    except Error as e:
        logger.error(f"连接数据库时发生错误: {e}")
        return None

def get_duplicate_seq_no(connection, cleanup_config):
    """Return seq_no values that occur on more than one nems_invt_head row.

    Args:
        connection: Open mysql.connector connection.
        cleanup_config: Mapping with optional keys:
            'include_tenant_id' - list of TENANT_ID values to restrict to,
            'batch_size'        - max duplicate groups returned (0 = no limit).

    Returns:
        list[dict]: rows with keys 'seq_no' and 'count'; empty list on error.
    """
    cursor = connection.cursor(dictionary=True)
    try:
        query = """
        SELECT seq_no, COUNT(*) as count
        FROM nems_invt_head
        WHERE 1=1
        """
        params = []

        # Restrict to the configured tenants, if any. A single-element IN
        # clause is equivalent to the original's special-cased `= %s`.
        tenant_ids = cleanup_config.get('include_tenant_id') or []
        if tenant_ids:
            placeholders = ', '.join(['%s'] * len(tenant_ids))
            query += f" AND TENANT_ID IN ({placeholders})"
            params.extend(tenant_ids)

        # Only rows with a usable seq_no can be duplicates worth cleaning.
        query += " AND seq_no IS NOT NULL AND seq_no != ''"

        query += """
        GROUP BY seq_no
        HAVING COUNT(*) > 1
        """

        # Bound the number of duplicate groups per run. Passed as a bound
        # parameter (and coerced to int) instead of the original f-string
        # interpolation, which spliced raw config text into the SQL.
        batch_size = int(cleanup_config.get('batch_size', 0) or 0)
        if batch_size > 0:
            query += " LIMIT %s"
            params.append(batch_size)

        cursor.execute(query, params)
        duplicates = cursor.fetchall()
        logger.info(f"找到 {len(duplicates)} 个重复的seq_no")
        return duplicates
    except Error as e:
        logger.error(f"查询重复seq_no时发生错误: {e}")
        return []
    finally:
        cursor.close()

def process_duplicates(connection, cleanup_config):
    """Clean up nems_invt_head rows that share a duplicated seq_no.

    For each duplicated seq_no: every head whose detail rows (nems_invt_list)
    are referenced by a storage_detail record is kept; the remaining heads
    (and their detail rows) are deleted. When no head has storage records,
    the first head is kept arbitrarily. Unless 'dry_run' is set in
    *cleanup_config*, each deletion batch is confirmed interactively on the
    console and committed per seq_no.

    NOTE(review): commits happen per seq_no, so a failure mid-run leaves
    earlier seq_no deletions committed.
    """
    duplicates = get_duplicate_seq_no(connection, cleanup_config)
    if not duplicates:
        logger.info("没有找到重复的seq_no记录")
        return
    
    total_deleted = 0
    cursor = connection.cursor(dictionary=True)
    
    try:
        for duplicate in duplicates:
            seq_no = duplicate['seq_no']
            logger.info(f"处理重复的seq_no: {seq_no}")
            
            # All head rows sharing this seq_no.
            head_query = """
            SELECT id FROM nems_invt_head
            WHERE seq_no = %s
            """
            cursor.execute(head_query, (seq_no,))
            head_records = cursor.fetchall()
            head_ids = [record['id'] for record in head_records]
            
            # For each head id, look at its detail rows and check whether any
            # of them is bound to a storage (inbound) record.
            records_to_keep = []
            records_to_delete = []
            
            for head_id in head_ids:
                # Detail rows belonging to this head.
                list_query = """
                SELECT id FROM nems_invt_list
                WHERE inv_id = %s
                """
                cursor.execute(list_query, (head_id,))
                list_records = cursor.fetchall()
                
                has_storage = False
                for list_record in list_records:
                    list_id = list_record['id']
                    
                    # Is any storage record bound to this detail row?
                    storage_query = """
                    SELECT id FROM storage_detail
                    WHERE INVT_LIST_ID = %s
                    """
                    cursor.execute(storage_query, (list_id,))
                    storage_records = cursor.fetchall()
                    
                    if storage_records:
                        has_storage = True
                        break
                
                if has_storage:
                    records_to_keep.append(head_id)
                else:
                    records_to_delete.append(head_id)
            
            # If no head has a storage record, keep one (the first) and
            # delete the rest.
            if not records_to_keep and records_to_delete:
                records_to_keep.append(records_to_delete.pop(0))
            
            # Log what will be kept and what will be deleted.
            logger.info(f"将保留的表头ID: {records_to_keep}")
            logger.info(f"将删除的表头ID: {records_to_delete}")
            
            # Dry-run mode: count what would be deleted, but do nothing.
            if cleanup_config.get('dry_run', False):
                logger.info(f"试运行模式: 跳过删除操作")
                total_deleted += len(records_to_delete)
                continue
            
            # Interactive confirmation before deleting this seq_no's rows.
            if records_to_delete:
                print("\n" + "="*60)
                print(f"序列号 {seq_no} 将删除以下记录:")
                for head_id in records_to_delete:
                    list_query = """
                    SELECT COUNT(*) as count FROM nems_invt_list
                    WHERE inv_id = %s
                    """
                    cursor.execute(list_query, (head_id,))
                    list_count = cursor.fetchone()['count']
                    print(f"- 表头ID: {head_id} (关联表体记录数: {list_count})")
                print(f"保留的表头ID: {records_to_keep}")
                print("="*60)
                
                confirm = input("\n按回车键确认删除上述记录，输入任意字符并回车跳过: ")
                if confirm.strip():
                    print("已跳过删除")
                    continue
                print("开始删除...")
            
            # Delete the records not being kept.
            # All deletions for one seq_no are committed as a single unit.
            try:
                for head_id in records_to_delete:
                    try:
                        # Verify the row still exists before deleting.
                        verify_query = """
                        SELECT 1 FROM nems_invt_head WHERE id = %s
                        """
                        cursor.execute(verify_query, (head_id,))
                        if not cursor.fetchone():
                            logger.warning(f"表头ID: {head_id} 不存在，跳过删除")
                            print(f"表头ID: {head_id} 不存在，跳过删除")
                            continue
                        
                        # Delete the child detail rows first (referential order).
                        delete_list_query = """
                        DELETE FROM nems_invt_list
                        WHERE inv_id = %s
                        """
                        cursor.execute(delete_list_query, (head_id,))
                        list_rows_deleted = cursor.rowcount
                        logger.info(f"已删除表体记录数: {list_rows_deleted}, 表头ID: {head_id}")
                        
                        # Then delete the head row itself.
                        delete_head_query = """
                        DELETE FROM nems_invt_head
                        WHERE id = %s
                        """
                        cursor.execute(delete_head_query, (head_id,))
                        head_rows_deleted = cursor.rowcount
                        
                        if head_rows_deleted > 0:
                            total_deleted += 1
                            logger.info(f"已删除表头ID: {head_id}, 影响行数: {head_rows_deleted}")
                            print(f"已删除表头ID: {head_id}, 影响行数: {head_rows_deleted}")
                            
                            # Re-query to confirm the row is really gone.
                            verify_query = """
                            SELECT 1 FROM nems_invt_head WHERE id = %s
                            """
                            cursor.execute(verify_query, (head_id,))
                            if cursor.fetchone():
                                logger.error(f"删除表头ID: {head_id} 失败，记录仍然存在!")
                                print(f"删除表头ID: {head_id} 失败，记录仍然存在!")
                            else:
                                logger.info(f"验证成功，表头ID: {head_id} 已被删除")
                                print(f"验证成功，表头ID: {head_id} 已被删除")
                        
                    except Error as e:
                        logger.error(f"删除记录 {head_id} 时发生错误: {e}")
                        print(f"删除记录 {head_id} 时发生错误: {e}")
                
                # Commit all deletions for this seq_no in one transaction.
                connection.commit()
                logger.info(f"已提交删除操作, seq_no: {seq_no}")
                print(f"已提交删除操作, seq_no: {seq_no}")
                
            except Error as e:
                connection.rollback()
                logger.error(f"处理 seq_no: {seq_no} 时发生错误，事务已回滚: {e}")
                print(f"处理 seq_no: {seq_no} 时发生错误，事务已回滚: {e}")
        
        if cleanup_config.get('dry_run', False):
            logger.info(f"试运行模式: 共有 {total_deleted} 条记录可以删除")
        else:
            logger.info(f"成功删除 {total_deleted} 条重复记录")
            print(f"成功删除 {total_deleted} 条重复记录")
    except Error as e:
        connection.rollback()
        logger.error(f"处理重复记录时发生错误: {e}")
    except Exception as e:
        connection.rollback()
        logger.error(f"发生未预期的错误: {e}")
    finally:
        cursor.close()

def parse_arguments():
    """Build and apply the command-line interface for the cleanup tool.

    Returns:
        argparse.Namespace with attributes `config` (path to the config
        file, default 'config.py') and `dry_run` (bool flag).
    """
    cli = argparse.ArgumentParser(description='核注单数据清理工具')
    cli.add_argument('-c', '--config', default='config.py', help='配置文件路径')
    cli.add_argument('--dry-run', action='store_true', help='试运行模式，不执行实际删除')
    parsed = cli.parse_args()
    return parsed

def main():
    """Entry point: resolve configuration, connect, run the cleanup, clean up."""
    args = parse_arguments()

    cfg_module = load_config(args.config)

    # Resolve each config section from the loaded module, falling back to
    # the built-in defaults when the module is absent or lacks the section.
    if cfg_module:
        db_config = getattr(cfg_module, 'DB_CONFIG', DEFAULT_DB_CONFIG)
        log_config = getattr(cfg_module, 'LOG_CONFIG', DEFAULT_LOG_CONFIG)
        cleanup_config = getattr(cfg_module, 'CLEANUP_CONFIG', DEFAULT_CLEANUP_CONFIG)
    else:
        db_config = DEFAULT_DB_CONFIG
        log_config = DEFAULT_LOG_CONFIG
        cleanup_config = DEFAULT_CLEANUP_CONFIG

    # A --dry-run flag on the command line overrides the config file.
    if args.dry_run:
        cleanup_config['dry_run'] = True

    # Other functions in this module log through this module-level logger.
    global logger
    logger = setup_logging(log_config)

    logger.info("开始执行核注单数据清理程序")
    logger.info(f"配置信息: 数据库={db_config['host']}:{db_config['port']}, 试运行模式={cleanup_config.get('dry_run', False)}")

    connection = connect_to_database(db_config)
    if not connection:
        logger.error("无法连接到数据库，程序退出")
        return

    try:
        process_duplicates(connection, cleanup_config)
        logger.info("数据清理完成")
    except Exception as e:
        logger.error(f"执行过程中发生错误: {e}")
    finally:
        if connection and connection.is_connected():
            connection.close()
            logger.info("数据库连接已关闭")

# Script entry point.
if __name__ == "__main__":
    main()