import logging
import re
import socket
import threading
import time
import traceback
from concurrent.futures import ThreadPoolExecutor, as_completed
from datetime import datetime
from functools import wraps
from queue import Empty, Full, Queue
from urllib.parse import urlparse

import pymysql
import requests
from dbutils.pooled_db import PooledDB

# 配置信息
CONFIG = {
    'DB_POOL': {
        'maxconnections': 20,    # 最大连接数
        'mincached': 5,         # 初始连接数
        'maxcached': 10,        # 最大空闲连接
        'maxshared': 10         # 最大共享连接数
    },
    'THREAD_POOL': {
        'goods_workers': 6,     # 商品处理线程数
        'sku_workers': 4,       # SKU处理线程数
        'image_workers': 3      # 图片处理线程数
    },
    'MONITOR': {
        'update_interval': 5,   # 监控信息更新间隔（秒）
        'retry_times': 3,       # 失败重试次数
        'retry_delay': 1        # 重试延迟（秒）
    },
    'IMAGE': {
        'timeout': 5,           # 图片请求超时时间（秒）
        'retry_times': 2,       # 图片请求重试次数
        'valid_codes': [200],   # 有效的 HTTP 状态码
        'check_content_type': True  # 是否检查内容类型
    }
}

# 日志配置
log_file = f"migration_errors_{datetime.now().strftime('%Y%m%d_%H%M%S')}.log"
logging.basicConfig(
    filename=log_file,
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)

# 数据库配置
source_db_config = {
    "host": "127.0.0.1",
    "user": "root",
    "password": "123456",
    "database": "maigeyin",
    "autocommit": True,
}

target_db_config = {
    "host": "127.0.0.1",
    "user": "root",
    "password": "123456",
    "database": "5i40",
    "autocommit": True,
}

# 创建数据库连接池
source_pool = PooledDB(
    creator=pymysql,
    maxconnections=CONFIG['DB_POOL']['maxconnections'],
    mincached=CONFIG['DB_POOL']['mincached'],
    maxcached=CONFIG['DB_POOL']['maxcached'],
    maxshared=CONFIG['DB_POOL']['maxshared'],
    blocking=True,
    maxusage=10000,
    setsession=[],
    ping=1,
    **source_db_config
)

target_pool = PooledDB(
    creator=pymysql,
    maxconnections=CONFIG['DB_POOL']['maxconnections'],
    mincached=CONFIG['DB_POOL']['mincached'],
    maxcached=CONFIG['DB_POOL']['maxcached'],
    maxshared=CONFIG['DB_POOL']['maxshared'],
    blocking=True,
    maxusage=10000,
    setsession=[],
    ping=1,
    **target_db_config
)

# 连接队列
source_conn_queue = Queue(maxsize=CONFIG['DB_POOL']['maxconnections'])
target_conn_queue = Queue(maxsize=CONFIG['DB_POOL']['maxconnections'])

# 统计信息类
class MigrationStats:
    def __init__(self):
        self._lock = threading.Lock()
        self.reset()
    
    def reset(self):
        with self._lock:
            self.start_time = time.time()
            self.total_goods = 0
            self.processed_goods = 0
            self.processed_skus = 0
            self.failed_goods = 0
            self.failed_skus = 0
            self.retried_operations = 0
            self.invalid_images = 0
            self.current_speed = 0
            self.avg_speed = 0
            self.estimated_time = 0
    
    def update_progress(self, goods_delta=0, sku_delta=0, failed_goods_delta=0, failed_sku_delta=0, invalid_images_delta=0):
        with self._lock:
            self.processed_goods += goods_delta
            self.processed_skus += sku_delta
            self.failed_goods += failed_goods_delta
            self.failed_skus += failed_sku_delta
            self.invalid_images += invalid_images_delta
            
            elapsed_time = time.time() - self.start_time
            if elapsed_time > 0:
                self.avg_speed = self.processed_goods / elapsed_time
                remaining_items = self.total_goods - self.processed_goods
                self.estimated_time = remaining_items / self.avg_speed if self.avg_speed > 0 else 0
    
    def get_stats(self):
        with self._lock:
            return {
                'total': self.total_goods,
                'processed': self.processed_goods,
                'failed': self.failed_goods,
                'skus': self.processed_skus,
                'failed_skus': self.failed_skus,
                'retries': self.retried_operations,
                'invalid_images': self.invalid_images,
                'speed': self.avg_speed,
                'eta': self.estimated_time
            }

# 创建全局统计对象
stats = MigrationStats()

# 数据库连接管理
def init_connection_pools():
    """初始化连接池"""
    try:
        for _ in range(CONFIG['DB_POOL']['mincached']):
            source_conn_queue.put(source_pool.connection())
            target_conn_queue.put(target_pool.connection())
        logging.info(f"连接池初始化完成，初始连接数: {CONFIG['DB_POOL']['mincached']}")
    except Exception as e:
        logging.error(f"初始化连接池失败: {str(e)}")
        raise

def get_source_connection():
    """获取源数据库连接"""
    try:
        return source_conn_queue.get(timeout=5)
    except:
        return source_pool.connection()

def get_target_connection():
    """获取目标数据库连接"""
    try:
        return target_conn_queue.get(timeout=5)
    except:
        return target_pool.connection()

def return_connections(source_conn, target_conn):
    """归还数据库连接"""
    if source_conn:
        try:
            source_conn_queue.put(source_conn, timeout=1)
        except:
            source_conn.close()
    if target_conn:
        try:
            target_conn_queue.put(target_conn, timeout=1)
        except:
            target_conn.close()

# 图片处理相关
class ImageValidator:
    def __init__(self):
        self._session = requests.Session()
        self._session.headers.update({
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
        })
        self._cache = {}
        self._lock = threading.Lock()

    def verify_image(self, image_url):
        """验证图片是否可访问"""
        if not image_url:
            return False

        # 检查缓存
        with self._lock:
            if image_url in self._cache:
                return self._cache[image_url]

        try:
            for _ in range(CONFIG['IMAGE']['retry_times']):
                try:
                    response = self._session.head(
                        image_url, 
                        timeout=CONFIG['IMAGE']['timeout'],
                        allow_redirects=True
                    )
                    
                    is_valid = (
                        response.status_code in CONFIG['IMAGE']['valid_codes'] and
                        (not CONFIG['IMAGE']['check_content_type'] or 
                         'image' in response.headers.get('content-type', '').lower())
                    )
                    
                    # 缓存结果
                    with self._lock:
                        self._cache[image_url] = is_valid
                    
                    if not is_valid:
                        stats.update_progress(invalid_images_delta=1)
                        logging.warning(f"无效的图片URL: {image_url}, 状态码: {response.status_code}")
                    
                    return is_valid

                except (requests.RequestException, socket.timeout) as e:
                    logging.warning(f"验证图片失败: {image_url}, 错误: {str(e)}")
                    continue

            return False

        except Exception as e:
            logging.error(f"验证图片时发生错误: {str(e)}, URL: {image_url}")
            return False

# 创建全局图片验证器实例
image_validator = ImageValidator()

def process_image_path(path):
    """处理图片路径，验证图片是否存在"""
    if not path:
        return ""
    
    # 替换旧域名
    if '5izazhi.5izazhi.com' in path:
        path = path.replace('5izazhi.5izazhi.com', 'zimg.zazhiejia.com')
    
    # 处理路径
    if path.startswith(('http://', 'https://')):
        image_url = path
    else:
        path = path.lstrip('/')
        image_url = f"https://zimg.zazhiejia.com/{path}"
    
    # 验证图片
    if image_validator.verify_image(image_url):
        return image_url
    return ""

def process_content_images(content):
    """处理内容中的图片路径"""
    if not content:
        return content
    
    def replace_image(match):
        path = match.group(1)
        new_path = process_image_path(path)
        return f'src="{new_path}"' if new_path else ''
    
    # 处理 src="path" 的情况
    pattern = r'src=["\']((?!data:)[^"\']+)["\']'
    content = content.replace('5izazhi.5izazhi.com', 'zimg.zazhiejia.com')
    return re.sub(pattern, replace_image, content)

def clean_empty_images(goods_data):
    """清理商品数据中的空图片路径"""
    # 清理单个图片字段
    for field in ['goods_thumb', 'goods_img', 'original_img', 'goods_office', 'goods_trial']:
        if field in goods_data and not goods_data[field]:
            goods_data[field] = None
    
    # 清理图片列表
    if 'images' in goods_data and goods_data['images']:
        images = [img for img in goods_data['images'].split(',') if img.strip()]
        goods_data['images'] = ','.join(images) if images else None
    
    return goods_data

# 监控函数
def monitor_progress():
    """监控迁移进度"""
    while True:
        try:
            current_stats = stats.get_stats()
            
            # 清屏并显示当前状态
            print("\033[2J\033[H")  # 清屏
            print("=== 数据迁移监控 ===")
            print(f"总商品数: {current_stats['total']}")
            print(f"已处理商品: {current_stats['processed']}")
            print(f"处理失败: {current_stats['failed']}")
            print(f"已处理SKU: {current_stats['skus']}")
            print(f"SKU处理失败: {current_stats['failed_skus']}")
            print(f"无效图片: {current_stats['invalid_images']}")
            print(f"重试操作: {current_stats['retries']}")
            print(f"当前速度: {current_stats['speed']:.2f} 商品/秒")
            print(f"预计剩余时间: {current_stats['eta']:.2f} 秒")
            print("=" * 30)
            
            # 如果处理完成，退出监控
            if current_stats['processed'] + current_stats['failed'] >= current_stats['total']:
                break
                
            time.sleep(CONFIG['MONITOR']['update_interval'])
            
        except Exception as e:
            logging.error(f"监控线程发生错误: {str(e)}")
            time.sleep(CONFIG['MONITOR']['update_interval'])


# 重试装饰器
def with_retry(func):
    """带重试机制的装饰器"""
    def wrapper(*args, **kwargs):
        for attempt in range(CONFIG['MONITOR']['retry_times']):
            try:
                result = func(*args, **kwargs)
                return result
            except Exception as e:
                if attempt < CONFIG['MONITOR']['retry_times'] - 1:
                    stats.retried_operations += 1
                    logging.warning(f"操作失败，准备第 {attempt + 2} 次重试: {str(e)}")
                    time.sleep(CONFIG['MONITOR']['retry_delay'])
                else:
                    raise
    return wrapper

@with_retry
def process_single_sku(product, new_goods_id, base_price, market_price, sku_map):
    """处理单个 SKU"""
    source_conn = None
    target_conn = None
    source_cursor = None
    target_cursor = None
    goods_sn = None
    
    try:
        source_conn = get_source_connection()
        target_conn = get_target_connection()
        source_cursor = source_conn.cursor(pymysql.cursors.DictCursor)
        target_cursor = target_conn.cursor(pymysql.cursors.DictCursor)

        goods_sn = product['product_sn']
        product_number = product['product_number']
        goods_attr = product['goods_attr']

        # SKU ID 处理
        if not goods_attr:
            sku_id = "0"
        else:
            try:
                sku_attrs = [attr.strip() for attr in goods_attr.split("|") if attr.strip()]
                sku_ids = []
                for attr in sku_attrs:
                    try:
                        attr_id = int(attr)
                        if attr_id in sku_map:
                            sku_ids.append(str(sku_map[attr_id]))
                    except (ValueError, TypeError):
                        continue
                sku_id = ",".join(sku_ids) if sku_ids else "0"
            except Exception:
                sku_id = "0"

        # 获取订阅年份
        years = 1
        if goods_attr and "|" in goods_attr:
            source_cursor.execute("""
                SELECT attr_value 
                FROM ecs_goods_attr 
                WHERE goods_attr_id = %s
            """, (goods_attr.split("|")[1],))
            attr_result = source_cursor.fetchone()
            if attr_result:
                years = get_subscription_years(attr_result['attr_value'])

        # 计算最终价格
        try:
            base_price = float(base_price) if base_price else 0.0
            market_price = float(market_price) if market_price else 0.0
            
            final_price = round(base_price * years, 2)
            final_market_price = round(market_price * years, 2)
            
            logging.info(f"""
                SKU价格计算:
                商品编号: {goods_sn}
                基准价格: {base_price}
                市场价格: {market_price}
                订阅年份: {years}
                最终价格: {final_price}
                最终市场价: {final_market_price}
            """)
            
        except (TypeError, ValueError) as e:
            logging.error(f"价格计算错误: {str(e)}, goods_sn: {goods_sn}")
            final_price = base_price
            final_market_price = market_price

        # 插入 SKU 数据
        current_time = int(time.time())
        sku_sql = """
            INSERT INTO fa_shop_goods_sku 
            (goods_id, goods_sn, sku_id, price, marketprice, stocks, createtime, updatetime)
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
        """
        target_cursor.execute(sku_sql, (
            new_goods_id, goods_sn, sku_id, final_price, 
            final_market_price, product_number, current_time, current_time
        ))
        target_conn.commit()

        stats.update_progress(sku_delta=1)

    except Exception as e:
        error_msg = f"处理 SKU 时发生错误: {str(e)}"
        if goods_sn:
            error_msg += f", goods_sn: {goods_sn}"
        logging.error(error_msg)
        stats.update_progress(failed_sku_delta=1)
        raise
    finally:
        if source_cursor:
            source_cursor.close()
        if target_cursor:
            target_cursor.close()
        return_connections(source_conn, target_conn)

@with_retry
def migrate_sku(old_goods_id, new_goods_id):
    """处理商品的规格和 SKU 数据"""
    source_conn = None
    target_conn = None
    try:
        source_conn = get_source_connection()
        target_conn = get_target_connection()
        source_cursor = source_conn.cursor(pymysql.cursors.DictCursor)
        target_cursor = target_conn.cursor(pymysql.cursors.DictCursor)

        # 获取商品基础价格
        source_cursor.execute("""
            SELECT shop_price, market_price 
            FROM ecs_goods 
            WHERE goods_id = %s
        """, (old_goods_id,))
        goods_price = source_cursor.fetchone()
        base_price = goods_price['shop_price']
        market_price = goods_price['market_price']

        # 获取规格属性
        source_cursor.execute("""
            SELECT ga.*, a.attr_name 
            FROM ecs_goods_attr ga 
            LEFT JOIN ecs_attribute a ON ga.attr_id = a.attr_id 
            WHERE ga.goods_id = %s
        """, (old_goods_id,))
        goods_attrs = source_cursor.fetchall()

        spec_map = {}
        sku_map = {}

        # 处理规格模板
        for attr in goods_attrs:
            attr_id = attr['attr_id']
            attr_value = attr['attr_value']
            attr_name = attr['attr_name']

            # 确定规格名称
            spec_name = "起刊时间" if "时间" in attr_name or "日期" in attr_name else "订阅年份"

            # 插入规格
            if attr_id not in spec_map:
                current_time = int(time.time())
                spec_sql = """
                    INSERT INTO fa_shop_spec 
                    (name, createtime, updatetime) 
                    VALUES (%s, %s, %s)
                """
                target_cursor.execute(spec_sql, (spec_name, current_time, current_time))
                target_conn.commit()

                target_cursor.execute("SELECT LAST_INSERT_ID() AS id")
                spec_map[attr_id] = target_cursor.fetchone()['id']

            # 插入规格值
            current_time = int(time.time())
            spec_value_sql = """
                INSERT INTO fa_shop_spec_value 
                (spec_id, value, createtime, updatetime) 
                VALUES (%s, %s, %s, %s)
            """
            target_cursor.execute(spec_value_sql, 
                (spec_map[attr_id], attr_value, current_time, current_time)
            )
            target_conn.commit()

            target_cursor.execute("SELECT LAST_INSERT_ID() AS id")
            spec_value_id = target_cursor.fetchone()['id']
            sku_map[attr['goods_attr_id']] = spec_value_id

            # 插入商品规格关系
            sku_spec_sql = """
                INSERT INTO fa_shop_goods_sku_spec 
                (goods_id, spec_id, spec_value_id, createtime, updatetime) 
                VALUES (%s, %s, %s, %s, %s)
            """
            target_cursor.execute(sku_spec_sql, 
                (new_goods_id, spec_map[attr_id], spec_value_id, current_time, current_time)
            )
            target_conn.commit()

        # 处理 SKU
        source_cursor.execute("""
            SELECT * FROM ecs_products 
            WHERE goods_id = %s
        """, (old_goods_id,))
        products = source_cursor.fetchall()

        # 使用线程池处理 SKU
        with ThreadPoolExecutor(max_workers=CONFIG['THREAD_POOL']['sku_workers']) as executor:
            futures = []
            for product in products:
                futures.append(
                    executor.submit(
                        process_single_sku,
                        product,
                        new_goods_id,
                        base_price,
                        market_price,
                        sku_map
                    )
                )

            for future in as_completed(futures):
                try:
                    future.result()
                except Exception as e:
                    logging.error(f"SKU 处理任务失败: {str(e)}")
                    stats.update_progress(failed_sku_delta=1)

    except Exception as e:
        logging.error(f"处理规格和 SKU 时发生错误: {str(e)}")
        raise
    finally:
        if source_cursor:
            source_cursor.close()
        if target_cursor:
            target_cursor.close()
        return_connections(source_conn, target_conn)

@with_retry
def process_single_goods(goods, category_map):
    """处理单个商品"""
    target_conn = None
    target_cursor = None
    try:
        target_conn = get_target_connection()
        target_cursor = target_conn.cursor(pymysql.cursors.DictCursor)

        goods_id = goods['goods_id']
        cat_id = goods['cat_id']
        category_id = category_map.get(cat_id, 999)

        # 处理图片路径
        goods_thumb = process_image_path(goods['goods_thumb'])
        goods_img = process_image_path(goods['goods_img'])
        original_img = process_image_path(goods['original_img'])
        
        # 处理描述
        description = goods['goods_brief']
        if description:
            description = description.replace('杂志E家', '麦格音').replace('杂志2007', '麦格音')

        # 处理关键词
        keywords = []
        for field in ['keywords', 'introduce', 'recommended', 'shopdian']:
            if goods.get(field):
                keywords.append(goods[field])
        keywords_str = ','.join(filter(None, keywords))

        # 处理商品详情
        content = goods.get('goods_desc', '')
        if content:
            content = content.replace('杂志E家', '麦格音').replace('杂志2007', '麦格音')
            content = process_content_images(content)

        # 准备商品数据
        goods_data = {
            'category_id': category_id,
            'goods_sn': goods['goods_sn'],
            'title': goods['goods_name'],
            'description': description,
            'content': content,
            'keywords': keywords_str,
            'price': goods['shop_price'],
            'marketprice': goods['market_price'],
            'stocks': goods['goods_number'],
            'image': goods_thumb,
            'images': f"{goods_img},{original_img}" if goods_img or original_img else None,
            'spectype': 1,
            'goods_office': process_image_path(goods.get('goods_office', '')),
            'goods_trial': process_image_path(goods.get('goods_trial', '')),
            'goods_kind': goods.get('goods_kind', ''),
            'pinyin': goods.get('pinyin', ''),
            'weigh': goods.get('ranking', 0),
            'createtime': int(time.time()),
            'updatetime': int(time.time())
        }

        # 清理空图片
        goods_data = clean_empty_images(goods_data)

        # 插入商品数据
        fields = ', '.join(goods_data.keys())
        placeholders = ', '.join(['%s'] * len(goods_data))
        goods_sql = f"INSERT INTO fa_shop_goods ({fields}) VALUES ({placeholders})"
        target_cursor.execute(goods_sql, tuple(goods_data.values()))
        target_conn.commit()

        # 获取新商品ID
        target_cursor.execute("SELECT LAST_INSERT_ID() AS id")
        new_goods_id = target_cursor.fetchone()['id']

        # 处理规格和SKU
        migrate_sku(goods_id, new_goods_id)

        stats.update_progress(goods_delta=1)
        logging.info(f"商品迁移成功: {goods['goods_name']} (ID: {goods_id} -> {new_goods_id})")

    except Exception as e:
        error_msg = f"迁移商品 ID {goods_id} ({goods['goods_name']}) 时发生错误: {str(e)}"
        logging.error(error_msg)
        stats.update_progress(failed_goods_delta=1)
        raise
    finally:
        if target_cursor:
            target_cursor.close()
        if target_conn:
            return_connections(None, target_conn)

def migrate_goods(category_map):
    """迁移商品数据"""
    source_conn = None
    source_cursor = None
    try:
        source_conn = get_source_connection()
        source_cursor = source_conn.cursor(pymysql.cursors.DictCursor)
        
        # 获取所有商品
        source_cursor.execute("SELECT * FROM ecs_goods ORDER BY goods_id")
        goods_list = source_cursor.fetchall()
        stats.total_goods = len(goods_list)
        
        print(f"开始迁移 {stats.total_goods} 个商品...")

        # 启动监控线程
        monitor_thread = threading.Thread(target=monitor_progress, daemon=True)
        monitor_thread.start()

        # 使用线程池处理商品
        with ThreadPoolExecutor(max_workers=CONFIG['THREAD_POOL']['goods_workers']) as executor:
            futures = []
            for goods in goods_list:
                futures.append(
                    executor.submit(process_single_goods, goods, category_map)
                )

            # 等待所有任务完成
            for future in as_completed(futures):
                try:
                    future.result()
                except Exception as e:
                    logging.error(f"商品处理任务失败: {str(e)}")

        # 等待监控线程结束
        monitor_thread.join(timeout=5)

        # 打印最终统计信息
        final_stats = stats.get_stats()
        print("\n迁移完成!")
        print(f"总商品数: {final_stats['total']}")
        print(f"成功处理: {final_stats['processed']}")
        print(f"处理失败: {final_stats['failed']}")
        print(f"处理SKU数: {final_stats['skus']}")
        print(f"SKU失败数: {final_stats['failed_skus']}")
        print(f"无效图片数: {final_stats['invalid_images']}")
        print(f"重试操作数: {final_stats['retries']}")

    except Exception as e:
        logging.error(f"迁移商品过程中发生错误: {str(e)}\n{traceback.format_exc()}")
        print(f"迁移商品过程中发生错误: {str(e)}")
    finally:
        if source_cursor:
            source_cursor.close()
        if source_conn:
            return_connections(source_conn, None)

def get_subscription_years(attr_value):
    """根据属性值获取订阅年份"""
    if not attr_value:
        return 1
    
    attr_value = str(attr_value).lower()
    if "两年" in attr_value or "2年" in attr_value:
        return 2
    elif "三年" in attr_value or "3年" in attr_value:
        return 3
    return 1

def clear_target_tables():
    """清空目标数据库相关表数据"""
    print("清空目标数据库相关表数据...")
    target_conn = None
    target_cursor = None
    try:
        target_conn = get_target_connection()
        target_cursor = target_conn.cursor()
        tables = ["fa_shop_category", "fa_shop_goods", "fa_shop_goods_sku", 
                 "fa_shop_goods_sku_spec", "fa_shop_spec", "fa_shop_spec_value"]
        for table in tables:
            sql = f"TRUNCATE TABLE {table}"
            target_cursor.execute(sql)
            print(f"已清空表 {table}")
        target_conn.commit()
    except Exception as e:
        logging.error(f"清空目标表时发生错误: {e}")
        print(f"清空目标表时发生错误: {e}")
    finally:
        if target_cursor:
            target_cursor.close()
        if target_conn:
            return_connections(None, target_conn)


def create_default_category():
    """创建默认分类"""
    print("创建默认分类...")
    target_conn = None
    target_cursor = None
    try:
        target_conn = get_target_connection()
        target_cursor = target_conn.cursor()
        sql = """
            INSERT INTO fa_shop_category 
            (id, name, pid, description, isnav, weigh, status, createtime, updatetime)
            VALUES (999, '其他杂志', 0, '未匹配分类', 1, 0, 'normal', %s, %s)
        """
        current_time = int(time.time())
        target_cursor.execute(sql, (current_time, current_time))
        target_conn.commit()
        print("默认分类创建成功！")
    except Exception as e:
        logging.error(f"创建默认分类时发生错误: {e}")
        print(f"创建默认分类时发生错误: {e}")
    finally:
        if target_cursor:
            target_cursor.close()
        if target_conn:
            return_connections(None, target_conn)

def main():
    """主函数"""
    try:
        print("开始数据迁移...")
        
        # 初始化连接池
        init_connection_pools()
        
        # 清空目标表
        clear_target_tables()
        
        # 创建默认分类
        create_default_category()
        
        # 示例分类映射
        category_map = {0: 999}  # 可以根据需要修改分类映射
        
        # 迁移商品
        migrate_goods(category_map)
        
        print("数据迁移完成！")
        
    except Exception as e:
        logging.error(f"迁移过程中发生错误: {str(e)}\n{traceback.format_exc()}")
        print(f"迁移过程中发生错误: {str(e)}")

if __name__ == "__main__":
    main()
