import mysql.connector
from mysql.connector import errorcode
import csv
import os
import datetime
import time
# Import tqdm under an alias to avoid name clashes elsewhere in the script
from tqdm import tqdm as progressbar


def import_csv_to_mysql(csv_file_path, batch_size=5000, clear_existing_data=True,
                        host="localhost", user="root", password="123456", port=3306):
    """Bulk-import a user-behavior CSV file into MySQL table ``user_behavior.user_behavior``.

    Expected CSV row layout (after a header line):
    ``user_id, item_id, category_id, behavior_type, unix_timestamp``.
    Rows that are too short or carry an unparsable timestamp are reported and skipped.

    Args:
        csv_file_path: Path to the source CSV file (UTF-8 encoded, with header).
        batch_size: Number of rows accumulated before each ``executemany`` + commit.
        clear_existing_data: When True, TRUNCATE the target table before importing.
        host, user, password, port: MySQL connection settings (defaults preserve
            the original hard-coded values, so existing callers are unaffected).

    Side effects:
        Creates the database/table if missing, optionally truncates it, inserts
        the CSV rows, and prints progress/summary information to stdout.
    """
    connection = None
    cursor = None  # initialized here so the finally block can't hit a NameError
    try:
        # Connect to the MySQL server (database selected below via USE).
        connection = mysql.connector.connect(
            host=host,
            user=user,
            password=password,
            port=port
        )

        if connection.is_connected():
            cursor = connection.cursor()

            # Create the database if it does not exist, then select it.
            cursor.execute("CREATE DATABASE IF NOT EXISTS user_behavior DEFAULT CHARACTER SET utf8mb4")
            cursor.execute("USE user_behavior")

            # Create the target table if it does not exist.
            create_table_query = """
            CREATE TABLE IF NOT EXISTS user_behavior (
                id INT AUTO_INCREMENT PRIMARY KEY,
                user_id INT NOT NULL,
                item_id INT NOT NULL,
                behavior_type VARCHAR(20) NOT NULL,
                behavior_time DATETIME NOT NULL,
                category_id INT,
                INDEX idx_user (user_id),
                INDEX idx_item (item_id),
                INDEX idx_time (behavior_time)
            ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
            """
            cursor.execute(create_table_query)
            print("表user_behavior准备就绪")

            # Optionally wipe existing rows (TRUNCATE commits implicitly).
            if clear_existing_data:
                cursor.execute("TRUNCATE TABLE user_behavior")
                print("已清空表中现有数据")

            # Report file size so the user knows roughly what to expect.
            file_size = os.path.getsize(csv_file_path)
            print(f"文件大小: {file_size / (1024 * 1024):.2f} MB")

            # Parameterized insert — values are bound, never string-formatted.
            insert_query = """
            INSERT INTO user_behavior 
            (user_id, item_id, behavior_type, behavior_time, category_id) 
            VALUES (%s, %s, %s, %s, %s)
            """

            row_count = 0
            data_batch = []
            start_time = time.time()

            with open(csv_file_path, 'r', encoding='utf-8') as csvfile:
                # First pass: count data rows so the progress bar has a total.
                csvreader = csv.reader(csvfile)
                header = next(csvreader)
                print(f"CSV表头: {header}")
                total_rows = sum(1 for _ in csvreader)
                print(f"预计总数据行数: {total_rows}")

                # Second pass: rewind and build a FRESH reader rather than
                # reusing the old one across a seek (safer w.r.t. reader state).
                csvfile.seek(0)
                csvreader = csv.reader(csvfile)
                next(csvreader)  # skip the header again

                pbar = progressbar(total=total_rows, unit='行', desc='导入进度')

                # row_num starts at 2: line 1 of the file is the header.
                for row_num, row in enumerate(csvreader, 2):
                    try:
                        if len(row) < 5:
                            print(f"\n行 {row_num} 数据不完整，跳过: {row}")
                            pbar.update(1)
                            continue

                        user_id = int(row[0])
                        item_id = int(row[1])
                        category_id = int(row[2])
                        behavior_type = row[3]
                        timestamp = row[4]

                        try:
                            # Convert the unix timestamp to a DATETIME string.
                            # NOTE(review): fromtimestamp uses the LOCAL timezone —
                            # confirm that matches how the timestamps were produced.
                            behavior_time = datetime.datetime.fromtimestamp(int(timestamp)).strftime(
                                '%Y-%m-%d %H:%M:%S')
                        except Exception as e:
                            print(f"\n行 {row_num} 时间戳转换失败: {e}, 时间戳: {timestamp}")
                            pbar.update(1)
                            continue

                        data_batch.append((user_id, item_id, behavior_type, behavior_time, category_id))
                        row_count += 1

                        # Flush a full batch to the database.
                        if len(data_batch) >= batch_size:
                            cursor.executemany(insert_query, data_batch)
                            connection.commit()
                            data_batch = []

                            pbar.update(batch_size)

                            # Derive throughput and a rough time-remaining estimate.
                            elapsed_time = time.time() - start_time
                            rows_per_second = row_count / elapsed_time if elapsed_time > 0 else 0
                            remaining_time = (total_rows - row_count) / rows_per_second if rows_per_second > 0 else 0

                            pbar.set_postfix({
                                '速度': f'{rows_per_second:.2f}行/秒',
                                '剩余': f'{remaining_time:.1f}秒'
                            })
                    except Exception as e:
                        print(f"\n处理行 {row_num} 时出错: {e}, 数据: {row}")
                        pbar.update(1)
                        continue

                # Account for the trailing partial batch so the bar reaches 100%
                # (the original left these rows un-counted on the progress bar).
                if data_batch:
                    pbar.update(len(data_batch))
                pbar.close()

            # Insert whatever is left in the final (partial) batch.
            if data_batch:
                cursor.executemany(insert_query, data_batch)
                connection.commit()

            # Final summary; guard against a zero-duration import.
            total_time = time.time() - start_time
            print(f"\n数据导入完成，共导入 {row_count} 条数据，耗时 {total_time:.2f} 秒")
            if total_time > 0:
                print(f"平均速度: {row_count / total_time:.2f} 行/秒")

    except mysql.connector.Error as err:
        print(f"数据库错误: {err}")
        if connection:
            connection.rollback()
    except Exception as e:
        print(f"导入错误: {e}")
        if connection:
            connection.rollback()
    finally:
        # cursor is closed only if it was actually created.
        if cursor is not None:
            cursor.close()
        if connection is not None and connection.is_connected():
            connection.close()
            print("数据库连接已关闭")


if __name__ == "__main__":
    # Full path to the CSV file to import; replace with your own file.
    csv_file = r"C:\Users\LGXY\Downloads\UserBehavior-1.csv"

    if not os.path.exists(csv_file):
        print(f"错误: 文件 {csv_file} 不存在")
    else:
        # NOTE: the original auto-install fallback for tqdm was removed — it was
        # dead code: the module-level `from tqdm import tqdm as progressbar`
        # would already have raised ImportError before this point, so the
        # `pip install` branch could never execute. Install tqdm up front
        # (`pip install tqdm`) before running this script.
        import_csv_to_mysql(csv_file, batch_size=5000, clear_existing_data=True)