import csv
import os

from odps import ODPS
from tqdm import tqdm


class ODPSUploader:
    """Uploads local CSV files into an existing MaxCompute (ODPS) table in batches."""

    def __init__(self, access_id, access_key, project, endpoint):
        """Create an ODPS client bound to the given project and endpoint.

        Args:
            access_id: Aliyun access key ID.
            access_key: Aliyun access key secret.
            project: Target ODPS project name.
            endpoint: ODPS service endpoint URL.
        """
        self.odps = ODPS(access_id, access_key, project, endpoint)

    def upload_csv_to_table(self, table_name, csv_file_path, batch_size=100000):
        """Stream rows from a header-prefixed CSV file into *table_name*.

        The first line of the file is treated as a header and skipped.
        Records are buffered and flushed to a single table writer every
        *batch_size* rows, with a tqdm progress bar over the data rows.

        Args:
            table_name: Name of an existing ODPS table.
            csv_file_path: Path to the CSV file to upload.
            batch_size: Number of records per write call (default 100000).

        Raises:
            Exception: If the target table does not exist.
        """
        # Fail fast if the destination table is missing.
        if not self.odps.exist_table(table_name):
            raise Exception(f"Table {table_name} does not exist.")

        table = self.odps.get_table(table_name)

        # Count data rows (total lines minus the header) so tqdm can show
        # an accurate total. Requires one extra pass over the file.
        with open(csv_file_path, 'r', encoding='utf-8') as f:
            total_rows = sum(1 for _ in f) - 1

        # newline='' is the csv-module convention for correct quoting/newline
        # handling inside quoted fields.
        with open(csv_file_path, 'r', encoding='utf-8', newline='') as f:
            # csv.reader correctly handles quoted fields containing commas,
            # unlike the previous naive line.split(',').
            reader = csv.reader(f)
            next(reader, None)  # skip the header row

            batch = []
            # Open the writer ONCE for the whole upload. The previous code
            # opened a fresh writer per batch, creating a new upload session
            # for every 100k rows, which is slow.
            with table.open_writer() as writer:
                for row in tqdm(reader, total=total_rows, unit="row"):
                    batch.append(table.new_record(row))
                    if len(batch) >= batch_size:
                        writer.write(batch)
                        batch = []  # reset buffer for the next batch

                # Flush the final partial batch, if any.
                if batch:
                    writer.write(batch)

if __name__ == '__main__':
    # SECURITY: an Aliyun access key ID/secret pair was previously hard-coded
    # here and committed to source control — those credentials must be treated
    # as leaked and rotated immediately. Credentials are now read from the
    # environment; project/endpoint keep their previous values as defaults.
    odps_loader = ODPSUploader(
        os.environ["ODPS_ACCESS_ID"],
        os.environ["ODPS_ACCESS_KEY"],
        os.environ.get("ODPS_PROJECT", "MaxCompute_instance"),
        os.environ.get(
            "ODPS_ENDPOINT",
            "http://service.cn-zhangjiakou.maxcompute.aliyun.com/api",
        ),
    )
    odps_loader.upload_csv_to_table(
        "insurance_customer_id_encrypt_mapping", "customer_id_dncrypt.csv"
    )
