#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import subprocess
import sys
import re
import logging
import argparse
import time

# Logging setup: timestamped INFO-level messages for the whole script.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

def run_hive_command(cmd, check=True):
    """Execute a HiveQL statement through Beeline and return its stripped stdout.

    Relies on the module-level ``beeline_url``/``username``/``password``
    globals assigned in ``main()``.

    Args:
        cmd: The HiveQL statement to run (may be multi-line, e.g. a DDL).
        check: When True, raise RuntimeError on a non-zero Beeline exit code.

    Returns:
        Beeline's stdout with surrounding whitespace removed.

    Raises:
        RuntimeError: If ``check`` is True and the command fails.
    """
    # Pass an argv list with shell=False: the previous shell-string form
    # broke on statements containing double quotes or newlines (the DDL
    # returned by SHOW CREATE TABLE is one such input) and was vulnerable
    # to shell injection via table/partition names.
    argv = [
        "beeline",
        "-u", beeline_url,
        "-n", username,
        "-p", password,
        "--showHeader=false",
        "--outputformat=tsv2",
        "-e", cmd,
    ]
    # Mask the password (argv[6]) so credentials never reach the logs.
    logger.debug("Executing: %s", " ".join(argv[:6] + ["***"] + argv[7:]))
    result = subprocess.run(argv, capture_output=True, text=True)
    if check and result.returncode != 0:
        logger.error(f"Command failed: {cmd}")
        logger.error(f"STDERR: {result.stderr}")
        raise RuntimeError(f"Hive command failed: {result.stderr}")
    return result.stdout.strip()

def get_table_partitions(db_name, table_name):
    """Return every partition spec of the table.

    Each entry looks like ``'dt=2025-10-01'`` (or
    ``'dt=2025-10-01/city=beijing'`` for multi-key partitions). An empty
    list is returned when SHOW PARTITIONS produces no output.
    """
    output = run_hive_command(f"SHOW PARTITIONS {db_name}.{table_name};")
    if not output:
        return []
    return [row.strip() for row in output.split('\n') if row.strip()]

def get_table_create_ddl(db_name, table_name):
    """Fetch the table's CREATE TABLE statement (used to clone a temp table)."""
    return run_hive_command(f"SHOW CREATE TABLE {db_name}.{table_name};")

def extract_partition_spec(partition_str):
    """Parse a partition string into an ordered key/value dict.

    Example: ``'dt=2025-10-01/city=beijing'`` ->
    ``{'dt': '2025-10-01', 'city': 'beijing'}``. Segments without an
    ``=`` are silently skipped.
    """
    result = {}
    for segment in partition_str.split('/'):
        key, sep, value = segment.partition('=')
        if sep:
            result[key] = value
    return result

def build_partition_where_clause(spec_dict):
    """Render a WHERE predicate such as ``dt = '2025-10-01' AND city = 'beijing'``.

    Returns an empty string for an empty spec dict.
    """
    return " AND ".join(f"{col} = '{val}'" for col, val in spec_dict.items())

def build_partition_path(spec_dict):
    """Render the HDFS-style partition path, e.g. ``dt=2025-10-01/city=beijing``."""
    segments = []
    for col, val in spec_dict.items():
        segments.append(f"{col}={val}")
    return "/".join(segments)

def count_rows_in_partition(db_name, table_name, where_clause):
    """Count rows in the table, optionally restricted by ``where_clause``.

    Returns 0 when the query output is not a plain non-negative integer
    (e.g. empty or polluted output).
    """
    query = f"SELECT COUNT(*) FROM {db_name}.{table_name}"
    if where_clause:
        query = f"{query} WHERE {where_clause}"
    output = run_hive_command(query + ";")
    return int(output) if output.isdigit() else 0

def main(args):
    """Compact small files for every partition of a Hive table.

    Strategy: copy each partition's rows into a non-partitioned temporary
    table (the rewrite consolidates the files), then INSERT OVERWRITE them
    back into the original partition, verifying row counts before/after.

    Args:
        args: Parsed CLI namespace with beeline_url, username, password,
            database and table attributes.

    Raises:
        RuntimeError: On Hive command failure or a row-count mismatch.
    """
    global beeline_url, username, password
    beeline_url = args.beeline_url
    username = args.username
    password = args.password
    db_name = args.database
    table_name = args.table

    # Timestamp suffix keeps concurrent runs from colliding on the temp name.
    temp_table_name = f"{table_name}_temp_compact_{int(time.time())}"

    try:
        # Step 1: fetch the original table's DDL.
        logger.info("Fetching table DDL...")
        ddl = get_table_create_ddl(db_name, table_name)
        if not ddl:
            raise RuntimeError("Failed to get table DDL")

        # Step 2: create a non-partitioned temp table by stripping the
        # PARTITIONED BY clause and any LOCATION (so the temp table is
        # managed and does not touch the original external path).
        # NOTE(review): stripping PARTITIONED BY means the temp table lacks
        # the partition columns, while "SELECT *" on a partitioned source
        # includes them — confirm the column lists line up for partitioned
        # tables before relying on this in production.
        create_temp_ddl = re.sub(r'PARTITIONED BY\s*\(.*?\)', '', ddl, flags=re.IGNORECASE | re.DOTALL)
        create_temp_ddl = re.sub(r'CREATE TABLE\s+\S+', f'CREATE TABLE {db_name}.{temp_table_name}', create_temp_ddl, flags=re.IGNORECASE)
        create_temp_ddl = re.sub(r"LOCATION\s+'[^']+'", '', create_temp_ddl, flags=re.IGNORECASE)
        logger.info(f"Creating temporary table: {db_name}.{temp_table_name}")
        run_hive_command(create_temp_ddl)

        # Step 3: enumerate partitions; an empty string stands in for the
        # whole (non-partitioned) table.
        partitions = get_table_partitions(db_name, table_name)
        if not partitions:
            logger.warning("No partitions found. Processing as non-partitioned table.")
            partitions = [""]

        # Step 4: compact each partition through the temp table.
        for part in partitions:
            logger.info(f"Processing partition: {part or 'NON-PARTITIONED'}")

            if part:
                spec_dict = extract_partition_spec(part)
                where_clause = build_partition_where_clause(spec_dict)
                # Static-partition spec for HQL: comma-separated with quoted
                # values, e.g. dt='2025-10-01', city='beijing'.
                # (The previous code interpolated the '/'-joined path
                # dt=2025-10-01/city=beijing, which is not valid inside
                # PARTITION (...) and fails for string/multi-key partitions.)
                partition_clause = ", ".join(f"{k}='{v}'" for k, v in spec_dict.items())
            else:
                where_clause = ""
                partition_clause = ""

            # Baseline row count for the post-write verification below.
            original_count = count_rows_in_partition(db_name, table_name, where_clause)
            logger.info(f"Original row count: {original_count}")

            # Clear any rows left over from the previous partition's pass.
            run_hive_command(f"TRUNCATE TABLE {db_name}.{temp_table_name};")

            # Copy the partition's rows into the temp table.
            insert_to_temp = f"INSERT OVERWRITE TABLE {db_name}.{temp_table_name} SELECT * FROM {db_name}.{table_name}"
            if where_clause:
                insert_to_temp += f" WHERE {where_clause}"
            insert_to_temp += ";"
            logger.info("Writing data to temporary table...")
            run_hive_command(insert_to_temp)

            # Write the consolidated data back over the original partition.
            if part:
                insert_back = f"INSERT OVERWRITE TABLE {db_name}.{table_name} PARTITION ({partition_clause}) SELECT * FROM {db_name}.{temp_table_name};"
            else:
                insert_back = f"INSERT OVERWRITE TABLE {db_name}.{table_name} SELECT * FROM {db_name}.{temp_table_name};"
            logger.info("Writing back to original table...")
            run_hive_command(insert_back)

            # Verify no rows were lost or duplicated by the round trip.
            new_count = count_rows_in_partition(db_name, table_name, where_clause)
            if new_count != original_count:
                raise RuntimeError(f"Row count mismatch! Original: {original_count}, After: {new_count}")
            logger.info(f"Row count verified: {new_count}")

        logger.info("All partitions processed successfully.")

    finally:
        # Step 5: always try to drop the temp table, even on failure,
        # but never let cleanup mask the original exception.
        try:
            logger.info(f"Dropping temporary table: {db_name}.{temp_table_name}")
            run_hive_command(f"DROP TABLE IF EXISTS {db_name}.{temp_table_name};")
        except Exception as e:
            logger.error(f"Failed to drop temp table: {e}")

if __name__ == "__main__":
    # CLI entry point: all five options are mandatory.
    cli = argparse.ArgumentParser(description="Hive 小文件合并工具（INSERT OVERWRITE 方式）")
    for flag, help_text in (
        ("--beeline-url", "Beeline JDBC URL，如 jdbc:hive2://host:2181/;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=hiveserver2"),
        ("--username", "Hive 用户名"),
        ("--password", "Hive 密码"),
        ("--database", "数据库名"),
        ("--table", "表名"),
    ):
        cli.add_argument(flag, required=True, help=help_text)

    main(cli.parse_args())