import json
import base64
import requests
from confluent_kafka import Consumer, Producer, KafkaError
import argparse

def parse_args(argv=None):
    """Parse command-line arguments for the Kafka merge program.

    Args:
        argv: Optional list of argument strings. Defaults to ``None``,
            in which case argparse reads ``sys.argv[1:]`` — existing
            callers are unaffected, but the parser can now be exercised
            in isolation (e.g. in tests).

    Returns:
        argparse.Namespace with bootstrap_servers, group_id, topic1,
        topic2, topic3, output_topic, libId and externalId attributes.
    """
    parser = argparse.ArgumentParser(description="Kafka消息合并程序")
    parser.add_argument("--bootstrap-servers", required=True, help="Kafka服务器地址，如192.168.1.100:9092")
    parser.add_argument("--group-id", default="merge_consumer_group", help="消费者组ID")
    parser.add_argument("--topic1", required=True, help="topic1名称")
    parser.add_argument("--topic2", required=True, help="topic2名称")
    parser.add_argument("--topic3", required=True, help="topic3名称")
    parser.add_argument("--output-topic", required=True, help="输出topic名称")
    parser.add_argument("--libId", required=True, help="libId名称")
    parser.add_argument("--externalId", required=True, help="externalId名称")
    return parser.parse_args(argv)

def fetch_image_as_base64(url):
    """Download the image at *url* and return its base64 encoding as a str.

    On any failure (network error, timeout, non-2xx status) the problem is
    logged and ``None`` is returned — callers treat a falsy result as
    "no photo available".
    """
    try:
        resp = requests.get(url, timeout=5)
        resp.raise_for_status()
        encoded = base64.b64encode(resp.content)
        return encoded.decode('utf-8')
    except Exception as e:
        print(f"获取图片失败 {url}: {str(e)}")
        return None

def delivery_report(err, msg):
    """Kafka producer delivery callback: log success or failure per message."""
    if err is None:
        print(f'消息已发送到 {msg.topic()} [{msg.partition()}]')
    else:
        print(f'消息发送失败: {err}')

def process_message(msg, user_data, producer, output_topic, externalId, libId, topics=None):
    """Merge one Kafka message into per-user state; emit when complete.

    topic1/topic2 messages carry identity fields (GMSFHM/SFZH as the user
    id, XM as the name); topic3 messages carry a photo URL plus the
    externalId/libId tags. Once a user's accumulated record contains
    certificateNo, labels and photoBase64, it is serialized to JSON,
    produced to *output_topic*, and removed from *user_data*.

    Args:
        msg: Kafka message exposing ``topic()`` and ``value()`` (bytes of
            a UTF-8 JSON object).
        user_data: dict mapping user id -> partially merged record;
            mutated in place.
        producer: Kafka producer with ``produce()``/``flush()``.
        output_topic: name of the topic merged records are published to.
        externalId: value stored on records coming from topic3.
        libId: value stored on records coming from topic3.
        topics: optional ``(topic1, topic2, topic3)`` tuple. When omitted
            it falls back to the module-level ``args`` (the previous,
            implicit behavior), so existing callers are unchanged; passing
            it explicitly removes the hidden dependency on that global.
    """
    try:
        # Backward-compatible fallback to the CLI-args global.
        if topics is None:
            topics = (args.topic1, args.topic2, args.topic3)
        topic1, topic2, topic3 = topics

        data = json.loads(msg.value().decode('utf-8'))
        topic = msg.topic()

        # Pick the user-id field based on the source topic.
        if topic == topic1:
            userid = data['GMSFHM']
        elif topic in (topic2, topic3):
            userid = data['SFZH']
        else:
            # Message from an unexpected topic: ignore it.
            return

        # Create the partial record on first sight of this user.
        record = user_data.setdefault(userid, {'userid': userid})

        # Merge the topic-specific fields into the record.
        if topic == topic1:
            record['certificateNo'] = userid
            record['labels'] = ["常驻人口"]
            # NOTE(review): 'psersonName' looks like a typo for 'personName',
            # but it is a runtime key the downstream consumer may rely on —
            # confirm the output schema before renaming.
            record['psersonName'] = data['XM']
        elif topic == topic2:
            record['certificateNo'] = userid
            record['labels'] = ["暂住人口"]
            record['psersonName'] = data['XM']
        elif topic == topic3:
            record['certificateNo'] = userid
            record['externalId'] = externalId
            record['libId'] = libId
            # Photo fetch is best-effort; on failure the record simply
            # stays incomplete until a retry arrives.
            base64_img = fetch_image_as_base64(data['URL'])
            if base64_img:
                record['photoBase64'] = base64_img

        # Emit once identity (topic1/topic2) and photo (topic3) parts exist.
        if all(key in record for key in ('certificateNo', 'labels', 'photoBase64')):
            print(json.dumps(record, ensure_ascii=False))
            producer.produce(
                output_topic,
                json.dumps(record, ensure_ascii=False).encode('utf-8'),
                callback=delivery_report
            )
            producer.flush()
            # Drop the merged record so memory does not grow without bound.
            del user_data[userid]

    except Exception as e:
        # Best-effort: log and keep consuming rather than crash the poll loop.
        print(f"处理消息出错: {str(e)}")

if __name__ == "__main__":
    args = parse_args()
    
    # Kafka消费者配置
    consumer_conf = {
        'bootstrap.servers': args.bootstrap_servers,
        'group.id': args.group_id,
        'auto.offset.reset': 'earliest',
        'enable.auto.commit': False  # 手动提交offset
    }
    
    # Kafka生产者配置
    producer_conf = {
        'bootstrap.servers': args.bootstrap_servers
    }
    
    # 创建Kafka消费者
    consumer = Consumer(consumer_conf)
    consumer.subscribe([args.topic1, args.topic2, args.topic3])
    
    # 创建Kafka生产者
    producer = Producer(producer_conf)
    
    # 存储用户数据的字典
    user_data = {}
    
    # 主消费循环
    try:
        while True:
            msg = consumer.poll(1.0)
            if msg is None:
                continue
            if msg.error():
                if msg.error().code() == KafkaError._PARTITION_EOF:
                    continue
                else:
                    print(f"消费者错误: {msg.error()}")
                    break
            
            process_message(msg, user_data, producer, args.output_topic,args.externalId,args.libId)
            # 手动提交offset
            consumer.commit(asynchronous=False)
    
    except KeyboardInterrupt:
        print("程序终止")
    finally:
        consumer.close()
        producer.flush()

