from kafka import KafkaConsumer
import json
from multiprocessing import Pool
import time
import threading


## Parameter definitions
# Kafka topics (interfaces) handled by this program
handler_API = [
    "identity_pro",
    "financial_pro",
    "internet_pro",
    "social_pro",
    "trip_pro",
    "communication_pro",
]
# Consumer-group id used by each topic's handler
gids = {
    "identity_pro": "ide_g",
    "financial_pro": "fin_g",
    "internet_pro": "int_g",
    "social_pro": "soc_g",
    "trip_pro": "tri_g",
    "communication_pro": "com_g",
}
# Number of consumer threads to run under each group id
xf = {
    "identity_pro": 1,
    "financial_pro": 1,
    "internet_pro": 1,
    "social_pro": 1,
    "trip_pro": 1,
    "communication_pro": 1,
}
# Kafka bootstrap server address(es) and port(s)
kafka_servers = ["192.168.30.120:9092"]
# Expected number of data fields per message, per topic
check_data = {
    "identity_pro": 7,
    "financial_pro": 4,
    "internet_pro": 5,
    "social_pro": 3,
    "trip_pro": 3,
    "communication_pro": 4,
}

class Handler(object):
    """Consume JSON messages from Kafka topics, validate their field counts,
    and append them to per-topic log files.

    ``multirun`` spawns one worker process per topic; each worker
    (``conumers_thread_num``) starts the configured number of consumer
    threads, each running the blocking ``conumers`` loop.
    """

    def __init__(self, handler_API, gids, xf, kafka_servers, check_data):
        # handler_API: list of topic names to consume
        # gids: topic -> consumer group id
        # xf: topic -> number of consumer threads for that topic
        # kafka_servers: list of "host:port" bootstrap servers
        # check_data: topic -> expected number of fields per message
        self.hanapi = handler_API
        self.gids = gids
        self.xf = xf
        self.kafka_servers = kafka_servers
        self.check_data = check_data

    def conumers(self, api_name, kfksers):
        """Consume topic *api_name* forever and log each message.

        Messages whose dict has the expected field count are appended to
        ``d:/<topic>.txt``; incomplete messages are appended with an error
        marker. This method never returns under normal operation.
        """
        gid = self.gids[api_name]
        # auto_offset_reset defaults to "latest"; switch to "earliest" to
        # replay the topic from the beginning.
        # Fix: gid was previously computed but never used — the per-topic
        # consumer group configured in `gids` is now actually applied.
        con = KafkaConsumer(api_name,
                            bootstrap_servers=kfksers.split(","),
                            group_id=gid,
                            auto_offset_reset="latest",
                            value_deserializer=json.loads)
        expected = int(self.check_data[api_name])
        for message in con:
            # Fix: take the timestamp per message; the original captured it
            # once before the loop, so every line carried a stale time.
            tm = time.strftime("%Y-%m-%d %H:%M:%S")
            print("[%s] %s:%d:%d:key:%s" % (tm, message.topic, message.partition, message.offset, message.key))
            data = message.value
            # The `with` block closes the file; the original's extra
            # F.close() after the block was redundant and is removed.
            with open("d:/%s.txt" % api_name, "a") as F:
                if expected == len(data.keys()):
                    print(data)
                    F.write("[%s]--%s\n" % (tm, data))
                else:
                    print("%s--数据不完整，预期%s，实际%s" % (data, expected, len(data.keys())))
                    F.write("[%s]--%s--%s" % (tm, data, "数据字段数不符合要求！！！\n"))

    def conumers_thread_num(self, api_name, kfksers):
        """Start the configured number of consumer threads for *api_name*
        and block until they exit (normally: forever)."""
        thread_num = int(self.xf[api_name])
        threads = []
        for _ in range(thread_num):
            t = threading.Thread(target=conumers_wrapper, args=(self, api_name, kfksers))
            t.daemon = True  # setDaemon() is deprecated; use the attribute
            t.start()
            threads.append(t)
        # Fix: join AFTER starting all threads. The original joined inside
        # the start loop, and since conumers() never returns, only the first
        # thread ever ran — the others were never started.
        for t in threads:
            t.join()

    def multirun(self):
        """Spawn one worker process per topic and wait for all of them."""
        # Fix: size the pool to the number of topics. The hard-coded Pool(5)
        # starved one of the 6 topics forever, because workers never return.
        p = Pool(len(self.hanapi))
        kfksers = ",".join(self.kafka_servers)
        for i, api_name in enumerate(self.hanapi):
            print("开启子进程%s" % i)
            p.apply_async(conumers_thread_num_wrapper, args=(self, api_name, kfksers))
        print('等待所有添加的进程运行完毕。。。')
        p.close()
        p.join()


def conumers_wrapper(cls_instance, api_name, kfksers):
    """Module-level proxy for ``Handler.conumers`` so it can be used as a
    thread/process target (bound methods of `self` aren't directly usable
    with ``multiprocessing`` on all platforms)."""
    consume = cls_instance.conumers
    return consume(api_name, kfksers)

def conumers_thread_num_wrapper(cls_instance, api_name, kfksers):
    """Module-level proxy for ``Handler.conumers_thread_num`` so the pool
    can pickle the call target."""
    spawn_consumers = cls_instance.conumers_thread_num
    return spawn_consumers(api_name, kfksers)

if __name__ == "__main__":
    Hd = Handler(handler_API,gids,xf,kafka_servers,check_data)
    Hd.multirun()