'''
@Company: TWL
@Author: xue jian
@Email: xuejian@kanzhun.com
@Date: 2020-04-23 15:03:24
'''
# -*- coding: UTF-8 -*-  # NOTE(review): PEP 263 requires the coding declaration on line 1 or 2; here (after the docstring) it has no effect. Harmless on Python 3, where UTF-8 is the default.
from kafka import KafkaProducer
from kafka import KafkaConsumer
from kafka import TopicPartition
from cityhash import CityHash64
import time, json, random
import threading
from train_data_pb2 import TrainData
from subprocess import *

# Load the feature config and reduce it to a {feature_name: slot_id} map.
# `with` guarantees the handle is closed even if json.load raises
# (the original open/close pair leaked the handle on a parse error).
with open("/data1/zangruozhou/sync/transfer_kafka/conf/nn_galaxy.fea", 'r') as fea_list_file:
    fea_list = json.load(fea_list_file)

# print(fea_list)
# Keep only the slot number per feature; the rest of the conf is unused here.
for fea, fea_conf in fea_list.items():
    fea_list[fea] = fea_conf['slot']
print(fea_list)

# Load the feature-name -> message-field-code mapping.
# `with` guarantees the handle is closed even if json.load raises.
with open("/data1/zangruozhou/sync/transfer_kafka/conf/conf.json", 'r') as fea_code_file:
    fea_code_tmp = json.load(fea_code_file)

fea_code = fea_code_tmp['fea_code']
# print(fea_code)


# Load bucket boundaries used to discretize continuous features.
# `with` guarantees the handle is closed even if json.load raises.
with open("/data1/zangruozhou/sync/transfer_kafka/conf/fea_galaxy.conf", 'r') as fea_cut_file:
    fea_cut = json.load(fea_cut_file)
fea_cut = fea_cut['fea_cut']

# Features hashed directly as a single categorical value per record.
category_fea = ["boss_id", "job_id", "exp_id", "geek_id", "boss_l1code", "boss_l2code", "geek_position", "geek_combine_code", "geek_degree_new", "geek_gender", "geek_degree", "geek_apply_status", "geek_workyears", "geek_school_level", "geek_cmp_level", "geek_overseas_tag", "geek_city", "geek_major", "boss_position", "geek_rev_work_year", "boss_combine_code", "boss_city", "job_workyears", "job_degree", "boss_cmp_level", "job_overseas_tag", "boss_title_type", "boss_comp_scale", "rcd_list_source"]
# Features whose raw value is a comma-separated sequence; each element is
# hashed separately (padded/truncated to exactly 10 items in transfer_data).
sequence_fea = ["b2g_workyears_recent10", "b2g_cmp_level_recent10", "b2g_school_level_recent10", "b2g_school_type1_recent10", "b2g_school_type2_recent10", "b2g_degree_recent10", "b2g_gender_recent10", "b2g_apply_status_recent10", "b2g_salary_recent10", "boss_addf_geek_recent10", "boss_addf_expect_recent10", "job_addf_geek_recent10", "job_addf_expect_recent10", "boss_success_geek_recent10"]

# Maps the message's rcdBzType string to the categorical code hashed into
# the rcd_list_source slot.
rcd_source_dict = {'f1_brcd': '0', 'lat_brcd': '1', 'f2_brcd_new': '2', 'f2_brcd_seeme': '3', 'f2_brcd_interesting': '4', 'brcd_search': '5'}

# Kafka brokers shared by the source consumer and (later) the sink producer.
bootstrap_servers=['172.21.32.178:9092', '172.21.32.154:9092', '172.21.32.41:9092', '172.21.32.50:9092', '172.21.32.98:9092']
# Source topic carrying the raw (un-hashed) sample messages.
topic = "boss.arc.recommender.bossrec_sample_flow"
# Start from the latest offset; consumer group 'transfer' owns the offsets.
consumer = KafkaConsumer(topic, auto_offset_reset='latest', group_id='transfer', bootstrap_servers=bootstrap_servers)
# consumer.unsubscribe()

# (Disabled experiment: seek each partition back ~20 hours by timestamp and
# report the resulting backlog.)
# partitions = consumer.partitions_for_topic(topic)
# time_dict = {}
# for part in partitions:
#     time_dict[TopicPartition(topic, part)] = time.time()*1000  - 20 * 3600 * 1000

# ot_dict = consumer.offsets_for_times(time_dict)
# print(ot_dict)
# for k, v in ot_dict.items():
#     if v != 'null':
#         consumer.assign([k])
#         consumer.seek(k, v[0])
#         print(consumer.end_offsets([k]))
#         last_offset = consumer.end_offsets([k])[k]
#         no_consume_num = last_offset - v[0]
#         print(no_consume_num)
def string2fid(value, slot):
    """Pack a hashed feature value and its slot id into one 64-bit fid.

    The low 52 bits hold CityHash64 of the stringified value; the bits
    above bit 51 carry the slot id.
    """
    hashed = CityHash64(str(value))
    low52 = hashed & ((1 << 52) - 1)
    return low52 | (slot << 52)

# def transfer_data2(data):
#         # print(type(data.decode()))
#         data = json.loads(data.decode())
#         for fea in ['geekId', 'bossId', 'jobId', 'expectId']:
#             if data.get(fea, 0) == 0:
#                 return None
#         train_data = TrainData()
#         fids = train_data.fids
#         fids.append(string2fid(data.get('geekId', ''), fea_list['geek_id']))
#         fids.append(string2fid(data.get('jobId', ''), fea_list['job_id']))
#         fids.append(string2fid(data.get('expectId', ''), fea_list['exp_id']))
#         fids.append(string2fid(data.get('bossId', ''), fea_list['boss_id']))
#         train_data.labels.append(float(data.get('label', {}).get('detail', 0)))
#         train_data.labels.append(float(data.get('label', {}).get('addf', 0)))
#         #handle time fea
#         time_step = data.get('ts', 0)/1000
#         timeArray = time.localtime(time_step)
#         fids.append(string2fid(time.strftime("%H", timeArray), fea_list['hour_of_day']))
#         day_of_week = int(time.strftime("%w", timeArray))
#         if day_of_week == 0:
#              day_of_week = 7   
#         fids.append(string2fid(day_of_week, fea_list['day_of_week']))
#         #handle all fea
#         fea1 = data.get('listFeatures', {})
#         fea2 = data.get('userFeatures', {})
#         fea1.update(fea2)
#         for fea, slot in fea_list.items():
#             if fea in ['geek_id', 'job_id', 'exp_id', 'boss_id', 'hour_of_day', 'day_of_week'] or fea not in fea_code:
#                 continue
#             if fea in category_fea:
#                 fids.append(string2fid(fea1.get(fea_code[fea], "0"), slot))
#             elif fea in sequence_fea:
#                 tmp_fea = fea1.get(fea_code[fea], "0")
#                 for tmp_value in tmp_fea.split(','):
#                      fids.append(string2fid(tmp_value, slot))   
#             else:
#                 tmp_cut = fea_cut[fea]
#                 tmp_fea = float(fea1.get(fea_code[fea], "0"))
#                 tmp_res = 0
#                 for i in range(len(tmp_cut) - 1):
#                     if tmp_fea > tmp_cut[i] and tmp_fea <= tmp_cut[i+1]:
#                         tmp_res = i + 1
#                 if tmp_fea > tmp_cut[-1]:
#                     tmp_res = len(tmp_cut) + 1
#                 fids.append(string2fid(tmp_res, slot))   
#         res = train_data.SerializeToString()
#         # res = str(res)[2:-1]
#         # print('true ? ', res.replace('\n', 'bosszhipin') == '')
#         # return res.replace('\n', 'bosszhipin')
#         # return b""
#         # return res.replace('\\n', 'bosszhipin') + '\n'
#         res = str(res, encoding="utf-8")
#         print(res)
#         return res
# Debug: dump the final feature-name -> slot mapping resolved at startup.
print('fea_list = ', fea_list)
def transfer_data(data):
        """Convert one raw kafka message (JSON bytes) into a JSON train record.

        Returns a JSON string with keys:
          - 'labels': [detail, addf] floats from the message's label dict
          - 'ts': message timestamp in seconds
          - 'fids': flat list of 64-bit hashed feature ids (slot-encoded)
        or None when any required id (geekId/bossId/jobId/expectId) is
        missing or zero.

        NOTE: the order in which fids are appended is part of the downstream
        contract — do not reorder the appends below.
        """
        # print(type(data.decode()))
        data = json.loads(data.decode())
        # Drop records lacking any of the four required entity ids.
        for fea in ['geekId', 'bossId', 'jobId', 'expectId']:
            if data.get(fea, 0) == 0:
                return None
        train_data = {'labels': []}
        fids = []
        # Entity-id features, hashed into their configured slots.
        fids.append(string2fid(data.get('geekId', ''), fea_list['geek_id']))
        fids.append(string2fid(data.get('jobId', ''), fea_list['job_id']))
        fids.append(string2fid(data.get('expectId', ''), fea_list['exp_id']))
        fids.append(string2fid(data.get('bossId', ''), fea_list['boss_id']))

        # Two labels: detail view and add-friend, defaulting to 0.
        train_data['labels'].append(float(data.get('label', {}).get('detail', 0)))
        train_data['labels'].append(float(data.get('label', {}).get('addf', 0)))
        #handle time fea
        # ts arrives in milliseconds; convert to seconds.
        time_step = data.get('ts', 0)/1000
        train_data['ts'] =  time_step
        timeArray = time.localtime(time_step)
        fids.append(string2fid(time.strftime("%H", timeArray), fea_list['hour_of_day']))
        # %w gives 0 for Sunday; unlike the disabled transfer_data2 above,
        # 0 is NOT remapped to 7 here — presumably intentional, but verify.
        day_of_week = int(time.strftime("%w", timeArray))  
        fids.append(string2fid(day_of_week, fea_list['day_of_week']))

        # NOTE(review): an rcdBzType value absent from rcd_source_dict raises
        # KeyError here; the caller's except then drops the whole message.
        fids.append(string2fid(rcd_source_dict[data.get('rcdBzType', 'f1_brcd')], fea_list['rcd_list_source']))

        #handle all fea
        # Merge list- and user-level feature maps; user features win on clash.
        fea1 = data.get('listFeatures', {})
        fea2 = data.get('userFeatures', {})
        fea1.update(fea2)
        seq_num = 0
        seq_fea = []
        for fea, slot in fea_list.items():
            # Skip features already emitted above and any without a field code.
            if fea in ['geek_id', 'job_id', 'exp_id', 'boss_id', 'hour_of_day', 'day_of_week', 'rcd_list_source'] or fea not in fea_code:
                # if fea not in fea_code:
                #     print('not in = ', fea)
                continue
        #     print(fea + ':' + str(slot))
            if fea == 'geek_rev_work_year':
                # Special case: value looks like "years:...", keep the part
                # before the colon only.
                tmp_fea = fea1.get(fea_code[fea], "0")
                tmp_fea = tmp_fea.split(':')[0]
                fids.append(string2fid(tmp_fea, slot))
            elif fea in category_fea:
                # Plain categorical: hash the raw string, "0" when absent.
                fids.append(string2fid(fea1.get(fea_code[fea], "0"), slot))
            elif fea in sequence_fea:
                # Comma-separated sequence: pad/truncate to exactly 10 items
                # ("-3" is the padding sentinel) so fid count is fixed.
                seq_num += 1
                seq_fea.append(fea)
                tmp_fea = fea1.get(fea_code[fea], "-3")
                tmp_fea = tmp_fea.split(',')
                # print(len(tmp_fea.split(',')))
                if len(tmp_fea) > 10:
                    tmp_fea = tmp_fea[:10]
                elif len(tmp_fea) < 10:
                    while len(tmp_fea) != 10:
                        tmp_fea.append('-3')
                for tmp_value in tmp_fea:
                     fids.append(string2fid(tmp_value, slot))   
            else:
                # Continuous feature: bucketize against fea_cut boundaries.
                # Bucket 0 = at/below the first cut; i+1 = (cut[i], cut[i+1]];
                # len(cut)+1 = above the last cut. NOTE(review): bucket index
                # len(cut) is never produced — matches transfer_data2, so the
                # gap appears deliberate; confirm before "fixing".
                tmp_cut = fea_cut[fea]
                tmp_fea = float(fea1.get(fea_code[fea], "0"))
                tmp_res = 0
                for i in range(len(tmp_cut) - 1):
                    if tmp_fea > tmp_cut[i] and tmp_fea <= tmp_cut[i+1]:
                        tmp_res = i + 1
                if tmp_fea > tmp_cut[-1]:
                    tmp_res = len(tmp_cut) + 1
                fids.append(string2fid(tmp_res, slot))   
                    
        # print('seq_num = ', seq_num)
        # print(seq_fea)
        # res = str(res)[2:-1]
        # print('true ? ', res.replace('\n', 'bosszhipin') == '')
        # return res.replace('\n', 'bosszhipin')
        # return b""
        # return res.replace('\\n', 'bosszhipin') + '\n'
        # print(len(fids))
        train_data['fids'] = fids
        # print('train_data = ', train_data)
        # a = json.dumps(train_data)
        # print('after dump')
        return json.dumps(train_data)

i = 0  # messages forwarded since the last progress log

producer = KafkaProducer(bootstrap_servers=bootstrap_servers)
produce_topic = 'boss.arc.recommender.bossrec_fid_flow'

# Main pump: consume raw sample messages, convert each to the hashed-fid
# representation, and republish on the fid topic.
for msg in consumer:
    try:
        res = transfer_data(msg.value)
        if res is None:
            # Record lacked a required id (geekId/bossId/jobId/expectId);
            # skip it. (The original relied on None.encode() raising into
            # the except block to get the same effect.)
            continue
        producer.send(produce_topic, res.encode(encoding='utf-8'))
    except Exception:
        # Keep the pipeline alive on malformed records, but — unlike the
        # original bare `except:` — let KeyboardInterrupt/SystemExit
        # propagate so the script can be stopped cleanly.
        print('transfer error!!!')
        continue
    i += 1
    if i > 6000000:
        # Progress heartbeat (original message said "2 million" but the
        # threshold is 6,000,000).
        print('consumed 6 million messages')
        i = 0