'''
@Company: TWL
@Author: xue jian
@Email: xuejian@kanzhun.com
@Date: 2020-04-23 15:05:20
'''
# -*- coding: UTF-8 -*-
from kafka import KafkaProducer
from kafka import KafkaConsumer
from kafka import TopicPartition
from cityhash import CityHash64
import time, json, random
import threading
from train_data_pb2 import TrainData
from subprocess import *

# --- Feature configuration -------------------------------------------------
# fea_list: feature name -> slot id.  The raw file maps name -> config dict;
# only the 'slot' field is needed downstream.  `with` guarantees the handle
# is closed even if json.load raises (the original open/close pair did not).
with open("/data1/arc_six/wuxiushan/transfer_kafka/conf/nn_flash.fea", 'r') as fea_list_file:
    fea_list = json.load(fea_list_file)
fea_list = {fea: fea_conf['slot'] for fea, fea_conf in fea_list.items()}
print(fea_list)

# fea_code: feature name -> the key under which the feature appears in the
# incoming sample's listFeatures/userFeatures dicts.
with open("/data1/arc_six/wuxiushan/transfer_kafka/conf/conf.json", 'r') as fea_code_file:
    fea_code = json.load(fea_code_file)['fea_code']

# fea_cut: continuous feature name -> ordered list of bucket boundaries.
with open("/data1/arc_six/wuxiushan/transfer_kafka/conf/fea_flash.conf", 'r') as fea_cut_file:
    fea_cut = json.load(fea_cut_file)['fea_cut']

# Features hashed as single categorical values.
category_fea = ["boss_id", "job_id", "exp_id", "geek_id", "boss_l1code", "boss_l2code", "geek_position", "geek_combine_code", "geek_degree_new", "geek_gender", "geek_degree", "geek_apply_status", "geek_workyears", "geek_school_level", "geek_cmp_level", "geek_overseas_tag", "geek_city", "geek_major", "boss_position", "geek_rev_work_year", "boss_combine_code", "boss_city", "job_workyears", "job_degree", "boss_cmp_level", "job_overseas_tag", "boss_title_type", "boss_comp_scale", "rcd_list_source"]
# Features carrying a comma-separated history, padded/truncated to length 10.
sequence_fea = ["b2g_workyears_recent10", "b2g_cmp_level_recent10", "b2g_school_level_recent10", "b2g_school_type1_recent10", "b2g_school_type2_recent10", "b2g_degree_recent10", "b2g_gender_recent10", "b2g_apply_status_recent10", "b2g_salary_recent10", "boss_addf_geek_recent10", "boss_addf_expect_recent10", "job_addf_geek_recent10", "job_addf_expect_recent10", "boss_success_geek_recent10"]

# Recommendation list source -> categorical code fed into rcd_list_source.
rcd_source_dict = {'f1_grcd': '0', 'lat_grcd': '1', 'f2_grcd_new': '2', 'f2_grcd_seeme': '3', 'f2_grcd_interesting': '4', 'grcd_search': '5'}

bootstrap_servers = ['172.21.32.178:9092', '172.21.32.154:9092', '172.21.32.41:9092', '172.21.32.50:9092', '172.21.32.98:9092']
topic = "boss.arc.recommender.geekrec_sample_flow"
consumer = KafkaConsumer(topic, auto_offset_reset='latest', group_id='transfer', bootstrap_servers=bootstrap_servers)

def string2fid(value, slot):
    """Hash a raw feature value into a 64-bit fid.

    The low 52 bits hold CityHash64 of the stringified value; the high
    12 bits carry the slot id, so fids from different slots never collide.
    """
    low_52_mask = (1 << 52) - 1
    hashed = CityHash64(str(value)) & low_52_mask
    return hashed | (slot << 52)

print('fea_list = ', fea_list)
def transfer_data(data):
    """Convert one raw sample (JSON-encoded bytes from Kafka) into the
    serialized fid/label record published downstream.

    Returns a JSON string with keys 'labels' ([detail, addf] as floats),
    'ts' (epoch seconds) and 'fids' (list of 64-bit hashed feature ids),
    or None when the sample is dropped (missing ids, or downsampled).
    """
    data = json.loads(data.decode())
    # Drop samples missing any of the four required entity ids.
    for fea in ['geekId', 'bossId', 'jobId', 'expectId']:
        if data.get(fea, 0) == 0:
            return None
    # Downsample negatives: keep roughly 1 in 15 samples whose addf label
    # is the string '0'.
    # NOTE(review): the .get default is int 0, which never equals '0', so a
    # sample with no addf label at all is always kept — confirm intended.
    if data.get('label', {}).get('addf', 0) == '0':
        if random.randint(1, 15) != 5:
            return None
    train_data = {'labels': []}
    fids = []
    # Entity ids are hashed directly into their configured slots.
    fids.append(string2fid(data.get('geekId', ''), fea_list['geek_id']))
    fids.append(string2fid(data.get('jobId', ''), fea_list['job_id']))
    fids.append(string2fid(data.get('expectId', ''), fea_list['exp_id']))
    fids.append(string2fid(data.get('bossId', ''), fea_list['boss_id']))

    train_data['labels'].append(float(data.get('label', {}).get('detail', 0)))
    train_data['labels'].append(float(data.get('label', {}).get('addf', 0)))
    # Time features: 'ts' arrives in milliseconds; derive local hour-of-day
    # and day-of-week as categorical features.
    time_step = data.get('ts', 0)/1000
    train_data['ts'] =  time_step
    timeArray = time.localtime(time_step)
    fids.append(string2fid(time.strftime("%H", timeArray), fea_list['hour_of_day']))
    day_of_week = int(time.strftime("%w", timeArray))  
    fids.append(string2fid(day_of_week, fea_list['day_of_week']))

    # Recommendation source code; an unknown rcdBzType raises KeyError
    # (swallowed by the caller's except, so the sample is skipped).
    fids.append(string2fid(rcd_source_dict[data.get('rcdBzType', 'f1_grcd')], fea_list['rcd_list_source']))

    # Remaining features come from listFeatures merged with userFeatures
    # (userFeatures wins on key collisions).
    fea1 = data.get('listFeatures', {})
    fea2 = data.get('userFeatures', {})
    fea1.update(fea2)
    seq_num = 0
    seq_fea = []
    for fea, slot in fea_list.items():
        # Skip features already emitted above, and any without a raw-data key.
        if fea in ['geek_id', 'job_id', 'exp_id', 'boss_id', 'hour_of_day', 'day_of_week', 'rcd_list_source'] or fea not in fea_code:
            continue
        if fea == 'geek_rev_work_year':
            # Special case: value is "years:extra"; only the prefix is used.
            tmp_fea = fea1.get(fea_code[fea], "0")
            tmp_fea = tmp_fea.split(':')[0]
            fids.append(string2fid(tmp_fea, slot))
        elif fea in category_fea:
            fids.append(string2fid(fea1.get(fea_code[fea], "0"), slot))
        elif fea in sequence_fea:
            # Sequence feature: comma-separated history, truncated or padded
            # with the sentinel '-3' to exactly 10 entries, one fid each.
            seq_num += 1
            seq_fea.append(fea)
            tmp_fea = fea1.get(fea_code[fea], "-3")
            tmp_fea = tmp_fea.split(',')
            if len(tmp_fea) > 10:
                tmp_fea = tmp_fea[:10]
            elif len(tmp_fea) < 10:
                while len(tmp_fea) != 10:
                    tmp_fea.append('-3')
            for tmp_value in tmp_fea:
                    fids.append(string2fid(tmp_value, slot))   
        else:
            # Continuous feature: bucketize against fea_cut boundaries.
            # Values <= tmp_cut[0] map to bucket 0; (tmp_cut[i], tmp_cut[i+1]]
            # maps to i+1.
            # NOTE(review): the overflow bucket is len(tmp_cut)+1, which skips
            # index len(tmp_cut) — looks like an off-by-one, but changing it
            # would shift fids for already-trained models; confirm first.
            tmp_cut = fea_cut[fea]
            tmp_fea = float(fea1.get(fea_code[fea], "0"))
            tmp_res = 0
            for i in range(len(tmp_cut) - 1):
                if tmp_fea > tmp_cut[i] and tmp_fea <= tmp_cut[i+1]:
                    tmp_res = i + 1
            if tmp_fea > tmp_cut[-1]:
                tmp_res = len(tmp_cut) + 1
            fids.append(string2fid(tmp_res, slot))   
                
    train_data['fids'] = fids
    return json.dumps(train_data)

# --- Consume raw samples, transform, republish as fid records --------------
i = 0  # count of successfully transformed messages in the current batch

producer = KafkaProducer(bootstrap_servers=bootstrap_servers)
produce_topic = 'boss.arc.recommender.geekrec_fid_flow'
for msg in consumer:
    try:
        record = transfer_data(msg.value)
    except Exception as e:
        # Was a bare `except:` around the send too — that swallowed
        # KeyboardInterrupt/SystemExit and reported routine None returns
        # (dropped samples) as errors.  Log what actually failed.
        print('transfer error!!!', e)
        continue
    if record is None:
        # Sample deliberately dropped (missing ids or downsampled negative).
        continue
    producer.send(produce_topic, record.encode(encoding='utf-8'))
    i += 1
    if i > 6000000:
        # Original message said "2 million" but the threshold is 6,000,000.
        print('consumed 6 million records')
        i = 0