# coding=utf-8
"""
Author  : Jane
Contact : xijian@ict.ac.cn
Time    : 2021/3/17 9:57
Desc:
https://www.jianshu.com/p/d34391a380da
https://www.cnblogs.com/rexcheny/articles/9463979.html
"""
import tensorflow as tf
import pkg_resources

from kafka import KafkaConsumer, TopicPartition, KafkaProducer
from kafka.errors import KafkaError, KafkaTimeoutError

import json
import pandas as pd
import numpy as np
import time
import logging
import os

import sys
sys.path.append('/home/xijian/pycharm_projects/JSNews/')
from src.junshi.classify.han.han_master.predict_online import create_hanstyle_inputdata_batch_online, _init
from src.junshi.classify.ensemble.config import *
from src.junshi.classify.ensemble.eval import load_all_models, predict_by_all_models, ensemble_predict_by_vote
from src.junshi.db.mysql_db import dbapi



logging.basicConfig(level=logging.INFO)
os.environ['CUDA_VISIBLE_DEVICES'] = '3'

# Enable on-demand GPU memory growth so TensorFlow does not grab all VRAM up front.
physical_gpus = tf.config.experimental.list_physical_devices(device_type='GPU')
if physical_gpus:
    try:
        for device in physical_gpus:
            tf.config.experimental.set_memory_growth(device, True)
        logical_gpus = tf.config.experimental.list_logical_devices(device_type='GPU')
        print('************************** ', len(physical_gpus), 'Physical GPUs, ', len(logical_gpus), 'Logical GPUs')
    except RuntimeError as e:
        # set_memory_growth must be called before any GPU has been initialized.
        print(e)


# ---- Kafka configuration ----
kafka_host = '10.10.149.20'
kafka_port = 6667
# alternative inbound topics: 'js_news', 'history-data'
consume_kafka_topic = 'junshi_guanzhu'
produce_kafka_topic = 'js6class'
# alternative group id: 'TestGroup001'
kafka_groupid = 'Group003'
clientid = 'Test'

# Consumer reads UTF-8 JSON records (both key and value) from the inbound topic,
# starting from the earliest uncommitted offset for this group.
consumer = KafkaConsumer(
        consume_kafka_topic,
        group_id=kafka_groupid,
        bootstrap_servers=f'{kafka_host}:{kafka_port}',
        client_id=clientid,
        auto_offset_reset="earliest",
        key_deserializer=lambda raw: json.loads(raw.decode('utf-8')),
        value_deserializer=lambda raw: json.loads(raw.decode('utf-8')),
        )
# Trigger partition assignment; consumer.seek_to_end() could be used instead
# to skip straight to the newest data.
consumer.poll(timeout_ms=10)

# Producer publishes classified records back to Kafka as UTF-8 JSON.
# NOTE(review): 'Procucer01' looks like a typo for 'Producer01'; it only shows up
# in broker-side logs/metrics — confirm before renaming.
producer = KafkaProducer(
        bootstrap_servers=f'{kafka_host}:{kafka_port}',
        client_id='Procucer01',
        key_serializer=lambda obj: json.dumps(obj, ensure_ascii=False).encode('utf-8'),
        value_serializer=lambda obj: json.dumps(obj, ensure_ascii=False).encode('utf-8'),
        acks=1,              # wait for the partition leader's ack only
        retries=3,
        batch_size=1048576,  # 1 MiB batch buffer
        linger_ms=100        # flush buffered records every 100 ms even if the batch is not full
        )


# Database connection.
# NOTE(review): credentials are hard-coded in source — consider moving them to
# environment variables or a config file.
dbapi.connect2db('root', 'mzy123', 'jkw_demo', ip='10.10.149.20')
# Parameterized INSERT for classified records; the 13 placeholders must stay in
# sync with the 13-element row built in the main loop below.
sql_insert = "INSERT INTO classification_gz \
(title, content, publish_time, collect_time, site, url, source, news_author, mainNavigation, subNavigation, pred_label_gz, pred_label_area, text_id) \
VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"

STOPWORDS_SET = set()  # stop-word set (appears unused in this file)
STOPWORDS_FILENAME = 'zh_data/stopwords.txt'  # stop-word file name
USERDICT_FILENAME = 'zh_data/all_js_keywords.txt'  # user dictionary of keywords (appears unused here)


# Tokenizer/vocabulary processor shared by the models.
# TOKENIZER_PATH is presumably provided by the `config` star import — confirm.
vocab_processor = _init(TOKENIZER_PATH)




if __name__=='__main__':
    # Checkpoint directories for the four ensemble members (HAN, TextCNN, XLNet, DPCNN).
    model_name = [
        '../han/han_master/checkpoint_w2v/20210311',
        '../textcnn/tcnn_master/checkpoint_w2v/20210311',
        '../xlnet/xlnet_master/save/20210403/epoch30/checkpoint/',
        '../dpcnn/dpcnn_tf/save/20210422/checkpoint/'
    ]
    starttime = time.time()
    ensemble_models = load_all_models(model_name)
    endtime = time.time()
    logging.info(f'ensemble 加载模型耗时：{endtime - starttime:.2f}s')

    try:
        logging.info('*'*27 + ' 开始获取kafka数据 ' + '*'*27)
        # Consume forever: classify each news item, persist it, and republish it
        # with its predicted label attached.
        for msg in consumer:
            data = msg.value
            text = data['content']
            # Each member model predicts independently; a majority vote picks the label.
            predicted_labels, predicted_probs = predict_by_all_models(model_name, ensemble_models, [text])
            labels = list(range(num_classes))
            y_pred = ensemble_predict_by_vote(labels, predicted_labels, predicted_probs, classifier_num=len(model_name))
            print('*'*27, y_pred)

            data['pred_label'] = id_to_labels[y_pred[0]]

            # Row for the classification_gz INSERT; order must match sql_insert's
            # column list exactly.
            _data = [
                data['title'],
                data['content'],
                data['time'],       # news publish time
                # NOTE(review): 'inserTime' (news collect time) looks misspelled, but it
                # must match the upstream message schema — confirm before changing.
                data['inserTime'],
                data['site'],
                data['url'],
                data['source'],
                data['news_author'],
                data['mainNavigation'],
                data['subNavigation'],
                data['pred_label'],
                data['area'],       # -> pred_label_area column
                data['id'],         # -> text_id column
            ]
            dbapi.batch_insert(sql_insert, [_data])

            send_msg = {
                'key': None,
                'value': data,
                'partition': None
            }
            try:
                producer.send(
                    produce_kafka_topic,
                    **send_msg
                )
                print('发送：', produce_kafka_topic, send_msg)
            except Exception as err:
                # Best-effort publish: log the failure and keep consuming.
                print('*' * 27, err)
    except KafkaTimeoutError as err:
        print('*' * 27, err)
    except KeyboardInterrupt as e:
        print('*' * 27, e)
    except KafkaError as e:
        print('*' * 27, e)
    except Exception as err:
        # Top-level boundary of the service loop; anything unexpected is logged.
        print('*' * 27, err)
    finally:
        # Flush buffered records before closing, and close the consumer too
        # (it was previously leaked — only the producer was closed).
        producer.flush()
        producer.close()
        consumer.close()
        print('over!')
