# -*- coding: utf-8 -*-
import grpc
import time
import os
import json
import signal
from concurrent import futures
from common.utils.id_util import uuid
from common.conf import SERVICE_TIME_INTERVAL
from common.utils.tasks import async_task
from kafka import KafkaConsumer, KafkaProducer
from common.conf import KAFKA_BOOTSTRAP_SERVERS, LOG_TOPIC_SAVE_LOG, LOG_TOPIC_SAVE_USER_ACTION, \
    SERVICE_TOPIC_REGISTER, SERVICE_TOPIC_UNREGISTER
from developer_module.handler.log import save_log, save_user_action

# auto import with build_proto.py
from developer_module.handler.developer import DeveloperHandler
from developer_module.handler.log import LogHandler
from developer_module.handler.redis import RedisHandler
from developer_module.rpc import developer_pb2_grpc
# end auto

# Unique id for this service instance; regenerated on every process start,
# so each restart registers as a fresh instance.
_SERVICE_ID = uuid()
_MODULE = 'developer'
_HOST = 'localhost'
_PORT = '8081'

# Registration payload published periodically to the service registry by
# serve(); it never changes, so it is serialized once at import time.
content = {
    'service_id': _SERVICE_ID,
    'service_name': _MODULE,
    'host': _HOST,
    'port': _PORT
}
content = json.dumps(content).encode()
# Shared producer for both the register heartbeat and the unregister message.
developer_producer = KafkaProducer(bootstrap_servers=KAFKA_BOOTSTRAP_SERVERS)


def serve():
    """Run the developer-module gRPC server and keep it registered.

    Blocks forever: after the gRPC server starts, the pre-serialized
    registration payload (module-level ``content``) is re-published to
    ``SERVICE_TOPIC_REGISTER`` every ``SERVICE_TIME_INTERVAL`` seconds as a
    liveness heartbeat.  On KeyboardInterrupt an unregister message is
    published, the gRPC server is stopped, and the process force-exits via
    SIGKILL so background threads (e.g. the consumer started by
    ``start_kafka``) cannot keep it alive.
    """
    # To trace peewee SQL, attach a logging.StreamHandler to the 'peewee'
    # logger at DEBUG level.

    grpc_server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))

    # NOTE(review): the marker comments below appear to delimit a section
    # managed by build_proto.py — kept verbatim in case the tool matches them.
    # 从这里开始是build_proto.py 自动添加
    developer_pb2_grpc.add_DeveloperServicer_to_server(DeveloperHandler(), grpc_server)
    developer_pb2_grpc.add_LogServicer_to_server(LogHandler(), grpc_server)
    developer_pb2_grpc.add_RedisServicer_to_server(RedisHandler(), grpc_server)
    # 自动添加结束

    grpc_server.add_insecure_port(f'{_HOST}:{_PORT}')
    grpc_server.start()

    try:
        # Heartbeat loop: block on each send (future.get) so a broker outage
        # surfaces as an exception instead of silently dropping heartbeats.
        while True:
            developer_producer.send(SERVICE_TOPIC_REGISTER, content).get(timeout=120)
            time.sleep(SERVICE_TIME_INTERVAL)

    except KeyboardInterrupt:
        # Tell the registry this instance is going away before shutting down.
        unregister_content = json.dumps({
            'service_id': _SERVICE_ID,
            'service_name': _MODULE,
        }).encode()
        developer_producer.send(SERVICE_TOPIC_UNREGISTER, unregister_content).get(timeout=120)
        grpc_server.stop(0)
        # SIGKILL guarantees exit even if non-daemon threads are still running.
        os.kill(os.getpid(), signal.SIGKILL)


@async_task
def start_kafka():
    """Consume the log topics in a background task and persist each message.

    Runs forever.  Each message value is parsed as JSON and dispatched to the
    handler matching its topic (save_log / save_user_action).
    """
    developer_consumer = KafkaConsumer(LOG_TOPIC_SAVE_LOG,
                                       LOG_TOPIC_SAVE_USER_ACTION,
                                       bootstrap_servers=KAFKA_BOOTSTRAP_SERVERS)
    while True:
        for msg in developer_consumer:
            # Security fix: eval() on message bytes would execute arbitrary
            # code supplied via the broker.  Producers in this codebase send
            # json.dumps(...).encode(), so json.loads is the safe equivalent
            # (assumes all publishers use JSON — verify against producers).
            payload = json.loads(msg.value)
            if msg.topic == LOG_TOPIC_SAVE_LOG:
                save_log(param=payload)
            elif msg.topic == LOG_TOPIC_SAVE_USER_ACTION:
                save_user_action(param=payload)


if __name__ == '__main__':
    # start_kafka is decorated with @async_task, so it returns immediately
    # and consumes in the background; serve() then blocks in the heartbeat
    # loop until interrupted.
    start_kafka()
    serve()
