import json
import logging
from django.conf import settings
from kafka import KafkaConsumer
from storage.rawdata.bypass_proxy import BypassProxyRawDataStorage
from event_rule.utils import transform_event
from preprocessor.adapter import EventPreprocessorAdapter

# Module-level logger named after this module.
_LOGGER = logging.getLogger(__name__)

# Fallback consumer poll timeout (ms) for topics without an entry in
# KAFKA_CONSUME_CONFIG.
CONSUMER_TIMEOUT_MS = 200000
# Fallback number of buffered messages before a storage flush + offset commit.
BATCH_LIMIT = 5000
# Per-topic tuning: 'time_out_ms' feeds KafkaConsumer's consumer_timeout_ms,
# 'batch_limit' is the flush threshold used in start().
KAFKA_CONSUME_CONFIG = {
    'loki': {'time_out_ms': 30000, 'batch_limit': 500},
    'witch': {'time_out_ms': 30000, 'batch_limit': 500},
    'dark3': {'time_out_ms': 200000, 'batch_limit': 5000},
    'dark4': {'time_out_ms': 200000, 'batch_limit': 5000},
    'agent': {'time_out_ms': 30000, 'batch_limit': 500},
    'offline_calculation': {'time_out_ms': 15000, 'batch_limit': 50},
    'mf_pay': {'time_out_ms': 15000, 'batch_limit': 50}
}
# NOTE(review): the two DC_* constants are not referenced anywhere in this
# file -- presumably consumed by another module; confirm before removing.
DC_CONSUMER_TIMEOUT_MS = 30000
DC_BATCH_LIMIT = 500


def get_compatible_event_data(message):
    """Return a copy of *message* with legacy field names prefixed by '_'.

    Keys listed in the compatibility set (e.g. 'aid', 'ip', 'user_id') are
    renamed to '_aid', '_ip', '_user_id', ...; all other keys pass through
    unchanged. The input dict is not mutated.

    :param message: decoded event payload as a dict.
    :return: new dict with compatibility-prefixed keys.
    """
    # Set (not list) for O(1) membership tests per key.
    compatible_target = {'aid', 'ip', 'cvc', 'chn', 'user_id', 's2s_appid', 'device_type'}
    return {
        ('_' + key if key in compatible_target else key): value
        for key, value in message.items()
    }


def load_json(data):
    """Deserialize a raw Kafka message payload (UTF-8 bytes) into JSON.

    Used as the KafkaConsumer value_deserializer, so it must never raise:
    any decode/parse failure is logged and an empty dict is returned instead.

    :param data: raw message value as bytes.
    :return: the parsed JSON object, or {} on any failure.
    """
    try:
        return json.loads(data.decode('utf-8'))
    except Exception as exception:
        # Decode with errors='replace' here: the original strict decode could
        # raise UnicodeDecodeError *inside* the handler and escape it,
        # crashing the consumer on the very input we meant to skip.
        _LOGGER.exception("load json: %s data: %s", exception,
                          data.decode('utf-8', errors='replace'))
    return {}


def start(topic):
    """Consume events from *topic*, preprocess them, and batch-write them
    to bypass-proxy raw-data storage. Runs forever.

    Offsets are committed manually and only after the buffered batch has
    been flushed to storage, so a crash mid-batch causes re-delivery rather
    than message loss.

    :param topic: Kafka topic name; also selects per-topic timeout and
        batch-size settings from KAFKA_CONSUME_CONFIG.
    """
    # enable_auto_commit=False: switch to manual offset commits.
    # consumer_timeout_ms: when the topic is idle, stop blocking and fall
    # out of the inner for-loop so the partial batch can still be flushed.
    group_id = "{}_{}".format(settings.ES_KAFKA_GROUP_PREFIX, topic)
    consumer = KafkaConsumer(topic, group_id=group_id, bootstrap_servers=settings.KAFKA_SERVERS,
                             value_deserializer=load_json, enable_auto_commit=False,
                             consumer_timeout_ms=KAFKA_CONSUME_CONFIG.get(
                                 topic, {}).get('time_out_ms', CONSUMER_TIMEOUT_MS))
    handler_adapter = EventPreprocessorAdapter(topic)
    # Separate storage for s2s app-download events ("<topic>_s2s").
    app_download_storage = BypassProxyRawDataStorage('_'.join([topic, 's2s']))
    storage = BypassProxyRawDataStorage(topic)
    while True:
        batch_counter = 0
        for each in consumer:
            # NOTE(review): this KeyError propagates out of start() and kills
            # the consumer loop -- confirm a supervisor restarts the worker.
            if 'message' not in each.value:
                raise KeyError('invalid data format')
            try:
                # 'message' may arrive either as an embedded JSON string or
                # as an already-parsed dict.
                message = json.loads(each.value['message']) if not isinstance(
                    each.value['message'], dict) else each.value['message']
            except Exception as err:
                _LOGGER.exception("message: %s, err: %s", each.value['message'], err)
                continue
            message = get_compatible_event_data(message)
            event = transform_event(message, topic)
            events = handler_adapter.handle(event)
            if not events:
                continue
            if event['_event_id'] in ('app_download', 'qk_app_download'):
                # NOTE(review): plain 'app_download' events are dropped here;
                # only 'qk_app_download' reaches app_download_storage. Verify
                # this filtering is intentional.
                if event['_event_id'] == 'app_download':
                    continue
                app_download_storage.add_batch(events)
            else:
                storage.add_batch(events)
            batch_counter += 1
            # Flush both storages and commit offsets once the per-topic
            # batch limit is exceeded.
            if batch_counter > KAFKA_CONSUME_CONFIG.get(topic, {}).get('batch_limit', BATCH_LIMIT):
                storage.execute_batch()
                app_download_storage.execute_batch()
                batch_counter = 0
                consumer.commit()
        app_download_storage.execute_batch()
        storage.execute_batch()  # flush the partial batch when the idle timeout fires below the batch limit
        consumer.commit()
