import json
import logging
import os
import sys
import codecs
base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(base_dir)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "webservices.settings")
from event_rule.utils import transform_event
from storage.rawdata.bypass_proxy import BypassProxyRawDataStorage
from preprocessor.handleradapter import EventHandlerAdapter

# Module-level logger; handlers/level are configured by the host application.
_LOGGER = logging.getLogger(__name__)

# Batch/consumer tuning knobs. Only BATCH_LIMIT is referenced in this file
# (flush threshold in start()); the *_TIMEOUT_MS and DC_* constants look like
# leftovers from a Kafka-consumer variant of this script — NOTE(review):
# confirm whether they are still used elsewhere before removing.
CONSUMER_TIMEOUT_MS = 200000
BATCH_LIMIT = 5000
DC_CONSUMER_TIMEOUT_MS = 30000
DC_BATCH_LIMIT = 500


def get_compatible_event_data(message):
    """Return a copy of *message* with legacy keys renamed to their '_'-prefixed form.

    Keys listed in the compatibility set (e.g. 'aid' -> '_aid') are prefixed
    with an underscore; all other keys are copied through unchanged.

    :param message: dict of event fields.
    :return: new dict; the input dict is not mutated.
    """
    # Set (not list) for O(1) membership tests per key.
    compatible_target = {'aid', 'ip', 'cvc', 'chn', 'user_id', 's2s_appid', 'device_type'}
    return {
        ('_' + key if key in compatible_target else key): value
        for key, value in message.items()
    }


def load_json(data):
    """Decode *data* (UTF-8 bytes) as JSON, returning {} on any failure.

    :param data: bytes containing a UTF-8 encoded JSON document.
    :return: the parsed object, or {} if decoding or parsing fails.
    """
    try:
        return json.loads(data.decode('utf-8'))
    except Exception as exception:
        # Decode with errors='replace' here: if the original failure was a
        # UnicodeDecodeError, a plain .decode('utf-8') would raise again
        # inside this handler and escape the function entirely.
        _LOGGER.exception("load json: %s data: %s", exception,
                          data.decode('utf-8', errors='replace'))
    return {}


def start(topic, file_path):
    """Replay a JSON-lines event dump into the raw-data storages for *topic*.

    Each non-blank line of *file_path* is parsed as one JSON event, normalized
    via get_compatible_event_data / transform_event, passed through the
    topic's EventHandlerAdapter, and batched into one of two storages:
    'app_download' events go to the '<topic>_s2s' storage, everything else to
    the '<topic>' storage. Batches are flushed every BATCH_LIMIT accepted
    events, with a final flush for the trailing partial batch.

    :param topic: event topic name used to build the storages and adapter.
    :param file_path: path to a UTF-8 (optionally BOM-prefixed) JSON-lines file.
    """
    handler_adapter = EventHandlerAdapter(topic)
    app_download_storage = BypassProxyRawDataStorage('_'.join([topic, 's2s']))
    storage = BypassProxyRawDataStorage(topic)
    batch_counter = 0
    # 'utf-8-sig' strips a leading BOM if present.
    with codecs.open(file_path, 'r', 'utf-8-sig') as file:
        # Stream line by line instead of materializing the whole file in memory.
        for line in file:
            line = line.strip()
            if not line:
                # Skip blank lines, which json.loads would reject.
                continue
            message = get_compatible_event_data(json.loads(line))
            event = transform_event(message, topic)
            events = handler_adapter.handle(event)
            if not events:
                continue
            if event['_event_id'] == 'app_download':
                app_download_storage.add_batch(events)
            else:
                storage.add_batch(events)
            batch_counter += 1
            # '>=' so a full batch is exactly BATCH_LIMIT events
            # (the original '>' flushed batches of BATCH_LIMIT + 1).
            if batch_counter >= BATCH_LIMIT:
                storage.execute_batch()
                app_download_storage.execute_batch()
                batch_counter = 0
    # Flush the trailing partial batch when fewer than BATCH_LIMIT
    # events accumulated since the last flush.
    app_download_storage.execute_batch()
    storage.execute_batch()


if __name__ == '__main__':
    import argparse

    # CLI generalization of the previously hard-coded invocation; the
    # defaults reproduce the old behavior when run with no arguments.
    parser = argparse.ArgumentParser(
        description='Replay a JSON-lines event dump into raw-data storage.')
    parser.add_argument('--topic', default='dark3',
                        help='event topic name (default: dark3)')
    parser.add_argument('--file-path', default='./data.json',
                        help='path to the JSON-lines dump (default: ./data.json)')
    args = parser.parse_args()
    start(topic=args.topic, file_path=args.file_path)

