# -*- encoding: utf-8 -*-
import logging
import requests
import json
from offline_calc.handler import OfflineCalculationHandler
from common.const import IntervalType
from common.timeutil import get_interval_end_pair
from common.dwc.friday import marketing_cached_handler as handler

_LOGGER = logging.getLogger(__name__)

# Per-merchant endpoints serving the channel-process filter configuration.
config_url_map = dict(
    dark3='http://friday.qqdia.com:9929/channel_process/config/',
    dark4='http://www.hd108000.com:9929/channel_process/config/',
)


class Command(OfflineCalculationHandler):
    """Offline calculation command that fans out ES report-aggregation
    tasks per merchant index, handler name, interval type and time window,
    then waits for the worker pool to drain."""

    def __init__(self):
        super(Command, self).__init__()
        # Merchant indices this job can process; handle() may narrow this
        # to a single index via the 'merchant' argument.
        self.index_list = ['dark3', 'dark4']
        # Maps each external handler to the interval types it is aggregated for.
        self.filtered_name_list = {
            handler.ExternalEnum.DailyS2s: [IntervalType.DAY],
            handler.ExternalEnum.DailyUser: [IntervalType.DAY, IntervalType.HOUR],
            handler.ExternalEnum.DailyDevice: [IntervalType.DAY],
            handler.ExternalEnum.DailySummary: [IntervalType.DAY]
        }
        # Summary tasks are never scheduled with a filter_config (see handle()).
        self.summary_name = handler.ExternalEnum.DailySummary

    def add_arguments(self, parser):
        super(Command, self).add_arguments(parser)

    def handle(self, **kwargs):
        """Schedule aggregation tasks on the worker pool and block until done.

        Expects in kwargs:
            merchant     -- optional; restrict processing to this one index.
            target_event -- optional; restrict processing to this handler name.
        """
        super(Command, self).handle(**kwargs)
        merchant = kwargs['merchant']
        target_event = kwargs['target_event']
        if merchant:
            self.index_list = [index for index in self.index_list if index == merchant]

        for index in self.index_list:
            filter_config = self.get_filter_config(index)
            for name, interval_type_list in self.filtered_name_list.items():
                # Hoisted out of the interval loop: this depends only on name.
                if target_event and name.value != target_event:
                    continue
                for interval_type in interval_type_list:
                    for window in self.get_window_interval(interval_type.value):
                        start_time, end_time = get_interval_end_pair(interval_type, self.base_time, window)
                        time_range = [start_time, end_time]
                        # Was a bare print(); route through the module logger.
                        _LOGGER.info('scheduling index=%s name=%s interval=%s range=%s',
                                     index, name, interval_type.value, time_range)
                        try:
                            # NOTE(review): non-summary handlers are scheduled
                            # TWICE -- once with filter_config and once without.
                            # Preserved as-is from the original, but it looks
                            # like a missing 'else'; confirm with the task owner.
                            if self.summary_name.value != name.value:
                                self.pool.apipe(self.start_execution, handler.create_aggregation_data,
                                                interval_type, name.value, self.base_time, index, window,
                                                dict(idx=index, handler_name=name, time_range=time_range,
                                                     interval_type=interval_type, filter_config=filter_config))
                            self.pool.apipe(self.start_execution, handler.create_aggregation_data,
                                            interval_type, name.value, self.base_time, index, window,
                                            dict(idx=index, handler_name=name, interval_type=interval_type,
                                                 time_range=time_range))
                        except Exception:
                            _LOGGER.exception('ES Create Report Error')
        self.pool.close()
        self.pool.join()

    @staticmethod
    def get_filter_config(idx):
        """Fetch the channel-process filter configuration for merchant *idx*.

        Returns the service's 'data' payload on success, or {} on any
        failure (HTTP error, non-zero service status, network problem).
        The original fell through and returned None implicitly; {} is
        equally falsy but safe for dict-style consumers.

        Raises ValueError if *idx* has no configured endpoint.
        """
        if idx not in config_url_map:
            # 'assert' is stripped under python -O; validate explicitly.
            raise ValueError('unknown merchant index: {!r}'.format(idx))
        url = config_url_map[idx]
        try:
            # A missing timeout could hang the whole offline job forever.
            resp = requests.get(url=url, timeout=10)
            if resp.status_code == requests.codes.ok:
                data = json.loads(resp.content)
                if data.get('status') == 0:
                    _LOGGER.info('Get friday config success: %s', resp.url)
                    return data.get("data", {})
                _LOGGER.info("Get short link error code:%s message:%s",
                             data.get('code'), data.get('message'))
            else:
                # The original did _LOGGER.error(resp.raise_for_status()):
                # raise_for_status() either raises (bypassing the log) or
                # returns None (logging 'None'). Log the status code instead.
                _LOGGER.error('Get friday config failed: HTTP %s from %s',
                              resp.status_code, url)
        except Exception as exception:
            _LOGGER.exception('requests error: %s', exception)
        return {}
