import enum
import copy
from datetime import timedelta
import logging
from datetime import datetime
from django.conf import settings
from common.es_core.user_filter import UserFilter
from common.es_core.es_interface import es_search_interface

_LOGGER = logging.getLogger(__name__)

SECOND_PER_DAY = 86400


class ESOperation(enum.Enum):
    """Elasticsearch aggregation operations supported by the report creator."""
    SUM = 'sum'
    VALUE_COUNT = 'value_count'
    CARDINALITY = 'cardinality'
    TOP_HITS = 'top_hits'
    HISTOGRAM = 'histogram'
    MAX = 'max'
    AVERAGE = 'avg'

    @classmethod
    def has_value(cls, value):
        """Return True when *value* equals the value of some member."""
        return value in {member.value for member in cls}


class ReportCreator:
    """
    Report generator exposing two entry points:
    1. Re-run only the derived value expressions: call ``reprocess_value_expr``.
    2. Build a full report from Elasticsearch: call ``create_report``.
    """
    def __init__(self, start_time, end_time, event_rule, index, custom_filter=None, custom_not_filter=None,
                 dynamic_argument_dct=None, user_pool=None):
        """
        :param user_pool: list, target user pool
        :param start_time: datetime obj, start time of report
        :param end_time:  datetime obj, end time of report
        :param event_rule: dict, config for report
        :param index: str, target index to search and create report
        :param custom_filter: list, list of dsl filter
        :param custom_not_filter: list, list of dsl not filter
        :param dynamic_argument_dct: dict, allow dynamic argument to take care of weakness of config
        """
        self.user_pool = user_pool
        self.start_time = start_time
        self.end_time = end_time
        self.event_rule = event_rule
        self.index = index
        self.custom_filter = custom_filter
        self.custom_not_filter = custom_not_filter
        self.dynamic_argument_dct = dynamic_argument_dct
        assert 'values' in event_rule
        # Keep only the value fields that apply to the queried index.
        self.value_fields = [value_field for value_field in event_rule.get('values')
                             if _is_target_index(index, value_field.get('target_index'))]
        # Either both or neither time bound must be given; both are None on the
        # reprocess_value_expr path, which never queries Elasticsearch.
        assert (start_time and end_time) or (not start_time and not end_time)
        # [start, end) as epoch seconds, or None when no window was supplied.
        self.base_time_range = [int(self.start_time.timestamp()), int(end_time.timestamp())] \
            if start_time and end_time else None

    def __parse_target_value(self, value_field):
        """
        Build the metric aggregation clause for a single value field.
        :param value_field: dict, config for one field
        :return: dict, the aggregation block for that field
        """
        operation = value_field.get('operation', ESOperation.SUM.value)
        assert ESOperation.has_value(operation)
        target_value = value_field['target_value']
        if operation == ESOperation.TOP_HITS.value:
            # top_hits requires an explicit sort and keeps only the first hit.
            assert 'sort' in value_field
            sort = value_field['sort']
            aggregation = {ESOperation.TOP_HITS.value: {"size": 1, "sort": sort}}
        elif isinstance(target_value, list):
            # A list target is an arithmetic expression over doc fields, e.g.
            # ['a', '+', 'b'] -> painless script "doc.a.value+doc.b.value".
            assert operation == ESOperation.SUM.value
            source_script = ''
            last_field = None
            for field in target_value:
                if last_field and last_field not in ['+', '-', '*', '/']:
                    # Previous token was a field, so this one must be an operator.
                    assert field in ['+', '-', '*', '/']
                else:
                    field = "doc.{}.value".format(field)
                source_script += field
                last_field = field
            aggregation = {operation: {"script": {"lang": "painless", "source": source_script}}}
        else:
            aggregation = {operation: {'field': target_value}}
            operation_dict = value_field.get(operation, {})
            if operation == ESOperation.HISTOGRAM.value:
                # Optionally pin histogram bounds to the report's base window.
                use_bounds = operation_dict.get('extended_bounds', False)
                if use_bounds:
                    operation_dict['extended_bounds'] = {'min': self.base_time_range[0],
                                                         'max': self.base_time_range[1] - 1}
            aggregation[operation].update(operation_dict)
        return aggregation

    def __get_global_time_range(self):
        """
        Compute the widest _event_time window over all per-field time filters.
        :return: (floor, ceiling) epoch seconds covering every field's range
        """
        event_time_floor, event_time_ceiling = self.base_time_range[0], self.base_time_range[1]
        for value_field in self.value_fields:
            time_filters = self.__get_time_filters(value_field)
            for time_filter in time_filters:
                if '_event_time' in time_filter['range']:
                    if time_filter['range']['_event_time']['gte'] < event_time_floor:
                        event_time_floor = time_filter['range']['_event_time']['gte']
                    if time_filter['range']['_event_time']['lt'] > event_time_ceiling:
                        event_time_ceiling = time_filter['range']['_event_time']['lt']
        return event_time_floor, event_time_ceiling

    def __get_time_filters(self, item):
        # Build the range filters for one field; 'time_range' config entries
        # shift the base window per field (see __get_time_filter_by_field).
        time_filters = []
        base_event_time_filter = {'range': {"_event_time": {"gte": self.base_time_range[0],
                                                            "lt": self.base_time_range[1]}}}
        time_delta_dict = item.get('time_range', {})
        if not time_delta_dict:
            return [base_event_time_filter]
        for field, time_delta in time_delta_dict.items():
            time_filter = self.__get_time_filter_by_field(field, time_delta, self.base_time_range)
            if time_filter:
                time_filters.append(time_filter)
        # Always constrain _event_time unless the config overrode it explicitly.
        if not time_filters or '_event_time' not in time_delta_dict:
            time_filters.append(base_event_time_filter)
        return time_filters

    @staticmethod
    def __get_time_filter_by_field(field_name, custom_time_delta, time_range):
        # custom_time_delta is [] (no filter) or a two-element [lower, upper]:
        # numbers are day offsets applied to the base window; '-' means
        # "from epoch 0" and '+' means "up to now".
        assert isinstance(custom_time_delta, list)
        assert len(custom_time_delta) in (0, 2)
        if not custom_time_delta:
            return None
        assert isinstance(custom_time_delta[0], (int, float)) or custom_time_delta[0] == '-'
        assert isinstance(custom_time_delta[1], (int, float)) or custom_time_delta[1] == '+'
        time_filter = {'range': {field_name: {}}}
        if custom_time_delta[0] == '-':
            time_filter['range'][field_name]['gte'] = 0
        else:
            time_filter['range'][field_name]['gte'] = time_range[0] + SECOND_PER_DAY * custom_time_delta[0]
        if custom_time_delta[1] == '+':
            time_filter['range'][field_name]['lt'] = datetime.now().timestamp()
        else:
            time_filter['range'][field_name]['lt'] = time_range[1] + SECOND_PER_DAY * custom_time_delta[1]
        assert time_filter['range'][field_name]['gte'] < time_filter['range'][field_name]['lt']
        return time_filter

    @staticmethod
    def __parse_dynamic_argument(value_filter, dynamic_argument_dct=None):
        """
        Substitute '${name}' placeholders in terms filters with runtime values.
        :param value_filter: list, DSL filters for one field (may be None)
        :param dynamic_argument_dct: dict mapping placeholder names to values;
            a None value drops the corresponding terms filter entirely
        :return: list of resolved filter dicts
        """
        value_filter = value_filter or []
        if dynamic_argument_dct is None:
            return value_filter
        res_filter = []
        for each_filter in value_filter:
            for key, content in each_filter.items():
                if key == 'terms' and '$' in list(content.values())[0]:
                    target_key = list(content.values())[0]
                    target_key = target_key.replace('$', '')
                    target_key = target_key.lstrip('{').rstrip('}')
                    if dynamic_argument_dct[target_key] is None:
                        continue
                    res_filter.append({key: {list(content.keys())[0]: dynamic_argument_dct[target_key]}})
                else:
                    res_filter.append({key: content})
        return res_filter

    def __get_fields_aggs_dsl(self):
        # One filtered aggregation per concrete value field; derived fields
        # ('value_expr') are computed later from the other results.
        field_aggs_dsl = dict()
        for value_field in self.value_fields:
            if 'value_expr' in value_field:
                continue
            if not _is_target_index(self.index, value_field.get('target_index')):
                continue
            value_id = value_field['value_id']
            report_name = "sum_{}".format(value_id)
            # Per-field filter: time window(s) plus resolved custom filters.
            must_filter = self.__get_time_filters(value_field)
            must_filter += self.__parse_dynamic_argument(value_field.get('filter'), self.dynamic_argument_dct)
            not_filter = self.__parse_dynamic_argument(value_field.get('not_filter'), self.dynamic_argument_dct)
            field_filter = {
                "bool": {"must": must_filter, "must_not": not_filter}
            }
            aggs_dsl = self.__parse_target_value(value_field)
            if "terms" in value_field:
                # Optional terms sub-bucketing:
                # sum_<id> (filter) -> _sum_<id> (terms) -> __sum_<id> (metric).
                aggs_unit = {report_name: {
                    'aggs': {"_sum_{}".format(value_id): {
                        "terms": value_field["terms"],
                        "aggs": {"__sum_{}".format(value_id): aggs_dsl}
                    }}
                }}
            else:
                aggs_unit = {report_name: {'aggs': {"_sum_{}".format(value_id): aggs_dsl}}}
            aggs_unit[report_name]['filter'] = field_filter
            field_aggs_dsl.update(aggs_unit)
        return field_aggs_dsl

    def __get_event_id_filters(self, global_time_range):
        """
        Restrict the query to the configured event ids, shrinking the scanned set.
        :param global_time_range: list, [event_time floor, event_time ceiling]
        :return: list of global event filter clauses
        """
        target_event = self.event_rule.get('target_event_id', [])
        on_day_event_id = self.event_rule.get('on_day_event_id', [])
        for event_id in on_day_event_id:
            assert event_id in target_event
        global_time_constraint = {'range': {"_event_time": {"gte": global_time_range[0], "lt": global_time_range[1]}}}
        if not on_day_event_id:
            return [
                {"terms": {"_event_id": target_event}},
                global_time_constraint
            ]
        # 'on day' events are limited to the base window while the remaining
        # events use the widened global window.
        base_time_constraint = {'range': {"_event_time": {"gte": self.base_time_range[0],
                                                          "lt": self.base_time_range[1]}}}
        global_time_event = list(set(target_event) - set(on_day_event_id))
        event_id_filters = [
            {"bool": {"should": [{"bool": {"must": [{"terms": {"_event_id": global_time_event}},
                                                    global_time_constraint]}},
                                 {"bool": {"must": [{"terms": {"_event_id": on_day_event_id}},
                                                    base_time_constraint]}}]}}
        ]
        return event_id_filters

    def __process_es_query_result(self, aggs_data, result):
        """
        Extract per-field values from the Elasticsearch aggregation response.
        :param aggs_data: dict, the 'aggregations' payload of the ES response
        :param result: dict, filled in place with value_id -> extracted value
        :return:
        """
        for value_field in self.value_fields:
            if not _is_target_index(self.index, value_field.get('target_index')):
                continue
            if 'value_expr' in value_field:
                continue
            value_id = value_field['value_id']
            operation = value_field.get('operation', ESOperation.SUM.value)
            if operation == ESOperation.TOP_HITS.value:
                # A single hit document, or {} when nothing matched.
                result[value_id] = aggs_data['sum_{}'.format(value_id)]['_sum_{}'.format(value_id)]['hits']['hits']
                result[value_id] = result[value_id][0] if result[value_id] else {}
            elif 'terms' in value_field:
                buckets = aggs_data['sum_{}'.format(value_id)]['_sum_{}'.format(value_id)]['buckets']
                items = []
                for v in buckets:
                    item = dict(key=v['key'], count=v['doc_count'], sum=v['__sum_{}'.format(value_id)]['value'])
                    items.append(item)
                result[value_id] = items
            elif operation == ESOperation.HISTOGRAM.value:
                # Only the number of non-empty histogram buckets is reported.
                buckets = aggs_data['sum_{}'.format(value_id)]['_sum_{}'.format(value_id)]['buckets']
                result[value_id] = len(buckets)
            else:
                value_data = aggs_data['sum_{}'.format(value_id)]['_sum_{}'.format(value_id)]['value']
                result[value_id] = value_data

    def __calculate_value_expr(self, value_field, result):
        """
        Evaluate a derived-field expression against already-extracted values.
        :param value_field: dict, config containing 'value_expr'
        :param result: dict of previously computed value_id -> value
        :return: evaluated value (0 on division by zero)
        """
        try:
            value_expr = value_field['value_expr']
            if '$' in value_expr and self.dynamic_argument_dct:
                value_expr = value_expr.replace('$', '')
                value_expr = value_expr.format(**self.dynamic_argument_dct)
            # NOTE(security): eval() on a config-supplied expression; builtins
            # are disabled but the report config must still be trusted input.
            value_data = eval(value_expr, {"__builtins__": None},
                              {k: v for k, v in result.items()})
        except ZeroDivisionError:
            value_data = 0
        return value_data

    def __process_value_expr(self, result):
        # Compute every derived ('value_expr') field and store it in result.
        for value_field in self.value_fields:
            if not _is_target_index(self.index, value_field.get('target_index')):
                continue
            if 'value_expr' in value_field:
                value_id = value_field['value_id']
                value_data = self.__calculate_value_expr(value_field, result)
                result[value_id] = value_data

    def __format_result(self, result):
        """
        Apply the optional per-field 'fmt' callable to the report values.
        :param result: dict of generated report values, modified in place
        """
        for value_field in self.value_fields:
            if not _is_target_index(self.index, value_field.get('target_index')):
                continue
            if 'fmt' in value_field:
                fmt_lambda = value_field['fmt']
                value_id = value_field['value_id']
                try:
                    result[value_id] = fmt_lambda(result[value_id])
                except Exception as e:
                    # Formatting is best-effort; keep the raw value on failure.
                    _LOGGER.exception('fmt error %s', e)

    def process_aggregation_results(self, aggs_data):
        """Turn an ES aggregations payload into the final report dict."""
        result = dict()
        self.__process_es_query_result(aggs_data, result)
        self.__process_value_expr(result)
        self.__format_result(result)
        return result

    def transform_group_by_rule_to_dsl(self, group_by, max_bucket, sum_up):
        """Build the grouped-report DSL: one terms bucket per group_by value."""
        if self.event_rule.get('dynamic_argument') is not None:
            assert self.dynamic_argument_dct is not None
        else:
            assert self.dynamic_argument_dct is None
        filter_list = []
        if self.custom_filter:
            filter_list += self.custom_filter
        if self.user_pool is not None:
            filter_list += [{"terms": {'_user_id': self.user_pool}}]
        value_aggregation_dsl = self.__get_fields_aggs_dsl()
        event_time_floor, event_time_ceiling = self.__get_global_time_range()
        global_time_range = [event_time_floor, event_time_ceiling]
        event_id_constraint = self.__get_event_id_filters(global_time_range)
        filter_list += event_id_constraint
        aggregation_dsl = {
            "query": {"bool": {"filter": filter_list}},
            "size": 0,
            "aggs": {
                "buckets_count": {"cardinality": {"field": group_by}},
                "report": {
                    "terms": {"field": group_by, "size": max_bucket},
                    "aggs": value_aggregation_dsl
                }
            }}
        if sum_up:
            # Also aggregate the same values globally for the 'sum_up' totals.
            aggregation_dsl['aggs'].update(value_aggregation_dsl)
        if self.custom_not_filter:
            aggregation_dsl["query"]["bool"]["must_not"] = self.custom_not_filter
        return aggregation_dsl

    def transform_rule_to_dsl(self):
        """Build the flat (ungrouped) report query DSL."""
        if self.event_rule.get('dynamic_argument'):
            assert self.dynamic_argument_dct is not None
        else:
            assert self.dynamic_argument_dct is None
        filter_list = []
        if self.custom_filter:
            filter_list += self.custom_filter
        if self.user_pool is not None:
            filter_list += [{"terms": {'_user_id': self.user_pool}}]
        value_aggregation_dsl = self.__get_fields_aggs_dsl()
        event_time_floor, event_time_ceiling = self.__get_global_time_range()
        global_time_range = [event_time_floor, event_time_ceiling]
        event_id_constraint = self.__get_event_id_filters(global_time_range)
        filter_list += event_id_constraint
        aggregation_dsl = {
            "query": {
                "bool": {"filter": filter_list}
            },
            "size": 0,
            "aggs": value_aggregation_dsl
        }
        if self.custom_not_filter:
            aggregation_dsl["query"]["bool"]["must_not"] = self.custom_not_filter
        return aggregation_dsl

    def create_event_group_by_report(self, max_bucket=50000, sum_up=None):
        """Run the grouped report; returns dict(list=..., total_count=..., sum_up=...)."""
        assert 'group_by' in self.event_rule
        group_by = self.event_rule['group_by']
        aggregation_dsl = self.transform_group_by_rule_to_dsl(group_by=group_by, max_bucket=max_bucket, sum_up=sum_up)
        resp_json = es_search_interface(self.index, aggregation_dsl)
        buckets = resp_json['aggregations']['report']['buckets']
        resp_items = []
        for bucket in buckets:
            # Each row carries the group key (without the .keyword suffix)
            # plus the extracted values for that bucket.
            result = {group_by.replace('.keyword', ''): bucket['key']}
            result.update(self.process_aggregation_results(bucket))
            resp_items.append(result)
        return dict(list=resp_items, total_count=resp_json['aggregations']['buckets_count'].get('value', 0),
                    sum_up=self.process_aggregation_results(resp_json['aggregations']) if sum_up else {})

    def create_event_report(self):
        """Run the flat report query and return a value_id -> value dict."""
        aggregation_dsl = self.transform_rule_to_dsl()
        resp_json = es_search_interface(self.index, aggregation_dsl)
        event_report = self.process_aggregation_results(resp_json['aggregations'])
        return event_report

    @classmethod
    def reprocess_value_expr(cls, target_report, event_rule, index, dynamic_argument_dct=None):
        """Recompute only the derived fields of an existing report (no ES query)."""
        creator = cls(start_time=None, end_time=None, event_rule=event_rule, index=index,
                      custom_filter=None, custom_not_filter=None,
                      dynamic_argument_dct=dynamic_argument_dct)
        creator.__process_value_expr(target_report)
        creator.__format_result(target_report)
        return target_report

    def _create_report(self, to_group, **kwargs):
        # Dispatch between grouped and flat report generation.
        if to_group:
            return self.create_event_group_by_report(**kwargs)
        return self.create_event_report()

    @classmethod
    def create_report(cls, start_time, end_time, event_rule, index, custom_filter=None,
                      custom_not_filter=None, dynamic_argument_dct=None, user_pool=None, to_group=None,
                      **kwargs):
        """Convenience entry point: construct a creator and run the report."""
        report_creator = cls(user_pool=user_pool, start_time=start_time, end_time=end_time,
                             event_rule=event_rule,
                             index=index, custom_filter=custom_filter,
                             custom_not_filter=custom_not_filter,
                             dynamic_argument_dct=dynamic_argument_dct)
        return report_creator._create_report(to_group=to_group, **kwargs)


class UserReportCreator(ReportCreator):
    """ReportCreator variant that augments the base report with per-cohort
    reports: new users, next-day users, week users and veteran users,
    classified by registration time relative to the report start date."""
    def __init__(self, start_time, end_time, event_rule, index,
                 custom_filter=None, custom_not_filter=None, dynamic_argument_dct=None, user_pool=None):
        super(UserReportCreator, self).__init__(
            user_pool=user_pool, start_time=start_time, end_time=end_time, event_rule=event_rule,
            index=index, custom_filter=custom_filter, custom_not_filter=custom_not_filter,
            dynamic_argument_dct=dynamic_argument_dct
        )
        # Cohort sub-reports; create_user_report must fill all of these
        # before process_user_report is called.
        self.base_report = None
        self.new_user_report = None
        self.next_user_report = None
        self.week_user_report = None
        self.veteran_user_report = None

    def process_user_report(self, group_by=None):
        """Merge the cohort reports into the base report.

        Without group_by, the cohorts become top-level keys of the base report.
        With group_by, each base row is joined with its matching cohort row
        (looked up by the group key), and the sum_up totals are merged too.
        """
        assert self.base_report and self.new_user_report
        assert self.next_user_report and self.week_user_report and self.veteran_user_report
        assert isinstance(self.base_report, dict) and isinstance(self.new_user_report, dict) and isinstance(
            self.next_user_report, dict) and isinstance(self.week_user_report, dict) and isinstance(
            self.veteran_user_report, dict)
        if not group_by:
            self.base_report['new_user'] = self.new_user_report
            self.base_report['next_day'] = self.next_user_report
            self.base_report['week_user'] = self.week_user_report
            self.base_report['veteran_user'] = self.veteran_user_report
            return self.base_report
        # Index each cohort report by its group key for the row join below.
        new_user_report_dct = {item[group_by.replace('.keyword', '')]: item for item in self.new_user_report['list']}
        next_user_report_dct = {item[group_by.replace('.keyword', '')]: item for item in self.next_user_report['list']}
        week_user_report_dct = {item[group_by.replace('.keyword', '')]: item for item in self.week_user_report['list']}
        veteran_user_report_dct = {item[group_by.replace('.keyword', '')]: item
                                   for item in self.veteran_user_report['list']}
        report_list = []
        self.base_report['sum_up']['new_user'] = self.new_user_report['sum_up']
        self.base_report['sum_up']['next_day'] = self.next_user_report['sum_up']
        self.base_report['sum_up']['week_user'] = self.week_user_report['sum_up']
        self.base_report['sum_up']['veteran_user'] = self.veteran_user_report['sum_up']
        for report in self.base_report['list']:
            report['new_user'] = new_user_report_dct.get(report[group_by.replace('.keyword', '')], {})
            report['next_day'] = next_user_report_dct.get(report[group_by.replace('.keyword', '')], {})
            report['week_user'] = week_user_report_dct.get(report[group_by.replace('.keyword', '')], {})
            report['veteran_user'] = veteran_user_report_dct.get(report[group_by.replace('.keyword', '')], {})
            report_list.append(report)
        return dict(total_count=self.base_report['total_count'], list=report_list, sum_up=self.base_report['sum_up'])

    @classmethod
    def create_user_report(cls, start_time, end_time, event_rule, index,
                           custom_filter=None, custom_not_filter=None, dynamic_argument_dct=None,
                           user_pool=None, to_group=None, user_type_as='_registered_at', **kwargs):
        """Build the base report plus four cohort reports and merge them.

        Runs the same report five times, each time swapping in a different
        registration-time filter on top of the caller's base filter.

        :param user_type_as: field used to classify users by registration time
            (assumed to hold an epoch-seconds timestamp -- TODO confirm)
        """
        report_creator = cls(start_time=start_time, end_time=end_time,
                             event_rule=event_rule, index=index,
                             custom_filter=custom_filter, custom_not_filter=custom_not_filter,
                             dynamic_argument_dct=dynamic_argument_dct, user_pool=user_pool)
        base_filter = report_creator.custom_filter or []
        group_by = report_creator.event_rule.get('group_by') if to_group else None
        report_creator.base_report = report_creator._create_report(to_group=to_group, **kwargs)
        # New users: registered on the report day, or register events themselves.
        report_creator.custom_filter = copy.deepcopy(base_filter)
        report_creator.custom_filter.append({
            "bool": {
                "should": [
                    {"term": {"_event_id": "register"}},
                    {'range': {user_type_as: {
                        "gte": report_creator.start_time.timestamp(),
                        "lt": (report_creator.start_time + timedelta(days=1)).timestamp()
                    }}}
                ]
            }
        })
        report_creator.new_user_report = report_creator._create_report(to_group=to_group, **kwargs)
        # Next-day users: registered the day before the report window.
        report_creator.custom_filter = copy.deepcopy(base_filter)
        report_creator.custom_filter.append({'range': {user_type_as: {
            "gte": (report_creator.start_time - timedelta(days=1)).timestamp(),
            "lt": report_creator.start_time.timestamp()
        }}})
        report_creator.next_user_report = report_creator._create_report(to_group=to_group, **kwargs)
        # Week users: registered 2-6 days before the report window.
        report_creator.custom_filter = copy.deepcopy(base_filter)
        report_creator.custom_filter.append({'range': {user_type_as: {
            "gte": (report_creator.start_time - timedelta(days=6)).timestamp(),
            "lt": (report_creator.start_time - timedelta(days=1)).timestamp()
        }}})
        report_creator.week_user_report = report_creator._create_report(to_group=to_group, **kwargs)
        # Veteran users: registered more than 6 days before the report window.
        report_creator.custom_filter = copy.deepcopy(base_filter)
        report_creator.custom_filter.append({'range': {user_type_as: {
            "lt": (report_creator.start_time - timedelta(days=6)).timestamp()
        }}})
        report_creator.veteran_user_report = report_creator._create_report(to_group=to_group, **kwargs)
        return report_creator.process_user_report(group_by=group_by)


def _is_target_index(index_str: str, target_indices: list):
    if target_indices is None or index_str is None:
        return True
    indices = index_str.split(',')
    for idx in indices:
        if idx in target_indices:
            return True
    return False


def get_key_mapper_list(value_fields, index=None):
    """Build the display metadata list for the visible value fields.

    Fields not targeting *index* or flagged 'hide_result' are skipped.
    Derived fields ('value_expr') default to a float value_type, plain
    fields to long.
    """
    mappers = []
    for field in value_fields:
        if not _is_target_index(index, field.get('target_index')):
            continue
        key = field['value_id']
        if field.get('hide_result'):
            continue
        default_type = 'float' if 'value_expr' in field else 'long'
        mappers.append({
            'key': key,
            'name': field['value_name'],
            'value_type': field.get('value_type', default_type),
            'is_percentage': field.get('is_percentage', False),
            'copy_from_if_zero': field.get('copy_from_if_zero'),
            'description': field.get('description', ''),
        })
    return mappers


def add_attachment_to_report_config(report_config, attachment, attach_position=None):
    """Merge an attachment config into a report config.

    Neither input is mutated; a deep-copied, merged config is returned.

    :param report_config: dict with 'target_event_id' and 'values' lists
    :param attachment: dict with exactly the keys 'target_event_id' (list)
        and 'values' (list)
    :param attach_position: optional int, index in the values list at which
        the attachment values are inserted; None appends them at the end.
    :return: new merged config dict
    """
    assert 'target_event_id' in attachment
    assert isinstance(attachment['target_event_id'], list)
    assert 'values' in attachment
    assert isinstance(attachment['values'], list)
    assert len(attachment.keys()) == 2
    config = copy.deepcopy(report_config)
    attachment_copy = copy.deepcopy(attachment)
    config['target_event_id'] = list(set(config['target_event_id'] + attachment_copy['target_event_id']))
    # Bug fix: check against None rather than truthiness, so attach_position=0
    # inserts at the front instead of silently appending.
    if attach_position is not None:
        assert attach_position <= len(config['values'])
        config['values'] = (config['values'][0:attach_position] + attachment_copy['values'] +
                            config['values'][attach_position:])
    else:
        config['values'] = config['values'] + attachment_copy['values']
    return config


def generate_chn_filter_dsl(user_channel_ttls, banned_channels, banned_user_ids):
    """Build per-channel DSL filters enforcing activity-TTL and user-id bans.

    For every channel that appears in either input list, the filter accepts a
    document when at least one of these holds: it is a register event, it
    belongs to the channel AND satisfies the channel's constraints, or it does
    not belong to the channel at all. A trailing filter excludes any globally
    banned user ids.
    """
    ttl_map = {}
    for channel_ttl in user_channel_ttls or []:
        ttl_map[channel_ttl['channel']] = channel_ttl['ttl']
    banned_channel_map = {}
    for banned in banned_channels or []:
        banned_channel_map[banned['channel']] = banned['max_user_id']
    custom_filter = []
    for channel in set(ttl_map) | set(banned_channel_map):
        match_channel = {'term': {'_chn.keyword': channel}}
        constraints = [match_channel]
        if channel in ttl_map:
            constraints.append({'range': {'_active_days': {'lte': ttl_map[channel]}}})
        if channel in banned_channel_map:
            constraints.append({'range': {'_user_id': {'gt': banned_channel_map[channel]}}})
        custom_filter.append({'bool': {
            'should': [
                {'bool': {'must': [{'term': {'_event_id': 'register'}}]}},
                {'bool': {'must': constraints}},
                {'bool': {'must_not': match_channel}},
            ],
            "minimum_should_match": 1,
        }})
    if banned_user_ids:
        custom_filter.append({'bool': {'must_not': {'terms': {'_user_id': banned_user_ids}}}})
    return custom_filter


def get_daily_first_recharge(index, start_time, end_time):
    """Collect users whose first recharge falls inside [start_time, end_time).

    Combines two candidate pools — active users with no recharge in the prior
    ~10 years, and new users who recharged inside the window — then scans their
    recharge events in time order, keeping each user's earliest recharge.

    :param index: str, ES index / merchant name
    :param start_time: datetime, window start (inclusive)
    :param end_time: datetime, window end (exclusive)
    :return: dict with 'first_recharge_user_pool' (ordered user ids) and
        'first_recharge_out_trans_id' (matching transaction ids)
    """
    # Active users in the window with zero recharges in the preceding ~10 years.
    user_filter = UserFilter(merchant_name=index)
    user_filter.event_user_filter(['active'], start_time, end_time)
    user_filter.stage_filter(['recharge', 'active'], 'recharge_price', 'value_count',
                             start_time - timedelta(days=3650), start_time, None, 0)
    first_recharge_user = user_filter.get_user_pool()
    # New users who recharged inside the window.
    user_filter = UserFilter(merchant_name=index)
    user_filter.event_user_filter(['recharge'], start_time, end_time, custom_filter=[{"term": {"_is_new_user": 1}}])
    today_recharge_user = user_filter.get_user_pool()
    first_recharge_query_dsl = {
        "query": {
            "bool": {
                "filter": [
                    {"term": {"_event_id": "recharge"}},
                    {"terms": {"_user_id": first_recharge_user + today_recharge_user}},
                    {"range": {"_event_time": {"gte": start_time.timestamp(), "lt": end_time.timestamp()}}}
                ]
            }
        },
        "size": 10000,
        # Ascending time order so the first hit per user is their first recharge.
        "sort": [
            {"_event_time": {"order": "asc"}}
        ]
    }
    res = es_search_interface(index, first_recharge_query_dsl)
    first_recharge_out_trans_id, user_pool = [], []
    # Set gives O(1) dedup; the previous list membership scan was O(n^2).
    seen_users = set()
    for recharge in res.get('hits', {}).get('hits', []):
        user_id = recharge['_source']['_user_id']
        if user_id in seen_users:
            continue
        seen_users.add(user_id)
        # CP indices use the ES document id as the transaction identifier.
        if index in settings.CP_INDEX:
            first_recharge_out_trans_id.append(recharge['_id'])
        else:
            first_recharge_out_trans_id.append(recharge['_source']['recharge_out_trans_id'])
        user_pool.append(user_id)
    daily_first_recharge = dict(
        first_recharge_user_pool=user_pool,
        first_recharge_out_trans_id=first_recharge_out_trans_id,
    )
    return daily_first_recharge
