from querier.esquerier import ElasticSearchQuerier

# Display labels for weekday numbers 1-7 ("周一" = Monday ... "周日" = Sunday).
# decode() indexes this list with WEEKDAYS[i - 1], matching the 1-based
# day-of-week keys produced by the ES histogram script.
WEEKDAYS = ["周一", "周二", "周三", "周四", "周五", "周六", "周日"]

# Supported time-slot bucketing modes for the histogram aggregation.
WEEKDAY = 'weekday'
HOUR = 'hour'


class WeiboTimeSlotAnalyzeQuerier(ElasticSearchQuerier):
    """Bucket a user's weibo posts by weekday or hour of day.

    Builds an ES histogram aggregation keyed on a painless script over
    ``publish_timestamp`` and flattens the bucket metrics (likes, retweets,
    comments, engagement) into parallel lists for the response.
    """

    def __init__(self, es, index, doc_type):
        super(WeiboTimeSlotAnalyzeQuerier, self).__init__(es, index, doc_type)

    def _build_query(self, args):
        """Validate request args and produce (query, options, params).

        ``args`` keys: ``user_id`` (required), ``from``/``to`` (optional
        timestamp range), ``time_slot_type`` (defaults to WEEKDAY).
        Raises ValueError when ``user_id`` is absent.
        """
        user_id = args.get('user_id', None)
        from_date = args.get('from', None)
        to_date = args.get('to', None)
        # Falsy (missing/empty) slot type falls back to weekday bucketing.
        slot_type = args.get('time_slot_type', None) or WEEKDAY

        if user_id is None:
            raise ValueError('"user_id" is needed.')

        es_query = self._genQuery(user_id, from_date, to_date, slot_type)
        request_params = {
            'user_id': user_id,
            'from': from_date,
            'to': to_date,
            'time_slot_type': slot_type,
        }
        return es_query, {}, request_params

    def _build_result(self, es_result, param):
        """Shape the raw ES aggregation response into the API payload."""
        slot_type = param['time_slot_type']
        data = extractResult(es_result['aggregations'], slot_type)

        values = {'doc_counts': data['doc_counts']}
        for metric in ('likes', 'retweets', 'comments', 'engagement'):
            for stat in ('sum', 'avg', 'max'):
                key = stat + '_' + metric
                values[key] = data[key]

        return {
            "times": decode(slot_type, data['dates']),
            'user_id': param['user_id'],
            'from': param['from'],
            'to': param['to'],
            'time_slot_type': slot_type,
            "values": values,
        }

    def _genQuery(self, user_id, from_date, to_date, time_slot_type):
        """Build the filtered, size-0 histogram aggregation query."""
        # Day-of-week yields keys 1-7; hour-of-day yields keys 0-23.
        if time_slot_type == WEEKDAY:
            slot_script = "doc['publish_timestamp'].date.getDayOfWeek()"
        else:
            slot_script = "doc['publish_timestamp'].date.getHourOfDay()"

        # sum/max sub-aggregations per metric; note engagement is stored in
        # the "sum_engagement" document field.
        metric_aggs = {}
        for name, field in (('likes', 'likes'),
                            ('retweets', 'retweets'),
                            ('comments', 'comments'),
                            ('engagement', 'sum_engagement')):
            metric_aggs['sum_' + name] = {"sum": {"field": field, "missing": 0}}
            metric_aggs['max_' + name] = {"max": {"field": field, "missing": 0}}

        return {
            "query": {
                "bool": {
                    "filter": [
                        {'term': {'user_id': user_id}},
                        {"range": {"publish_timestamp": {"from": from_date, "to": to_date}}}
                    ]
                }
            },
            "aggs": {
                "time_hist": {
                    "histogram": {
                        "script": {
                            'lang': "painless",
                            'inline': slot_script,
                        },
                        "interval": 1,
                        "order": {"_key": "asc"}
                    },
                    "aggs": metric_aggs
                }
            },
            "size": 0
        }


def extractResult(agg, slot_type):
    """Flatten the ES time-slot histogram aggregation into parallel lists.

    ``agg`` is the ``aggregations`` section of the ES response, containing a
    ``time_hist`` histogram whose buckets carry sum_*/max_* sub-aggregations
    for likes, retweets, comments and engagement.

    Returns a dict with ``dates`` (the full slot axis: hours 0-23 or
    weekdays 1-7), ``doc_counts``, and sum_/avg_/max_ lists per metric,
    each aligned with ``dates``. Slots with no bucket stay at 0, so sparse
    ES results still yield a complete axis.
    """
    buckets = agg['time_hist']['buckets']
    # getHourOfDay() produces 0-23; getDayOfWeek() produces 1-7.
    dates = range(0, 24) if slot_type == 'hour' else range(1, 8)

    metrics = ('likes', 'retweets', 'comments', 'engagement')
    doc_counts = {k: 0 for k in dates}
    sums = {m: {k: 0 for k in dates} for m in metrics}
    maxes = {m: {k: 0 for k in dates} for m in metrics}
    avgs = {m: {k: 0 for k in dates} for m in metrics}

    for b in buckets:
        k = b['key']
        count = b['doc_count']
        doc_counts[k] = count
        for m in metrics:
            total = b['sum_' + m]['value']
            sums[m][k] = total
            maxes[m][k] = b['max_' + m]['value']
            # Guard against a zero-count bucket (min_doc_count=0 setups).
            avgs[m][k] = 0 if count == 0 else total / count

    result = {
        'doc_counts': [doc_counts[k] for k in dates],
        'dates': list(dates),
    }
    for m in metrics:
        result['sum_' + m] = [sums[m][k] for k in dates]
        result['avg_' + m] = [avgs[m][k] for k in dates]
        result['max_' + m] = [maxes[m][k] for k in dates]
    return result


def decode(time_slot_type, data):
    """Turn numeric slot keys into display labels.

    Weekday keys (1-7) map to the Chinese labels in WEEKDAYS (out-of-range
    keys become ''); hour keys are rendered as two-character strings
    (e.g. 7 -> "07").
    """
    if time_slot_type == WEEKDAY:
        return [WEEKDAYS[i - 1] if i <= len(WEEKDAYS) else '' for i in data]
    return [s if len(s) == 2 else '0' + s for s in (str(i) for i in data)]


class WeiboTimeSlotAnalyzeAllQuerier(ElasticSearchQuerier):
    """Fan one time-slot analyze request out over several slot types.

    Delegates each per-slot-type query to the wrapped
    WeiboTimeSlotAnalyzeQuerier; this class never talks to Elasticsearch
    itself, so the base class is initialized with no connection.
    """

    def __init__(self, analyze_querier):
        super(WeiboTimeSlotAnalyzeAllQuerier, self).__init__(None, None, None)
        self.analyze_querier = analyze_querier

    def search(self, args):
        """Run one analyze query per slot type and collect the results.

        ``args`` keys: ``user_id`` (required), ``from``/``to`` (optional
        range), ``by`` (optional list of slot types; defaults to both
        HOUR and WEEKDAY). Returns a dict mapping slot type -> result.
        Raises ValueError when ``user_id`` is absent.
        """
        by_fields = args.get('by')
        user_id = args.get('user_id', None)
        from_date = args.get('from', None)
        to_date = args.get('to', None)

        if user_id is None:
            raise ValueError('"user_id" is needed.')

        if by_fields is None:
            # Use the module constants so slot-type names stay consistent
            # with WeiboTimeSlotAnalyzeQuerier / extractResult.
            by_fields = [HOUR, WEEKDAY]

        res = {}
        for slot_type in by_fields:
            res[slot_type] = self.analyze_querier.search({
                "user_id": user_id,
                "time_slot_type": slot_type,
                "from": from_date,
                "to": to_date,
            })
        return res

    def _build_query(self, args):
        # Unused: search() is fully overridden above.
        pass

    def _build_result(self, es_result, param):
        # Unused: search() is fully overridden above.
        pass
