import logging
import math
from pymongo import MongoClient
from django.conf import settings
from common.const import IntervalType
from common.utils.computing_priority import get_priority

_LOGGER = logging.getLogger(__name__)

BATCH_EXPORT_SIZE_LIMIT = 25000

MONGODB = MongoClient(settings.MONGO_ADDR)


def get_report_template(report_time, interval_type, custom_report_id=None, required_new_user_analysis=None):
    """Build the base document for a periodic report.

    :param report_time: datetime the report covers (stored as ``updated_at``).
    :param interval_type: enum-like object whose ``.value`` is e.g. 'day' or 'hour'.
    :param custom_report_id: explicit ``_id`` override; when absent the id is
        derived from the report time and interval type.
    :param required_new_user_analysis: when not None, seeds an empty
        ``new_user`` sub-document for later aggregation.
    :return: dict ready to be upserted into MongoDB.
    """
    time_str = str(report_time)
    day = time_str.split(' ')[0]
    if custom_report_id is not None:
        _id = custom_report_id
    else:
        # Day-granularity ids use the date only; finer intervals keep the hour
        # part (everything before the first ':' of the datetime string).
        prefix = day if interval_type.value == 'day' else time_str.split(':')[0]
        _id = '-'.join([prefix, interval_type.value])
    template = {
        '_id': _id,
        'updated_at': report_time,
        'interval_type': interval_type.value,
        'day': day,
    }
    if interval_type.value == IntervalType.HOUR.value:
        template['hour'] = report_time.hour
    if required_new_user_analysis is not None:
        template['new_user'] = {}
    return template


def insert_or_update(doc, database, collection=None):
    """Upsert *doc* into ``database.collection`` keyed by its ``_id``.

    :param doc: document dict; must carry an ``_id`` field.
    :param database: database name, validated against the configured index lists.
    :param collection: target collection name.
    """
    assert isinstance(doc, dict)
    allowed_databases = (settings.DWC_INDEX + settings.CP_INDEX +
                         settings.AGENT_INDEX + settings.OFFLINE_CALCULATION +
                         settings.MF_PAY_INDEX)
    assert database in allowed_databases
    # upsert=True: insert the document when no matching _id exists.
    MONGODB[database][collection].update_one({'_id': doc['_id']}, {'$set': doc}, upsert=True)


def paginator(query_dct):
    """Derive an ``(offset, size)`` pair from request query parameters.

    :param query_dct: mapping that may contain 'page' (1-based) and 'size';
        missing, empty, or None values fall back to page 1 / size 15.
    :return: tuple ``(offset, size)`` suitable for skip/limit pagination.
    """
    page = int(query_dct.get('page') or 1)
    size = int(query_dct.get('size') or 15)
    # Clamp page to >= 1: a page of 0 (or negative) would otherwise produce a
    # negative offset, which MongoDB's skip() rejects at query time.
    page = max(page, 1)
    offset = (page - 1) * size
    return offset, size


def get_report(database, collection, condition, sort_key, page=1, size=20, disable_paginate=False,
               user_type=None, export_size_limit=10000, merge_report=False, **kwargs):
    """Fetch a paginated report, dispatching to the plain or aggregate path.

    :param merge_report: when True, run an aggregation pipeline instead of a
        plain find; requires ``kwargs['event_rule']`` to be a dict.
    :return: tuple ``(items, total_count)``.
    """
    if not merge_report:
        return _get_report(database, collection, condition, sort_key, page=page, size=size,
                           disable_paginate=disable_paginate, user_type=user_type,
                           export_size_limit=export_size_limit)
    assert 'event_rule' in kwargs and isinstance(kwargs['event_rule'], dict)
    return _get_aggregate_report(database, collection, condition, sort_key, page=page, size=size,
                                 disable_paginate=disable_paginate, user_type=user_type,
                                 export_size_limit=export_size_limit, **kwargs)


def _get_report(database, collection, condition, sort_key, page=1, size=20, disable_paginate=False,
                user_type=None, export_size_limit=10000):
    """Run a plain find() report query with sorting and pagination.

    :param sort_key: field name, optionally '-' prefixed for descending order;
        '_id' descending is always appended as a tie-breaker.
    :param disable_paginate: when True, skip page/size and cap the result at
        ``export_size_limit`` (0/None means unlimited).
    :param user_type: when set, only documents carrying a non-empty sub-dict
        under that key are returned, flattened into the top level.
    :return: tuple ``(items, total_count)``.
    """
    assert database in settings.DWC_INDEX + settings.CP_INDEX + settings.QK_INDEX
    offset, size = paginator(dict(page=page, size=size))
    descending = sort_key.startswith('-')
    field = sort_key[1:] if descending else sort_key
    sort_list = [(field, -1 if descending else 1), ('_id', -1)]
    cursor = MONGODB[database][collection].find(condition).sort(sort_list)
    if not disable_paginate:
        cursor = cursor.skip(offset).limit(size)
    elif export_size_limit:
        cursor = cursor.limit(export_size_limit)
    # Collection.count() was deprecated in PyMongo 3.7 and removed in 4.0;
    # count_documents() is the supported replacement for a filtered count.
    total_count = MONGODB[database][collection].count_documents(condition)
    if not user_type:
        return list(cursor), total_count
    resp_items = []
    for report in cursor:
        per_type = report.get(user_type)
        if not per_type:
            continue
        # Flatten the user-type sub-document into the top-level report.
        report.update(per_type)
        resp_items.append(report)
    return resp_items, total_count


def is_number(s):
    """Return True if *s* can be interpreted as a number.

    Accepts anything ``float()`` parses, plus single unicode numeric
    characters (e.g. '½') via ``unicodedata.numeric``.

    :param s: candidate value, usually a string token.
    :return: bool; never raises for non-numeric inputs.
    """
    try:
        float(s)
        return True
    # Also catch TypeError: float(None) / float([]) raise it, and previously
    # it escaped this function instead of yielding False.
    except (TypeError, ValueError):
        pass
    try:
        import unicodedata
        unicodedata.numeric(s)
        return True
    except (TypeError, ValueError):
        pass
    return False


def _project_fmt(value_expr_arr, arr, project_dct):
    if len(value_expr_arr) < 3:
        arr.append("$" + value_expr_arr[0])
        return
    project_key = ""
    field = []
    for v in value_expr_arr:
        if is_number(v):
            field.append(float(v))
        elif '$' in v:
            field.append(arr[int(v[1:])])
        elif v == "+":
            project_key = '$add'
        elif v == "-":
            project_key = '$subtract'
        elif v == "*":
            project_key = '$multiply'
        elif v == "/":
            project_key = '$divide'
        else:
            if project_dct.get(v):
                field.append(project_dct.get(v))
            else:
                field.append("$" + v)
    if project_key == '$divide':
        switch = {
            "$switch": {
                "branches": [
                    {"case": {"$eq": [field[1], 0]}, "then": 0}
                ], "default": {project_key: [field[0], field[1]]}}
        }
        arr.append(switch)
    else:
        arr.append({project_key: [field[0], field[1]]})


def _get_project_arr(priority, project_dct):
    """Build the list of $project expressions for a priority-ordered token list.

    :param priority: iterable of token lists (one per sub-expression), in
        evaluation order; later entries may back-reference earlier results.
    :param project_dct: current $project mapping passed through to the
        per-expression formatter.
    :return: list of built expressions; the last entry is the final value.
    """
    expressions = []
    for expr_tokens in priority:
        _project_fmt(expr_tokens, expressions, project_dct)
    return expressions


def _create_aggregate_pipeline(condition, result_condition, sort_key, page, size, disable_paginate, user_type,
                               export_size_limit,
                               event_rule, time_interval_type, dynamic_argument_dct,
                               group_key, custom_group_by_field, skip=0):
    """Build the aggregation pipeline: $match -> $group -> $project -> $match -> $facet.

    :param condition: pre-group $match filter.
    :param result_condition: post-project $match filter (None means no filter).
    :param sort_key: comma-separable sort spec; '-' prefix means descending.
    :param disable_paginate: when True, window by *skip*/*export_size_limit*
        instead of page/size.
    :param event_rule: dict whose 'values' entries describe output fields;
        entries with a 'value_expr' are computed expressions, others are sums.
    :param dynamic_argument_dct: substitutions applied to '$'-marked
        placeholders inside a value_expr before parsing.
    :param group_key: explicit group _id (dict or field name); defaults to
        day (plus hour for hourly intervals).
    :param custom_group_by_field: extra accumulators merged into $group and
        echoed through $project.
    :return: list of pipeline stages for ``aggregate``.
    """
    # Result window: normal paging via paginator(), export mode via skip/limit.
    offset, limit = paginator(dict(page=page, size=size))
    if disable_paginate:
        offset = skip
        limit = export_size_limit if export_size_limit else limit
    # Day-sorted hourly reports also sort by hour descending within the day.
    effective_sort = sort_key
    if effective_sort in ('day', '-day') and time_interval_type == 'hour':
        effective_sort += ',-hour'
    sort_dct = {}
    for token in effective_sort.split(','):
        if token.startswith('-'):
            sort_dct[token[1:]] = -1
        else:
            sort_dct[token] = 1
    # Grouping key: explicit override, or day(+hour) derived from the interval.
    if group_key:
        _id = group_key if isinstance(group_key, dict) else '$' + group_key
    else:
        _id = {'day': '$day'} if time_interval_type == 'day' else {'day': '$day', 'hour': '$hour'}
    group_stage = {'$group': {'_id': _id, 'day': {'$first': '$day'}, 'hour': {'$first': '$hour'}}}
    project_stage = {'$project': {'_id': _id, 'day': '$day', 'hour': '$hour'}}
    for event_field in event_rule.get('values', []):
        value_id = event_field['value_id']
        if 'value_expr' not in event_field:
            # Plain metric: sum the (possibly user-type-nested) source field.
            source = '.'.join([user_type, value_id]) if user_type else value_id
            group_stage['$group'][value_id] = {'$sum': '$' + source}
            project_stage['$project'][value_id] = '$' + value_id
        else:
            value_expr = event_field['value_expr']
            if '$' in value_expr and dynamic_argument_dct:
                # Strip '$' placeholder markers, then substitute dynamic args.
                value_expr = value_expr.replace('$', '').format(**dynamic_argument_dct)
            priority_order = get_priority(value_expr)
            expressions = _get_project_arr(priority_order, project_stage['$project'])
            # The last expression is the fully-combined value for this field.
            project_stage['$project'][value_id] = expressions[-1]
    if custom_group_by_field:
        group_stage['$group'].update(custom_group_by_field)
        for key in custom_group_by_field:
            project_stage['$project'][key] = '$' + key
    return [
        {"$match": condition},
        group_stage,
        project_stage,
        {"$match": result_condition if result_condition is not None else dict()},
        {'$facet': {'result': [{'$sort': sort_dct}, {'$skip': offset}, {'$limit': limit}],
                    'pageInfo': [{'$group': {'_id': None, 'total_count': {'$sum': 1}}}]}}
    ]


def _get_aggregate_report(database, collection, condition, sort_key, page=1, size=20,
                          disable_paginate=False, user_type=None, export_size_limit=10000,
                          event_rule=None, time_interval_type=None, dynamic_argument_dct=None,
                          group_key=None, custom_group_by_field=None, result_condition=None,
                          custom_value_by_field=None, del_field=None):
    """Run the aggregation report, batching exports above BATCH_EXPORT_SIZE_LIMIT.

    :param event_rule: dict describing the output fields (see
        ``_create_aggregate_pipeline``); each field may carry an optional
        'fmt' callable applied to its value.
    :param custom_value_by_field: constant key/value pairs stamped onto every
        result row.
    :param del_field: keys removed from every result row before returning.
    :return: tuple ``(items, total_count)``.
    """
    assert database in settings.DWC_INDEX + settings.CP_INDEX + settings.MF_PAY_INDEX
    batches = 1
    if disable_paginate:
        batches = math.ceil(export_size_limit / BATCH_EXPORT_SIZE_LIMIT)
    reports = []
    total_count = 0
    for batch in range(batches):
        skip = batch * BATCH_EXPORT_SIZE_LIMIT
        export_size_limit = BATCH_EXPORT_SIZE_LIMIT
        aggregate_pipeline = _create_aggregate_pipeline(condition, result_condition, sort_key, page, size,
                                                        disable_paginate, user_type, export_size_limit,
                                                        event_rule, time_interval_type, dynamic_argument_dct,
                                                        group_key, custom_group_by_field, skip)
        query = MONGODB[database][collection].aggregate(aggregate_pipeline, allowDiskUse=True)
        query = query.next() if query.alive else {}
        reports += query.get('result', [])
        # pageInfo may be absent ({}), empty ([]) or missing 'total_count';
        # previously an empty facet dict produced None and `total_count += None`
        # raised TypeError. Coerce every missing case to 0.
        page_info = query.get('pageInfo') or [{}]
        total_count += page_info[0].get('total_count') or 0
        # NOTE(review): each batch's $facet counts the full filtered set, so
        # with multiple batches total_count is accumulated once per batch —
        # confirm this multiplier is intended for export mode.
    resp_items = []
    for report in reports:
        for event_field in (event_rule or {}).get('values', []):
            value_id = event_field['value_id']
            if 'fmt' in event_field:
                report[value_id] = event_field['fmt'](report[value_id])
            # Use .get(): a field absent from the row previously raised
            # KeyError here; default it to 0 like other falsy values.
            if not report.get(value_id):
                report[value_id] = 0
        if custom_value_by_field:
            for key, val in custom_value_by_field.items():
                report[key] = val
        if del_field:
            for k in del_field:
                del report[k]
        resp_items.append(report)
    return resp_items, total_count
