import asyncio
import json
import time
from typing import List, Set

from bson import ObjectId
from loguru import logger
from pymongo.errors import PyMongoError

import schemas
from exceptions import FieldCheckError, ModelReferenceError
from ioservice.activity import find_activities_by_query, count_activities, bulk_insert_activities, \
    bulk_update_activities, aggregate_activities, aggregate_activities_raw, delete_many_activities_by_ids
from ioservice.common.basic import mdb
from ioservice.common.mongo import lookup_oid_reference_multi_fields_same_type, lookup_oid_reference, aggregate_docs, \
    find_many_docs, find_many_models, find_mono_docs
from ioservice.const import institution_col, business_col, activity_col, activity_register_col, activity_institution_col
from ioservice.institution import check_institution_exist_by_ids, sync_institutions_data
from models.activity import Activity
from models.business import Business
from models.institution import Institution
from task_utils.task_es import job_update_activity, job_update_activity_contacts
from task_utils.task_jobs import job_join_act_contacts, job_update_ranking, job_update_ranking_v2
from tasks.computing import alumni as alumni_computing
from tasks.side_effect.after_delete import clear_activity_references
from utils.dbutil import get_sort_stages_for_order, get_lookup_stages, LookupConfig, convert2sort, shell_sort
from utils.jsonutil import JsonCustomEncoder
from loguru import logger
from utils.regionutil import get_region_area_mapping_stage


def build_agg_pipeline(query, params: schemas.SearchParam):
    """Assemble the match -> sort -> paginate stages for an activity aggregation."""
    stages = [{"$match": query}]
    stages += get_sort_stages_for_order(params.sort)
    stages.append({"$skip": params.offset})
    stages.append({"$limit": params.size})
    return stages


async def search_by_find(query, params: schemas.SearchParam):
    """Plain find, then resolve institution/business references in parallel."""
    activities = await find_activities_by_query(query, params.sort, params.offset, params.size, None)
    # Three activity fields all point at the institution collection.
    inst_field_map = {
        "institution_id": "institution",
        "participate_insts": "participates",
        "co_inst_ids": "co_institutions",
    }
    inst_lookup = lookup_oid_reference_multi_fields_same_type(
        activities, inst_field_map, institution_col, Institution, ["institution_name", "is_deleted"]
    )
    biz_lookup = lookup_oid_reference(
        activities, "business_id", "business", business_col, Business,
        ["name", "is_deleted", "first_type", "second_type", "third_type", "priority"]
    )
    await asyncio.gather(inst_lookup, biz_lookup)
    return activities


async def search_by_agg(query, params: schemas.SearchParam):
    """Aggregation-based search, used when sorting needs $lookup-ed data."""
    pipeline = build_agg_pipeline(query, params)
    inst_fields = ["institution_name", "is_deleted"]
    lookups = [
        LookupConfig(from_="business", local_field="business_id", foreign_field="_id", as_field="business",
                     fields=["name", "is_deleted", "first_type", "second_type", "third_type", "priority"],
                     ref_type="object"),
    ]
    # The three institution references differ only in local field / alias / arity.
    for local_field, as_field, ref_type in (
            ("institution_id", "institution", "object"),
            ("participate_insts", "participates", "list"),
            ("co_inst_ids", "co_institutions", "list")):
        lookups.append(LookupConfig(from_="institution", local_field=local_field, foreign_field="_id",
                                    as_field=as_field, fields=list(inst_fields), ref_type=ref_type))
    pipeline.extend(get_lookup_stages(lookups))
    return await aggregate_activities(pipeline)


async def search_by_agg_auditing(query, skip=0, limit=20, sort=None):
    """Fetch raw activity documents for the auditing/admin list views.

    The default ordering is newest-first (``-_id``); caller-supplied shell-style
    ``sort`` entries are appended after it.  When no query is given, soft-deleted
    documents are excluded by default.
    """
    _sort = ["-_id"]
    if sort:
        _sort.extend(sort)
    # NOTE(review): a dead `convert2sort(sort)` call whose result was discarded
    # has been removed here; find_mono_docs receives the shell-style sort list.

    query = query or {"is_deleted": {"$ne": True}}

    act_docs = await find_mono_docs(activity_col, query=query, sort=_sort, offset=skip, size=limit)
    return act_docs


async def do_search(query, params: schemas.SearchParam, special_order_fields: Set):
    """Dispatch to the aggregation search when the primary sort needs $lookup data."""
    needs_agg = bool(params.sort) and params.sort[0] in special_order_fields
    searcher = search_by_agg if needs_agg else search_by_find
    return await searcher(query, params)


async def search_activity_by_param(params: schemas.SearchParam):
    """Search activities; routes compose searches to ActivityComposeSearcher."""
    if params.current:
        params.initial_pagination()
    # Front-end compose search.
    if params.search_func == "activity_compose":  # TODO search_func needs a centralized configuration place
        return await ActivityComposeSearcher(params).search()
    # Admin search path.
    query = params.get_mongo_query(schemas.ActivityFilter)
    total = await count_activities(query)
    items = []
    if params.size and total:
        items = await do_search(query, params, {"start_time", "end_time", "people_num"})
    return schemas.SearchResult(items=items, total=total)


async def search_activity_by_param_with_user(params: schemas.SearchParam, perm_ids):
    """Search activities, restricted to the user's permitted institutions.

    ``perm_ids`` are institution id strings the user may see; an activity
    matches when it is hosted or co-hosted by one of them.  The returned
    ``counts`` is the number of attended registrations over ALL matching
    activities, not just the current page.

    Side effects: schedules celery jobs that refresh per-activity contact
    counts and the ranking data.
    """
    if params.current:
        params.initial_pagination()
    # Front-end compose search.
    if params.search_func == "activity_compose":  # TODO search_func needs a centralized configuration place
        return await ActivityComposeSearcher(params).search()

    # Admin search path.
    query = params.get_mongo_query(schemas.ActivityFilter) or {}

    # An explicit institution filter matches both hosted and co-hosted activities.
    if "institution_id" in query:
        _institution_id = query.pop("institution_id")
        query["$and"] = [{"$or": [{"institution_id": _institution_id},
                                  {"co_inst_ids": _institution_id}]}]
    if perm_ids:
        perm_oids = [ObjectId(_id) for _id in perm_ids]
        cond_ = {"$or": [
            {"institution_id": {"$in": perm_oids}},
            {"co_inst_ids": {"$in": perm_oids}},
        ]}
        query.setdefault("$and", []).append(cond_)

    sort = shell_sort(params.sort) if params.sort else None

    # Collect all matching ids for the total and the attended-contacts count.
    col_act = mdb.get_collection(activity_col)
    act_ids = [act["_id"] async for act in col_act.find(query, {"_id": 1})]
    total = len(set(act_ids))
    col_reg = mdb.get_collection(activity_register_col)
    contacts = await col_reg.count_documents({"activity_id": {"$in": act_ids}, "is_attended": True})

    activities_list = await search_by_agg_auditing(query, skip=params.offset, limit=params.size, sort=sort)
    update_act_ids = []
    for d in activities_list:
        d["id"] = str(d.pop("_id"))
        update_act_ids.append(d["id"])
    # Write-backs happen asynchronously in the celery jobs below.
    job_join_act_contacts.apply_async(args=[update_act_ids], countdown=40, expires=70)
    job_update_ranking.apply_async(countdown=60, expires=120)
    job_update_ranking_v2.apply_async(countdown=90, expires=150)
    return schemas.SearchResult(items=activities_list, total=total,
                                counts=contacts)


async def get_activity_list(params, perm_ids):
    """Return the full (unpaged) permission-filtered activity list.

    Builds the same query as search_activity_by_param_with_user but sets the
    fetch limit to the total number of matching ids, so everything from
    ``params.offset`` onward is returned.
    """
    if params.current:
        params.initial_pagination()
    # Front-end compose search.
    if params.search_func == "activity_compose":  # TODO search_func needs a centralized configuration place
        return await ActivityComposeSearcher(params).search()

    # Admin search path.
    query = params.get_mongo_query(schemas.ActivityFilter) or {}

    # An explicit institution filter matches both hosted and co-hosted activities.
    if "institution_id" in query:
        _institution_id = query.pop("institution_id")
        query["$and"] = [{"$or": [{"institution_id": _institution_id},
                                  {"co_inst_ids": _institution_id}]}]
    if perm_ids:
        perm_oids = [ObjectId(_id) for _id in perm_ids]
        cond_ = {"$or": [
            {"institution_id": {"$in": perm_oids}},
            {"co_inst_ids": {"$in": perm_oids}},
        ]}
        query.setdefault("$and", []).append(cond_)

    col_act = mdb.get_collection(activity_col)
    act_ids = [act["_id"] async for act in col_act.find(query, {"_id": 1})]

    # Limit equals the distinct match count, i.e. "fetch all remaining".
    return await search_by_agg_auditing(query, skip=params.offset, limit=len(set(act_ids)))


async def search_institution_count_by_param_with_user(params: schemas.SearchParam, perm_ids):
    """Sum institution_num over the user's permission-filtered activity list."""
    acts = await get_activity_list(params, perm_ids)
    counted = 0
    for act in acts:
        counted += act.get('institution_num', 0)
    return counted


async def crud_activity_by_alumni(person, params: schemas.SearchParam):
    """act_reg -> act -> inst
    List the activities a given person has registered for."""
    reg_col = mdb.get_collection(activity_register_col)
    activity_ids = await reg_col.find({"person_id": ObjectId(person)}).distinct("activity_id")

    query = params.get_mongo_query(schemas.ActivityFilter) or {}

    if not activity_ids:
        return schemas.SearchResult(items=[], total=0,
                                    counts=0)

    query["_id"] = {"$in": [ObjectId(aid) for aid in activity_ids]}

    # An institution filter matches both hosted and co-hosted activities.
    if "institution_id" in query:
        inst_id = query.pop("institution_id")
        query["$and"] = [{"$or": [{"institution_id": inst_id},
                                  {"co_inst_ids": inst_id}]}]

    sort_spec = shell_sort(params.sort) if params.sort else None
    total = await count_activities(query)
    docs = await search_by_agg_auditing(query, skip=params.offset, limit=params.size, sort=sort_spec)
    for doc in docs:
        doc["id"] = str(doc["_id"])
    return schemas.SearchResult(items=docs, total=total)

async def crud_activity_by_institution(institution, params: schemas.SearchParam):
    """act_reg -> act -> inst
    List the activities linked to a given institution."""
    link_col = mdb.get_collection(activity_institution_col)
    activity_ids = await link_col.find({"institution_id": ObjectId(institution)}).distinct("activity_id")

    query = params.get_mongo_query(schemas.ActivityFilter) or {}

    if not activity_ids:
        return schemas.SearchResult(items=[], total=0,
                                    counts=0)

    query["_id"] = {"$in": [ObjectId(aid) for aid in activity_ids]}

    # An institution filter matches both hosted and co-hosted activities.
    if "institution_id" in query:
        inst_id = query.pop("institution_id")
        query["$and"] = [{"$or": [{"institution_id": inst_id},
                                  {"co_inst_ids": inst_id}]}]

    sort_spec = shell_sort(params.sort) if params.sort else None
    total = await count_activities(query)
    docs = await search_by_agg_auditing(query, skip=params.offset, limit=params.size, sort=sort_spec)
    for doc in docs:
        doc["id"] = str(doc["_id"])
    return schemas.SearchResult(items=docs, total=total)



async def bulk_check_activities(activities: List[Activity]):
    """Validate that every institution referenced by *activities* exists.

    Collects host (institution_id), participant (participate_insts) and
    co-host (co_inst_ids) ids across all activities, checks them in a single
    query, and raises ModelReferenceError with per-field FieldCheckError
    details when any are missing.

    NOTE: business_id existence checking is intentionally disabled.
    """
    insts_ids, participate_ids, co_inst_ids = set(), set(), set()
    for a in activities:
        if a.institution_id:
            insts_ids.add(str(a.institution_id))
        if a.participate_insts:
            participate_ids.update(str(i) for i in a.participate_insts)
        if a.co_inst_ids:
            co_inst_ids.update(str(i) for i in a.co_inst_ids)

    missing_insts = await check_institution_exist_by_ids(
        list(insts_ids | participate_ids | co_inst_ids))

    errors = []
    if missing_insts:
        missing_insts = set(missing_insts)
        # Bug fix: the institution_id error previously joined ALL missing ids
        # (missing_insts) instead of only the missing host ids.
        per_field_missing = (
            ("institution_id", insts_ids & missing_insts),
            ("participate_insts", participate_ids & missing_insts),
            ("co_inst_ids", co_inst_ids & missing_insts),
        )
        for field, missing in per_field_missing:
            if missing:
                errors.append(FieldCheckError(
                    field=field, error=f"There are non existent ids: {', '.join(missing)}"))
    if errors:
        raise ModelReferenceError("activity", details=errors)


async def bulk_insert_activities_with_checking(activities: List[Activity]):
    """Validate institution references, then bulk-insert the activities."""
    await bulk_check_activities(activities)
    inserted = await bulk_insert_activities(activities)
    return inserted


async def bulk_update_activities_with_checking(activities: List[Activity]):
    """Validate references, apply the bulk update, then fire async side effects.

    The previous host institutions are captured before the update so the
    side-effect task can resync both old and new institutions.
    """
    await bulk_check_activities(activities)
    previous_hosts = await get_old_institutions(activities)
    outcome = await bulk_update_activities(activities)
    asyncio.ensure_future(after_alter_activities(activities, previous_hosts))
    return outcome


async def get_old_institutions(activities: List[Activity]):
    """Return the set of institution_ids currently stored for *activities*.

    Used before a bulk update to detect host-institution changes.
    """
    query = {"_id": {"$in": [a.id for a in activities]}}
    # NOTE(review): positional args pass sort=0, offset=0 here, unlike the
    # five-argument call in search_by_find — confirm find_activities_by_query
    # accepts a defaulted size and treats 0 as "no sort / no offset".
    activities = await find_activities_by_query(query, 0, 0, fields=["institution_id"])
    institution_ids = {a.institution_id for a in activities}
    return institution_ids


async def after_alter_activities(activities: List[Activity], old_institutions: set):
    """Post-update side effects: resync computed fields touched by the changes.

    - Activities whose business_id/duration changed trigger alumni recomputation.
    - Old host institutions (captured before the update) are always resynced.
    - Newly assigned host institutions are resynced as well.
    """
    changed_act_ids = []
    new_inst_ids = set()
    for act in activities:
        touched = set(act.dict(exclude_unset=True).keys())
        if touched & {"business_id", "duration"}:
            changed_act_ids.append(act.id)
        if "institution_id" in touched and act.institution_id and act.institution_id not in old_institutions:
            new_inst_ids.add(act.institution_id)

    if changed_act_ids:
        logger.info(f"after_alter_activities::update_alumni_computing_fields activities number: {len(changed_act_ids)}")
        await alumni_computing.after_update_activity([str(i) for i in changed_act_ids])
    if old_institutions:  # resync the previous host institutions
        logger.info(
            f"after_alter_activities::update old institutions computing fields, institutions number: {len(old_institutions)}")
        await sync_institutions_data([str(i) for i in old_institutions])
    if new_inst_ids:  # resync the newly assigned host institutions
        logger.info(
            f"after_alter_activities::update new institutions computing fields, institutions number: {len(new_inst_ids)}")
        await sync_institutions_data([str(i) for i in new_inst_ids])


async def bulk_delete_activities(activity_ids):
    """Delete the given activities and schedule reference cleanup in the background."""
    removed = await delete_many_activities_by_ids(activity_ids)
    asyncio.ensure_future(clear_activity_references(activity_ids))
    return removed


class ActivitySearchQueryBuilder(object):
    """Builds mongo aggregation pipelines for the compose (front-end) search.

    Splits the incoming SearchParam filter into a basic-field query (matched
    before the $lookup stages) and a cross-collection query (matched after),
    then assembles either a paged result pipeline or a $facet statistics
    pipeline over the same filters.
    """

    # Sort fields that need dedicated sort stages rather than a plain $sort.
    special_order_fields = {"start_time", "end_time", "people_num"}
    # Result fields that come from other collections via $lookup.
    cross_col_fields = {"institution", "business", "participates", "co_institutions"}
    # Reshapes each $group output into {term, count} documents.
    after_agg_project = [{"$project": {"count": 1, "term": "$_id", "_id": 0}}]
    # NOTE: mongo does not accept dotted field names (e.g. region.area) as keys
    # of aggregation results, so aggregation-related fields use underscore names
    # (e.g. region_area) — the search API accepts the underscore form as well.
    agg2pipeline = {
        "region_area": [{"$group": {"_id": "$region.area", "count": {"$sum": 1}}}],
        "start_year": [{"$match": {"start_time": {"$exists": True, "$ne": None}}},
                       {"$group": {"_id": {"$year": "$start_time"}, "count": {"$sum": 1}}}],
        "institution_name": [{"$group": {"_id": "$institution.institution_name", "count": {"$sum": 1}}}],
        "business_first_type": [{"$project": {"first_type": "$business.first_type"}},
                                {"$unwind": {"path": "$first_type", "preserveNullAndEmptyArrays": False}},
                                {"$group": {"_id": "$first_type", "count": {"$sum": 1}}}]
    }
    filter2query_builder = {  # query builders for cross-collection filter fields
        "institution_name": lambda v: {"institution.institution_name": {"$regex": v}},
        "business_first_type": lambda v: {"business.first_type": v},
        "region_area": lambda v: {"region.area": v}
    }

    def __init__(self, params: schemas.SearchParam):
        self.params = params
        self.basic_fields_query = None  # query over fields stored on the activity itself
        self.cross_col_query = None  # query over $lookup-ed fields, applied post-lookup
        self.__prepare_params()

    def __prepare_params(self):
        """Split params into the basic-field query and the cross-collection query."""
        query = self.params.get_mongo_query(schemas.ActivityFilter)
        cross_col_query = {}
        for field, query_builder in self.filter2query_builder.items():
            if self.params.filter.get(field):
                cross_col_query.update(query_builder(self.params.filter[field]))

        self.basic_fields_query = query
        self.cross_col_query = cross_col_query

    def build_query_pipeline(self):
        """Build the paged result pipeline: match, lookups, post-match, sort, paginate."""
        pipeline = []
        if self.basic_fields_query:
            pipeline.append({"$match": self.basic_fields_query})
        pipeline.append(get_region_area_mapping_stage())
        lookup_configs = [
            LookupConfig(from_="business", local_field="business_id", foreign_field="_id", as_field="business",
                         fields=["name", "is_deleted", "first_type", "second_type", "third_type"], ref_type="object"),
            LookupConfig(from_="institution", local_field="institution_id", foreign_field="_id", as_field="institution",
                         fields=["institution_name", "is_deleted"], ref_type="object"),
            LookupConfig(from_="institution", local_field="participate_insts", foreign_field="_id",
                         as_field="participates",
                         fields=["institution_name", "is_deleted"], ref_type="list"),
            LookupConfig(from_="institution", local_field="co_inst_ids", foreign_field="_id",
                         as_field="co_institutions",
                         fields=["institution_name", "is_deleted"], ref_type="list"),
        ]
        pipeline.extend(get_lookup_stages(lookup_configs))
        # The cross-collection filter can only match after the lookups ran.
        if self.cross_col_query:
            pipeline.append({"$match": self.cross_col_query})

        if self.params.sort:
            if self.params.sort[0] in self.special_order_fields:
                pipeline.extend(get_sort_stages_for_order(self.params.sort))
            else:
                pipeline.append({"$sort": convert2sort(self.params.sort, is_shell=True)})
        pipeline.extend([
            {"$skip": self.params.offset}, {"$limit": self.params.size}
        ])
        return pipeline

    def build_statistic_pipeline(self):
        """Build the $facet pipeline: overview count plus requested aggregations."""
        pipeline = []
        if self.basic_fields_query:
            pipeline.append({"$match": self.basic_fields_query})
        pipeline.append(get_region_area_mapping_stage())
        lookup_configs = [
            LookupConfig(from_="business", local_field="business_id", foreign_field="_id", as_field="business",
                         fields=["name", "first_type"], ref_type="object"),
            LookupConfig(from_="institution", local_field="institution_id", foreign_field="_id", as_field="institution",
                         fields=["institution_name"], ref_type="object"),
        ]
        pipeline.extend(get_lookup_stages(lookup_configs))
        if self.cross_col_query:
            pipeline.append({"$match": self.cross_col_query})
        facet = {"overview": [{"$group": {"_id": None, "count": {"$sum": 1}}}]}
        for agg in self.params.aggs:
            if agg in self.agg2pipeline:
                facet[agg] = self.agg2pipeline[agg] + self.after_agg_project
        pipeline.append({"$facet": facet})
        return pipeline


class StatsResultHelper(object):
    """Helpers for massaging $facet aggregation output into schema objects."""

    # Terms treated as "no value"; they sort last and display as "其他".
    null_value = {None, "无", "其他"}

    @classmethod
    def sort_func(cls, item: schemas.AggregateItem):
        """Sort key: null-ish terms last, then by count."""
        return (item.term not in cls.null_value, item.count)

    @classmethod
    def sort_by_term(cls, item: schemas.AggregateItem):
        """Sort key: null-ish terms last, then by the term itself."""
        return (item.term not in cls.null_value, item.term)

    @classmethod
    def warp_stats_item(cls, stats_item: dict):
        """Parse one raw {term, count} dict, folding null-ish terms into "其他"."""
        parsed = schemas.AggregateItem.parse_obj(stats_item)
        if parsed.term in cls.null_value:
            parsed.term = "其他"
        return parsed

    @classmethod
    def warp_stats_result(cls, stats_result: dict):
        """Turn the $facet result dict (minus 'overview') into AggregateResults."""
        wrapped = []
        for name, raw_items in stats_result.items():
            if name == "overview":
                continue
            key_func = cls.sort_by_term if name == "start_year" else cls.sort_func
            agg_result = schemas.AggregateResult(
                name=name, items=[cls.warp_stats_item(raw) for raw in raw_items])
            agg_result.items.sort(key=key_func, reverse=True)
            wrapped.append(agg_result)
        return wrapped

    @classmethod
    def get_total(cls, stats_result: dict):
        """Total match count from the 'overview' facet; 0 when it is empty."""
        overview = stats_result.get('overview')
        if not overview:
            return 0
        return overview[0]['count']


class ActivityComposeSearcher(object):
    """Front-end compose search: paged results and facet statistics in parallel."""

    def __init__(self, params: schemas.SearchParam):
        self.params = params
        self.query_builder = ActivitySearchQueryBuilder(self.params)

    async def search(self):
        """Run the result and statistics pipelines concurrently.

        The result query is skipped entirely when params.size is falsy
        (count/stats-only request).  PyMongoError is logged with both
        pipelines for diagnosis, then re-raised.
        """
        query_pipeline = self.query_builder.build_query_pipeline()
        stats_pipeline = self.query_builder.build_statistic_pipeline()
        try:
            query_task = None
            if self.params.size:
                query_task = asyncio.create_task(aggregate_activities(query_pipeline))
            stats_task = asyncio.create_task(aggregate_activities_raw(stats_pipeline))
            stats_result = (await stats_task)[0]
            items = await query_task if query_task else []
            return schemas.SearchResult(
                items=items,
                total=StatsResultHelper.get_total(stats_result),
                aggregation=StatsResultHelper.warp_stats_result(stats_result)
            )
        except PyMongoError as e:
            logger.error(f"ActivityComposeSearcher::get mongo error: {e}::"
                         f"query_pipeline: {json.dumps(query_pipeline, ensure_ascii=False, cls=JsonCustomEncoder)}, "
                         f"stats_pipeline: {json.dumps(stats_pipeline, ensure_ascii=False, cls=JsonCustomEncoder)}")
            raise e
