import datetime
import json
import os
import sys
import traceback
from operator import itemgetter

from fastapi import APIRouter
from re_common.baselibrary.utils.basedir import BaseDir
from re_common.baselibrary.utils.basefile import BaseFile
from re_common.baselibrary.utils.core.mlamada import bools_string
from re_common.baselibrary.utils.core.requests_core import MsgCode

from apps.core.m_route import ContextIncludedRoute
from apps.core.return_info import InputInfoModel, ReturnInfo, SUCCESS
from apps.crawler_platform.core_api.models import SaveMongoModel, SaveModel, SelectMongoModel, ResetMongoModel, \
    SavePathModel
from apps.sql_app.mmongodb import Coll
from settings import get_settings

router = APIRouter(route_class=ContextIncludedRoute)

def _write_big_json(sets, id_, save_dict):
    """Spill an oversized document to local disk as ``<id_>.big_json``.

    Tries up to ``sets.MONGO_SAVE_TRY_CNT`` times, verifying after each write
    that the file really exists. Returns the file path on success, or ``None``
    when every attempt failed.
    """
    attempt = 0
    while attempt < sets.MONGO_SAVE_TRY_CNT:
        try:
            save_path = BaseFile.get_new_path(sets.MONGO_BASE_DIR, sets.MONGO_SAVE_PATH)
            BaseDir.create_dir(save_path)
            save_path = BaseFile.get_new_path(save_path, "{}.big_json".format(id_))
            BaseFile.single_write_file(save_path, json.dumps(save_dict, ensure_ascii=False))
            if BaseFile.is_file_exists(save_path):
                return save_path
            attempt += 1
        except Exception:
            attempt += 1
    return None


@router.post("/mongo_api/save_mongo")
async def save_mongo(input: InputInfoModel[SaveMongoModel]):
    """
    Save downloaded data into mongo. Outline:

    1. Assemble the document _id from the id-generation rule and query mongo:
       insert a new record when absent, otherwise update the existing one.
    2. Main fields of the stored document:
        new_data{}: cache of fresh data; ``data_op`` decides whether the
            incoming ``data`` updates it (0, the default) or replaces it
        latest_data{}: cache of the most recently updated entry — equal to
            the newest item in ``history``
        history[{}...]: historical entries, capped by ``his_cnt``
        When ``is_his == 'True'`` new_data is folded into ``history`` and
        latest_data is refreshed.
    Documents larger than MONGO_SAVE_MAX_SIZE are written to local disk and
    mongo only keeps a stub containing the file path.

    :param input: para{"id_list":["hh","jj"],"hh":"test","jj":"tag"}; id_list
        gives the order in which the _id pieces are joined.
        data{"down_data":{...},"sql_data":{...}}: down_data is the downloaded
        page data, sql_data the mysql task-query result.
    :return: ReturnInfo dict
    """
    return_info = ReturnInfo()
    sets = get_settings()
    table = input.data.table
    para = input.data.para
    id_list = para["id_list"]
    is_his = input.data.is_his
    his_cnt = input.data.his_cnt
    data = input.data.data
    data_op = input.data.data_op
    # The document _id is the ordered join of the para values named in id_list.
    id_ = "_".join([para[tmp] for tmp in id_list])
    try:
        mongo_conn = Coll.get_table_conn(table)  # mongo collection handle for this table name
    except Exception:
        return_info.status = bools_string(False)
        return_info.msg_code = MsgCode.MONGO_ERROR
        return_info.msg = "根据table获得mongo链接失败"
        return_info.data = {"err_except": traceback.format_exc()}
        return return_info.todict()
    cjip = await mongo_conn.find_one({"_id": id_})
    down_time = str(datetime.datetime.now())
    down_date = datetime.datetime.now().strftime("%Y%m%d")
    if cjip:
        # A "path" field means the real payload was spilled to disk earlier;
        # reload it before merging.
        if "path" in cjip:
            try:
                cjip = json.loads(BaseFile.single_read_file(cjip["path"]))
            except Exception:
                return_info.status = bools_string(False)
                return_info.msg_code = MsgCode.SERVER_ERROR
                return_info.msg = "获取本地磁盘存放数据失败"
                return_info.data = {}
                return return_info.todict()
        new_data = cjip["new_data"]
        history = cjip["history"]
        # Merge or replace new_data with the incoming data according to data_op.
        if len(new_data) != 0 and data_op == 0:
            new_data.update(data)
        else:
            new_data = data
        new_data["down_date"] = down_date
        new_data["down_time"] = down_time
        if is_his == 'True':
            # Today's entry in the history list, if any.
            his_item = next((item for item in history if item.get("down_date") == down_date), "")
            if his_item != "":  # already saved today
                if data_op == 0:  # merge new_data into today's item
                    his_item.update(new_data)
                    last_data = his_item
                else:  # replace today's item with new_data
                    history.remove(his_item)
                    history.append(new_data)
                    last_data = new_data
            else:  # first save of the day
                # Trim the oldest entries so history stays bounded by his_cnt.
                # (Was `== his_cnt`, which could never shrink a list that had
                # already grown past the cap, e.g. after his_cnt was lowered.)
                if len(history) >= his_cnt:
                    history.sort(key=itemgetter("down_date"))
                    del history[:len(history) - his_cnt + 1]
                last_data = new_data
                history.append(last_data)
                history.sort(key=itemgetter("down_date"), reverse=True)
            cjip["last_data"] = last_data
            new_data = {}
        dicts = SaveModel.parse_obj(cjip).dict()
        dicts["new_data"] = new_data
        dicts["history"] = history
        dicts["update_time"] = down_time
        # NOTE(review): the full timestamp is stored under the "down_date" key
        # here, not the yyyymmdd date — looks like a typo for `down_date`;
        # kept as-is to preserve stored-document compatibility. TODO confirm.
        dicts["down_date"] = down_time
        save = SaveModel.parse_obj(dicts)
        save_dict = save.dict(by_alias=True, exclude={"id"})
        if get_size(save_dict) > sets.MONGO_SAVE_MAX_SIZE:
            save_path = _write_big_json(sets, id_, save_dict)
            if save_path is None:
                return_info.status = bools_string(False)
                return_info.msg_code = MsgCode.SERVER_ERROR
                return_info.msg = "保存数据至本地磁盘失败,重复次数:{}".format(sets.MONGO_SAVE_TRY_CNT)
                return_info.data = {}
                return return_info.todict()
            save = SavePathModel(
                _id=id_,
                path=save_path,
                update_time=str(datetime.datetime.now()),
                create_time=str(datetime.datetime.now())
            )
            save_dict = save.dict(by_alias=True, exclude={"id"})
        result = await mongo_conn.replace_one({'_id': id_}, save_dict)
        if result.matched_count != 1 and result.modified_count != 1:
            return_info.status = bools_string(False)
            return_info.msg_code = MsgCode.MONGO_ERROR
            return_info.msg = "更新mongo失败,匹配条数:{},影响条数:{}".format(result.matched_count, result.modified_count)
            return_info.data = {}
            return return_info.todict()
    else:
        # No existing record: build a fresh document.
        last_data = {}
        history = []
        data["down_date"] = down_date
        data["down_time"] = down_time
        if is_his == 'False':
            new_data = data
        else:
            last_data = data
            history.append(last_data)
            new_data = {}
        save = SaveModel(
            _id=id_,
            para=para,
            last_data=last_data,
            new_data=new_data,
            history=history,
            update_time=str(datetime.datetime.now()),
            create_time=str(datetime.datetime.now())
        )
        save_dict = save.dict(by_alias=True, exclude_none=True)
        if get_size(save_dict) > sets.MONGO_SAVE_MAX_SIZE:
            save_path = _write_big_json(sets, id_, save_dict)
            if save_path is None:
                return_info.status = bools_string(False)
                return_info.msg_code = MsgCode.SERVER_ERROR
                return_info.msg = "保存数据至本地磁盘失败,重复次数:{}".format(sets.MONGO_SAVE_TRY_CNT)
                return_info.data = {}
                return return_info.todict()
            save = SavePathModel(
                _id=id_,
                path=save_path,
                update_time=str(datetime.datetime.now()),
                create_time=str(datetime.datetime.now())
            )
            save_dict = save.dict(by_alias=True, exclude_none=True)
        result = await mongo_conn.insert_one(save_dict)
        if result.inserted_id != id_:
            return_info.status = bools_string(False)
            return_info.msg_code = MsgCode.MONGO_ERROR
            return_info.msg = "插入mongo失败"
            return_info.data = {}
            return return_info.todict()
    return_info.status = SUCCESS
    return_info.msg_code = 200
    return_info.msg = "保存到mongodb成功"
    return_info.data = ""
    return return_info.todict()

# NOTE(review): legacy 2021-06-11 variant of save_mongo — not registered on the
# router and missing the big-document disk-spill handling; kept for reference.
async def save_mongo_20210611(input: InputInfoModel[SaveMongoModel]):
    """
    Save downloaded data into mongo (legacy version). Outline:
    1. Assemble the _id from the id-generation rule and query mongo; insert a
       new record when absent, otherwise update the existing one.
    2. Main fields of the mongo document:
        new_data{}: cache of fresh data; data_op decides whether the incoming
            data updates it (0, the default) or replaces it
        latest_data{}: cache of the most recently updated entry — equal to the
            newest item in history
        history[{}...]: historical entries, capped by his_cnt
        When is_his == 'True', new_data is folded into history and latest_data
        is refreshed.
    :param input: para{"id_list":["hh","jj"],"hh":"test","jj":"tag"}; id_list gives the order in which the _id pieces are joined
    :param data: data{"down_data":{...},"sql_data":{...}} down_data: downloaded page data, sql_data: mysql task-query result
    :return:
    """
    return_info = ReturnInfo()
    ids = input.data.id  # NOTE(review): unused local
    table = input.data.table
    para = input.data.para
    id_list = para["id_list"]
    is_his = input.data.is_his
    his_cnt = input.data.his_cnt
    data = input.data.data
    data_op = input.data.data_op
    id_ = "_".join([para[tmp] for tmp in id_list])
    try:
        mongo_conn = Coll.get_table_conn(table)  # mongo collection handle for this table name
    except:  # NOTE(review): bare except also swallows SystemExit/KeyboardInterrupt
        return_info.status = bools_string(False)
        return_info.msg_code = MsgCode.MONGO_ERROR
        return_info.msg = "根据table获得mongo链接失败"
        return_info.data = {"err_except": traceback.format_exc()}
        return return_info.todict()
    cjip = await mongo_conn.find_one({"_id": id_})
    down_time = str(datetime.datetime.now())
    down_date = datetime.datetime.now().strftime("%Y%m%d")
    if cjip:
        new_data = cjip["new_data"]
        history = cjip["history"]
        # Merge or replace new_data with the incoming data according to data_op.
        if len(new_data) != 0:
            if data_op == 0:
                new_data.update(data)
            else:
                new_data = data
        else:
            new_data = data
        new_data["down_date"] = down_date
        new_data["down_time"] = down_time
        if is_his == 'True':
            his_item = next((item for item in history if item.get("down_date") == down_date), "")  # today's item in the history list, if any
            if his_item != "":  # already saved today
                if data_op == 0:  # merge new_data into today's history item
                    his_item.update(new_data)
                    last_data = his_item
                else:  # replace today's history item with new_data
                    history.remove(his_item)
                    history.append(new_data)
                    last_data = new_data
            else:  # first save of the day
                if len(history) == his_cnt:  # once history holds his_cnt entries, drop the oldest one
                    history.sort(key=itemgetter("down_date"), reverse=False)
                    history.pop(0)
                last_data = new_data
                history.append(last_data)
                history.sort(key=itemgetter("down_date"), reverse=True)
            cjip["last_data"] = last_data
            new_data = {}
        dicts = SaveModel.parse_obj(cjip).dict()
        dicts["new_data"] = new_data
        dicts["history"] = history
        dicts["update_time"] = down_time
        dicts["down_date"] = down_time  # NOTE(review): full timestamp under the "down_date" key — presumably a typo for down_date; confirm
        save = SaveModel.parse_obj(dicts)
        result = await mongo_conn.replace_one({'_id': id_}, save.dict(by_alias=True, exclude={"id"}))
        if result.matched_count != 1 and result.modified_count != 1:
            return_info.status = bools_string(False)
            return_info.msg_code = MsgCode.MONGO_ERROR
            return_info.msg = "更新mongo失败,匹配条数:{},影响条数:{}".format(result.matched_count, result.modified_count)
            return_info.data = {}
            return return_info.todict()
    else:
        last_data = {}
        history = []
        ### data --> file directory == path  new_data = {"down_date":2020,"down_time":2020,"path":path}
        data["down_date"] = down_date
        data["down_time"] = down_time
        if is_his == 'False':
            new_data = data
        else:
            last_data = data
            history.append(last_data)
            new_data = {}
        save = SaveModel(
            _id=id_,
            para=para,
            last_data=last_data,
            new_data=new_data,
            history=history,
            update_time=str(datetime.datetime.now()),
            create_time=str(datetime.datetime.now())
        )
        result = await mongo_conn.insert_one(save.dict(by_alias=True, exclude_none=True))
        if result.inserted_id != id_:
            return_info.status = bools_string(False)
            return_info.msg_code = MsgCode.MONGO_ERROR
            return_info.msg = "插入mongo失败"
            return_info.data = {}
            return return_info.todict()
    return_info.status = SUCCESS
    return_info.msg_code = 200
    return_info.msg = "保存到mongodb成功"
    return_info.data = ""
    return return_info.todict()


def get_size(obj, seen=None):
    """Recursively estimate the in-memory size of *obj* in bytes.

    Follows dict keys and values, instance ``__dict__`` attributes, and
    generic iterables (but not str/bytes/bytearray, whose ``getsizeof``
    already covers their payload). Every visited object id is recorded in
    *seen*, so shared objects are counted once and reference cycles cannot
    recurse forever.

    :param obj: any Python object
    :param seen: internal set of already-counted object ids (callers omit it)
    :return: total size in bytes (implementation-dependent, per sys.getsizeof)
    """
    if seen is None:
        seen = set()
    obj_id = id(obj)
    if obj_id in seen:
        return 0  # already counted via another reference
    seen.add(obj_id)
    size = sys.getsizeof(obj)
    if isinstance(obj, dict):
        size += sum(get_size(k, seen) + get_size(v, seen) for k, v in obj.items())
    elif hasattr(obj, '__dict__'):
        size += get_size(obj.__dict__, seen)
    elif hasattr(obj, '__iter__') and not isinstance(obj, (str, bytes, bytearray)):
        size += sum(get_size(i, seen) for i in obj)
    return size


@router.post("/mongo_api/reset_new_data")
async def reset_new_data(input: InputInfoModel[ResetMongoModel]):
    """
    Restore a document's new_data field from its last_data field.

    Only permitted when the stored new_data is empty and last_data is not;
    otherwise an error is returned. Documents whose payload was spilled to
    disk (they carry a "path" field) are reloaded from the file first, and
    the rewritten document is spilled back to disk when it exceeds
    MONGO_SAVE_MAX_SIZE.

    :param input: carries the document id and the table (collection) name
    :return: ReturnInfo dict
    """
    return_info = ReturnInfo()
    sets = get_settings()
    id_ = input.data.id
    table = input.data.table
    try:
        mongo_conn = Coll.get_table_conn(table)  # mongo collection handle for this table name
    except Exception:
        return_info.status = bools_string(False)
        return_info.msg_code = MsgCode.MONGO_ERROR
        return_info.msg = "根据table获得mongo链接失败"
        return_info.data = {"err_except": traceback.format_exc()}
        return return_info.todict()
    cjip = await mongo_conn.find_one({"_id": id_})
    if cjip:
        # A "path" field means the real payload lives in a local file.
        if "path" in cjip:
            try:
                cjip = json.loads(BaseFile.single_read_file(cjip["path"]))
            except Exception:
                return_info.status = bools_string(False)
                return_info.msg_code = MsgCode.SERVER_ERROR
                return_info.msg = "获取本地磁盘存放数据失败"
                return_info.data = {}
                return return_info.todict()
        new_data = cjip["new_data"]
        last_data = cjip["last_data"]
        if len(new_data) == 0 and len(last_data) > 0:
            dicts = SaveModel.parse_obj(cjip).dict()
            dicts["new_data"] = last_data
            save = SaveModel.parse_obj(dicts)
            save_dict = save.dict(by_alias=True, exclude={"id"})
            if get_size(save_dict) > sets.MONGO_SAVE_MAX_SIZE:
                # Too big for mongo: spill to disk, retrying a bounded number
                # of times, and store only a path stub in mongo.
                save_cnt = 0
                while save_cnt < sets.MONGO_SAVE_TRY_CNT:
                    try:
                        save_path = BaseFile.get_new_path(sets.MONGO_BASE_DIR, sets.MONGO_SAVE_PATH)
                        BaseDir.create_dir(save_path)
                        save_path = BaseFile.get_new_path(save_path, "{}.big_json".format(id_))
                        BaseFile.single_write_file(save_path, json.dumps(save_dict, ensure_ascii=False))
                        if BaseFile.is_file_exists(save_path):
                            break
                        save_cnt += 1
                    except Exception:
                        save_cnt += 1
                if save_cnt == sets.MONGO_SAVE_TRY_CNT:
                    return_info.status = bools_string(False)
                    return_info.msg_code = MsgCode.SERVER_ERROR
                    return_info.msg = "保存数据至本地磁盘失败,重复次数:{}".format(sets.MONGO_SAVE_TRY_CNT)
                    return_info.data = {}
                    return return_info.todict()
                save = SavePathModel(
                    _id=id_,
                    path=save_path,
                    update_time=str(datetime.datetime.now()),
                    create_time=str(datetime.datetime.now())
                )
                save_dict = save.dict(by_alias=True, exclude={"id"})
            result = await mongo_conn.replace_one({'_id': id_}, save_dict)
            if result.matched_count != 1 and result.modified_count != 1:
                return_info.status = bools_string(False)
                return_info.msg_code = MsgCode.MONGO_ERROR
                return_info.msg = "更新mongo失败,匹配条数:{},影响条数:{}".format(result.matched_count, result.modified_count)
                return_info.data = {}
                return return_info.todict()
        else:
            return_info.status = bools_string(False)
            return_info.msg_code = MsgCode.MONGO_ERROR
            return_info.msg = "更新mongodb的new_data失败,new_data不为空或者latest_data为空"
            return_info.data = {}
            return return_info.todict()
    return_info.status = SUCCESS
    return_info.msg_code = 200
    return_info.msg = "更新mongodb的new_data成功"
    return_info.data = ""
    return return_info.todict()


@router.post("/mongo_api/select_mongo")
async def select_mongo(input: InputInfoModel[SelectMongoModel]):
    """
    Generic mongo query.

    :param field: {"new_data.html":1,"last_data":1} projection of fields to return
    :param table: name of the collection to query
    :param and_param: {"_id":"001","para.task_name":"test"} AND-joined conditions
    :param or_param: [{"_id":"001"},{"_id":"002"}] OR-joined conditions
    :param order: [("create_time", 1)] sort specification
    :param limit: int, maximum number of rows to return
    :return: ReturnInfo dict whose data is the list of matching rows
    """
    return_info = ReturnInfo()
    field = input.data.field
    table = input.data.table
    and_param = input.data.and_param
    or_param = input.data.or_param
    order = input.data.order
    limit = input.data.limit
    try:
        mongo_conn = Coll.get_table_conn(table)  # mongo collection handle for this table name
    except Exception:
        return_info.status = bools_string(False)
        return_info.msg_code = MsgCode.MONGO_ERROR
        return_info.msg = "根据table获得mongo链接失败"
        return_info.data = {"err_except": traceback.format_exc()}
        return return_info.todict()
    query = dict(and_param)
    if or_param:
        query["$or"] = or_param
    if field:
        # Always project "path" so rows whose payload was spilled to disk can
        # be resolved below; copy so the request model is not mutated.
        field = {**field, "path": 1}
    else:
        field = None
    rows = await mongo_conn.find(query, field, limit=limit, sort=order).to_list(None)
    new_rows = []
    for row in rows:
        # A "path" field means the real payload lives in a local file.
        if "path" in row:
            try:
                row = json.loads(BaseFile.single_read_file(row["path"]))
            except Exception:
                return_info.status = bools_string(False)
                return_info.msg_code = MsgCode.SERVER_ERROR
                return_info.msg = "获取本地磁盘存放数据失败"
                return_info.data = {}
                return return_info.todict()
        new_rows.append(row)
    return_info.status = SUCCESS
    return_info.msg_code = 200
    return_info.msg = "查询mongodb成功"
    return_info.data = new_rows
    return return_info.todict()
