from jili.tool.log2dict import log2dict
from jili.data.db import getdb_client,insert_one,cu_create_index,get_distinct_keys_batched
from jili.tool.state import config as mongoconfig
from jili.tool import is_path
from jili.core import save,load
import pandas as pd
from bson.objectid import ObjectId
import os
import uuid
from jili.core.printlog import print
# Cache of Mongo collection handles keyed by collection name (filled by getlog_cu).
cu_logs={}
# Cache of collection handles used by the save_log_cu* writers; emptied by close_logmongo().
dbs={}
def get_loacl_mongo_ip():
    """Return the MongoDB host IP from the shared state config."""
    # NOTE: the misspelled name ("loacl") is kept — it is the public interface.
    return mongoconfig.mongodb_ip
def getlog_mongo(name,find,filter=None,dbname="research",ip=None,auth="ant:ant",port=None):
    """Query Mongo collection *name* and return the matching records as a list.

    find: Mongo query dict; a string "_id" value is converted to an ObjectId.
    filter: Mongo projection dict; when it selects exactly one field, the bare
        field values are returned instead of whole documents.
    dbname/ip/auth/port: forwarded to getdb_client.

    The connection is always closed, even when the query raises.
    """
    if filter is None:  # avoid the shared-mutable-default pitfall
        filter = {}
    # Work on a copy so the caller's query dict is never mutated.
    find = dict(find)
    oid = find.get("_id")
    if isinstance(oid, str):
        find["_id"] = ObjectId(oid)
    # Single-field projection -> return just that field's values.
    single_key = list(filter.keys())[0] if len(filter) == 1 else None
    db = getdb_client(dbname=dbname, ip=ip, auth=auth, port=port)
    try:
        cu = db[name]
        rst = []
        for doc in cu.find(find, filter):
            rst.append(doc[single_key] if single_key else doc)
    finally:
        # Previously the client leaked when cu.find raised.
        db.client.close()
    return rst
def check_filter(d,v):
    """Evaluate one comparison filter against *d*.

    v is a (key, op, threshold) triple where op is one of ">", ">=", "<", "<="
    (any other value means equality). Works on dicts of scalars as well as on
    pandas DataFrames, where d[k] <op> t yields a boolean mask.
    """
    k, f, t = v
    if f == ">":
        return d[k] > t
    if f == ">=":
        return d[k] >= t
    if f == "<":
        return d[k] < t
    if f == "<=":
        return d[k] <= t
    # Fallback: treat any unrecognized operator as equality.
    return d[k] == t
def dataframe_sort(d,sort):
    """Sort DataFrame *d* by one column.

    sort: (column, direction) where direction 1 means ascending, anything
    else descending; a falsy sort returns *d* unchanged. NaNs go last.
    """
    if not sort:
        return d
    column, direction = sort
    ascending = direction == 1
    return d.sort_values(by=column, axis=0, ascending=ascending, na_position="last")
def dataframe_filter(d,filter):
    """Apply each (key, op, threshold) condition in *filter* to DataFrame *d*.

    Conditions are applied sequentially (logical AND); a falsy filter
    returns *d* unchanged.
    """
    if not filter:
        return d
    for condition in filter:
        mask = check_filter(d, condition)
        d = d[mask]
    return d
def getlog_cu(name,isexist_keys=False,indexs=None,unique_index=None,find=None,isrelogin=False,dbname="research"):
    """Open (or reuse) Mongo collection *name*; optionally collect existing keys.

    isexist_keys: when True, scan the collection (narrowed by *find*) and build
        a {pcode: [key, ...]} map from the "key"/"pcode" fields of each record.
    indexs/unique_index: forwarded to cu_create_index when either is given.
    isrelogin: when True and the collection is cached, the cached handle is
        returned with an empty key map.
        NOTE(review): the flag name suggests the inverse semantics
        (re-login = fresh connection) — confirm with callers before changing.

    Returns (collection, ss_keys); ss_keys is {} unless isexist_keys is True.
    """
    global cu_logs
    if find is None:  # avoid the shared-mutable-default pitfall
        find = {}
    if name in cu_logs and isrelogin:
        return cu_logs[name], {}
    db = getdb_client(dbname=dbname, auth="ant:ant")
    cu = db[name]
    if indexs or unique_index:
        cu_create_index(cu, indexs=indexs, unique_index=unique_index)
    ss_keys = {}
    if isexist_keys:
        # Project only the two fields we need; dedupe keys per pcode.
        for doc in cu.find(find, {"key": 1, "pcode": 1}):
            key = doc["key"]
            pcode = doc["pcode"]
            bucket = ss_keys.setdefault(pcode, [])
            if key not in bucket:
                bucket.append(key)
    cu_logs[name] = cu
    return cu, ss_keys
def getlog_ss_keys(name,pcode=None,auth="ant:ant",savekey=None,dbname="research"):
    """List backtest keys already stored, from pkl files or from MongoDB.

    When *dbname* is a filesystem path, results were persisted as pkl files
    (fallback for MongoDB problems) and keys are read from disk; otherwise
    Mongo collection *name* is queried.

    Returns a flat list of keys (path mode, or Mongo mode with pcode=None)
    or a {pcode: [key, ...]} dict (Mongo mode with a given/any pcode).
    """
    ss_keys = []
    if is_path(dbname):# pkl persistence of backtest results; fallback for MongoDB issues
        if pcode is not None:
            dbname=os.path.join(dbname,pcode)
        if os.path.exists(dbname):
            for i in os.listdir(dbname):
                if "batchlog" in i:
                    # batch files hold a list of records; collect each record's key
                    s=load(os.path.join(dbname,i))
                    for ii in s:
                        ss_keys.append(ii["key"])
                else:
                    # single-record files are named "<key>.pkl"
                    ii=i.replace(".pkl","")
                    ss_keys.append(ii)
        return ss_keys
    else:
        if name is None:
            return []
        # NOTE(review): this connection is never closed in the branches below —
        # possibly intentional (connection reuse); confirm.
        db = getdb_client(dbname=dbname, auth=auth)
        cu = db[name]

        if pcode is None:
            if savekey:
                # restrict to keys matching the savekey regex
                find={"key":{"$regex":savekey}}
            else:
                find={}
            # ss_keys = cu.distinct("key", find)
            ss_keys=get_distinct_keys_batched(cu,"key",find)
            # cu.aggregate([{"$group": {_id: {'imsi': "$imsi"}, count: {$sum: 1}}}], {allowDiskUse: true})
            return ss_keys
        elif isinstance(pcode,str):
            find={"pcode":pcode}
            filter={"key": 1, "pcode": 1}
        else:
            find = {}
            filter = {"key": 1, "pcode": 1}
        # group keys by pcode: {pcode: [key, ...]} (dedupes per pcode)
        ss_keys={}
        for i in cu.find(find, filter):
            key = i["key"]
            pcode = i["pcode"]
            if pcode not in ss_keys.keys():
                ss_keys[pcode] = []
            if key not in ss_keys[pcode]:
                ss_keys[pcode].append(key)
    return ss_keys
def deal_mongo_recode(i):
    """Sanitize a record for Mongo insertion.

    Returns a new dict in which every key is stringified and stripped of "."
    characters (Mongo rejects dotted keys); nested dicts are handled
    recursively. Values other than dicts are passed through untouched.
    """
    sanitized = {}
    for key, value in i.items():
        clean_key = str(key).replace(".", "")
        if isinstance(value, dict):
            sanitized[clean_key] = deal_mongo_recode(value)
        else:
            sanitized[clean_key] = value
    return sanitized


def close_logmongo():
    """Close every cached handle in the module-level `dbs` registry and empty it."""
    global dbs
    for name in list(dbs):
        dbs[name].client.close()
        del dbs[name]
def save_log_cu_bypkl(name,pklurl,auth="ant:ant",isinsert_one=False,islog=True,dbname="research"):
    """Insert every pkl file under *pklurl* (recursively) into Mongo collection *name*.

    Each file is loaded, optionally sanitized via deal_mongo_recode (islog=True),
    inserted (insert_one per record when isinsert_one, else one insert_many),
    and deleted on success. On an insert error the connection is rebuilt once
    and the same batch is retried.
    """
    global dbs
    if not name:
        # Consistent with save_log_cu: nothing to do without a collection name.
        # (Previously this fell through and raised NameError on `cu`.)
        return
    if name in dbs.keys():
        cu = dbs[name]
    else:
        db = getdb_client(dbname=dbname, auth=auth)
        cu = db[name]
        dbs[name] = cu
    for fname in os.listdir(pklurl):
        url = os.path.join(pklurl, fname)
        if os.path.isdir(url):
            # Recurse into subdirectories with identical options.
            save_log_cu_bypkl(name, url, auth=auth, isinsert_one=isinsert_one, islog=islog, dbname=dbname)
        else:
            logs = load(url)
            if isinstance(logs, dict):
                logs = [logs]
            if islog:
                # (Previously the inner loop variable shadowed the outer `i`.)
                logs0 = [deal_mongo_recode(rec) for rec in logs]
            else:
                logs0 = logs
            try:
                if isinsert_one:
                    for rec in logs0:
                        insert_one(cu, rec)
                else:
                    cu.insert_many(logs0)
                os.remove(url)
            except Exception as e:
                print("error", "insert_many", e)
                # Reconnect once and retry the same batch before giving up.
                db = getdb_client(dbname=dbname, auth=auth)
                cu = db[name]
                dbs[name] = cu
                if isinsert_one:
                    for rec in logs0:
                        insert_one(cu, rec)
                else:
                    cu.insert_many(logs0)
                os.remove(url)
def save_log_cu(name,logs,auth="ant:ant",isinsert_one=False,islog=True,dbname="research",pcode=None):
    """Persist log record(s) either to pkl files on disk or to a Mongo collection.

    name: target collection (Mongo mode). logs: one dict or a list of dicts.
    isinsert_one: insert records one at a time instead of a single insert_many.
    islog: sanitize records for Mongo via deal_mongo_recode first.
    dbname: database name, or a filesystem path to enable the pkl fallback.
    pcode: optional subdirectory under dbname (pkl mode only).
    """
    if is_path(dbname):  # pkl persistence of backtest results; fallback for MongoDB issues
        if pcode is not None:
            dbname=os.path.join(dbname,pcode)
        if isinstance(logs, dict):
            # single record: file named after its "key" field
            url=os.path.join(dbname,logs["key"]+".pkl")
            save(logs,url)
        else:
            # batch of records: random-named "<uuid>_batchlog.pkl" file
            uid=str(uuid.uuid4())
            url = os.path.join(dbname, uid + "_batchlog.pkl")
            save(logs, url)
    else:
        global dbs
        if name and logs:
            # reuse a cached collection handle when available
            if name in dbs.keys():
                cu=dbs[name]
            else:
                db = getdb_client(dbname=dbname, auth=auth)
                cu = db[name]
                dbs[name]=cu
            if isinstance(logs,dict):
                logs=[logs]
            logs0=[]
            if islog:
                for i in logs:
                    i=deal_mongo_recode(i)
                    logs0.append(i)
            else:
                logs0=logs
            try:
                if isinsert_one:
                    for i in logs0:
                        insert_one(cu, i)
                else:
                    cu.insert_many(logs0)
            except Exception as e:
                print("error", "insert_many", e)
                # reconnect and retry record-by-record.
                # NOTE(review): the retry always uses insert_one regardless of
                # isinsert_one — presumably to salvage partial batches; confirm.
                db = getdb_client(dbname=dbname, auth=auth)
                cu = db[name]
                dbs[name] = cu
                for i in logs0:
                    insert_one(cu, i)


def log2mongo(logfile,cu,filter_func=None,self_func=None,keys=None,del_keys=None,add_data=None,filter_check_len=None,insert_batch=2000,cu_indexs=None,cu_unique_index=None):
    """Parse *logfile* into records and insert them into Mongo collection *cu*.

    insert_batch: batch size for insert_many; any falsy value (None/0/False)
        inserts records one at a time via insert_one.
    cu_indexs/cu_unique_index: indexes ensured on the collection before insert.
    The remaining parameters are forwarded to log2dict unchanged.
    """
    # Avoid shared mutable defaults.
    if keys is None:
        keys = []
    if del_keys is None:
        del_keys = []
    if add_data is None:
        add_data = {}
    if cu_indexs is None:
        cu_indexs = []
    ip = get_loacl_mongo_ip()
    db = getdb_client(dbname="research", ip=ip, auth="ant:ant")
    cu0 = db[cu]
    data = log2dict(logfile=logfile, filter_func=filter_func, self_func=self_func, keys=keys,
                    del_keys=del_keys, add_data=add_data, filter_check_len=filter_check_len)
    cu_create_index(cu0, indexs=cu_indexs, unique_index=cu_unique_index)
    # BUGFIX: the original chained the falsy tests with `and`
    # (`==False and ==0 and is None`), which can never all hold at once,
    # so the one-by-one path was unreachable.
    if not insert_batch:
        for rec in data:
            insert_one(cu0, rec)
    else:
        batch = []
        for rec in data:
            batch.append(rec)
            if len(batch) == insert_batch:
                cu0.insert_many(batch)
                batch = []
        if batch:  # BUGFIX: insert_many raises on an empty document list
            cu0.insert_many(batch)
    db.client.close()
def logs2mongo(args):
    """Run log2mongo once for every job dict in *args*.

    Each job must contain "logfile"; every other log2mongo option is read
    from the job dict with the defaults shown below.
    """
    for job in args:
        log2mongo(
            logfile=job["logfile"],
            cu=job.get("cu", "xingtai_ta"),
            filter_func=job.get("filter_func", None),
            self_func=job.get("self_func", None),
            keys=job.get("keys", []),
            del_keys=job.get("del_keys", ["sys"]),
            add_data=job.get("add_data", {}),
            filter_check_len=job.get("filter_check_len", None),
            insert_batch=job.get("insert_batch", 2000),
            cu_indexs=job.get("cu_indexs", ["key", "盈利点数", "年化收益"]),
            cu_unique_index=job.get("cu_unique_index", None),
        )
def log2dict_key(logfile,key,filter_func=None,self_func=None,keys=None,del_keys=None,add_data=None,filter_check_len=None):
    """Extract the value(s) of *key* from every record parsed out of *logfile*.

    key: a single field name (str) -> flat list of values, skipping records
        missing the field; otherwise an iterable of field names -> list of
        per-record value lists.
    The remaining parameters are forwarded to log2dict unchanged.
    """
    # Avoid shared mutable defaults.
    if keys is None:
        keys = []
    if del_keys is None:
        del_keys = []
    if add_data is None:
        add_data = {}
    data = log2dict(logfile=logfile, filter_func=filter_func, self_func=self_func, keys=keys,
                    del_keys=del_keys, add_data=add_data, filter_check_len=filter_check_len)
    rst = []
    for rec in data:
        if isinstance(key, str):
            if key in rec:
                rst.append(rec[key])
        else:
            # BUGFIX: previously iterated the unrelated `keys` parameter here,
            # producing empty rows whenever *key* was a list of field names.
            rst.append([rec[k] for k in key])
    return rst
def logs2dict_key(args):
    """Collect key values from several log files into one flat list.

    Each job dict in *args* must contain "logfile"; every other log2dict_key
    option is read from the job with the defaults shown below.
    """
    collected = []
    for job in args:
        values = log2dict_key(
            logfile=job["logfile"],
            key=job.get("key", "key"),
            filter_func=job.get("filter_func", None),
            self_func=job.get("self_func", None),
            keys=job.get("keys", ["sys", "ss_name", "version", "key", "pcode", "reverse_bs"]),
            del_keys=job.get("del_keys", ["sys"]),
            add_data=job.get("add_data", {}),
            filter_check_len=job.get("filter_check_len", None),
        )
        collected.extend(values)
    return collected
def get_mongologs_key(name,auth="ant:ant",key="key"):
    """Return the distinct values of *key* in collection *name* on the local Mongo."""
    host_ip = get_loacl_mongo_ip()
    client_db = getdb_client(dbname="research", ip=host_ip, auth=auth)
    values = client_db[name].distinct(key)
    client_db.client.close()
    return values
def get_mongo_performance(name,filter=None,filter_func=None,mongo_find=None,sort=None,n=None,isdict=False):
    """Fetch records from collection *name* into a (possibly filtered/sorted) DataFrame.

    filter: list of (key, op, threshold) triples applied via dataframe_filter.
    filter_func: optional per-record predicate applied while fetching.
    mongo_find: Mongo query dict passed to find().
    sort: (column, direction) for dataframe_sort.
    n: accepted for backward compatibility; currently unused.
    isdict: return d.to_dict("index") instead of the DataFrame.
    """
    # Avoid shared mutable defaults.
    if filter is None:
        filter = []
    if mongo_find is None:
        mongo_find = {}
    ip = get_loacl_mongo_ip()
    db = getdb_client(dbname="research", ip=ip, auth="ant:ant")
    cu0 = db[name]
    data = []
    try:
        for doc in cu0.find(mongo_find):
            if filter_func and not filter_func(doc):
                continue
            del doc["_id"]  # ObjectId would pollute the DataFrame
            data.append(doc)
    finally:
        # Previously the client leaked if the query or filter_func raised.
        db.client.close()
    d = pd.DataFrame(data)
    d = dataframe_filter(d, filter)
    d = dataframe_sort(d, sort)
    if isdict:
        d = d.to_dict("index")
    return d
def ff(x):
    """Truncate a serialized record string at its '周收益' (weekly-return) field.

    Keeps the text before ", '周收益", strips up to ten leading "(" characters,
    re-prepends a single "(", and appends "})" if the result is not already
    closed. Strings without the marker are returned unchanged.
    """
    if "周收益" not in x:
        return x
    head = x.split(", '周收益")[0].replace("(", "", 10)
    trimmed = "(" + head
    if "})" not in trimmed:
        trimmed = trimmed + "})"
    return trimmed
