# pip install 'pymongo[srv]'
# pip install dnspython
import os
import sys
import pandas as pd
import  pymongo
from get_data import other
global client

try:
    client = pymongo.MongoClient(
        "mongodb+srv://ltsjim:ltsjack123@cluster0.x6bey.mongodb.net/myFirstDatabase?retryWrites=true&w=majority")
    if client:
        pass
    else:
        print("数据库没读取成功")
except:
    client = pymongo.MongoClient(
        "mongodb://ltsjim:ltsjack123@cluster0-shard-00-00.x6bey.mongodb.net:27017,cluster0-shard-00-01.x6bey.mongodb.net:27017,cluster0-shard-00-02.x6bey.mongodb.net:27017/admin?ssl=true&replicaSet=atlas-v4n9jm-shard-0&authSource=admin&retryWrites=true&w=majority")
def _save_web(df, db_name, col_name):
    """Replace the contents of Atlas collection ``db_name``/``col_name`` with *df*.

    Performs a full overwrite: the collection is emptied, then every row of
    *df* is inserted as one document.

    :param df: pandas DataFrame to persist.
    :param db_name: target database name on the module-level ``client``.
    :param col_name: target collection name.
    :return: None
    """
    # usage: _save_web(ts_get_data, "1.1.数据备份快速下载", ...)
    mydb = client[db_name]
    mycol = mydb[col_name]
    mycol.delete_many({})  # clear before re-inserting the new snapshot
    records = df.to_dict(orient='records')  # one dict per DataFrame row
    # insert_many raises InvalidOperation on an empty list, so guard it —
    # an empty DataFrame now results in an empty collection, not a crash.
    if records:
        mycol.insert_many(records)
    print("保存Atlas/{}/{}中".format(db_name, col_name))
#________________
def _save_mongo_db(df,db_name,col_name,*args,**kwargs):
    path_mongo_db = os.path.dirname(os.path.dirname(__file__))
    path_ = os.path.join(path_mongo_db, "mongo_db", "{}".format(db_name), '{}.json'.format(col_name))
    df.to_csv(path_, encoding="utf-8")
    print("保存mongo_db/{}/{}中".format(db_name,col_name))
    return df
def dxw_bk_save(df, timesramp: str, dtype="bk"):
    """Save *df* under mongo_db/dxw_bk/ with a dtype-specific file suffix.

    :param df: DataFrame to save (written via ``to_csv`` despite the
        ``.json`` extension, matching the rest of this module).
    :param timesramp: date string such as ``"20211126"``.
    :param dtype: ``"bk"``   -> ``<timesramp>_bk.json`` (板块 / sector data)
                  ``"gegu"`` -> ``<timesramp>_.json``   (个股 / per-stock data)
    :return: *df* on success; ``None`` for an unrecognized dtype
        (preserves the original fall-through behavior).
    """
    # Map dtype to its file-name suffix; the two original branches were
    # identical apart from this suffix.
    suffixes = {"bk": "_bk", "gegu": "_"}
    suffix = suffixes.get(dtype)
    if suffix is None:
        return None
    base = os.path.dirname(os.path.dirname(__file__))
    target_dir = os.path.join(base, "mongo_db", "dxw_bk")
    # Create the directory on first use instead of crashing.
    os.makedirs(target_dir, exist_ok=True)
    path_ = os.path.join(target_dir, '{}{}.json'.format(timesramp, suffix))
    df.to_csv(path_, encoding="utf-8")
    print("保存mongo_db/{}/{}{}中".format("dxw_bk", timesramp, suffix))
    return df


#_________________________________
def _readdf(db_name,col_name,dtype="normal",*args,**kwargs):
    """

    :param db_name:
    :param col_name:
    :param dtype: dtype=normal
    :return:
    """
    path_mongo_db = os.path.dirname(os.path.dirname(__file__))
    path_ = os.path.join(path_mongo_db, "mongo_db", "{}".format(db_name), '{}.json'.format(col_name))
    #print("读取地址",path_)
    if os.path.exists(path_) is False:
        return None
    if os.path.exists(path_) is True:
        df=pd.read_csv(path_, encoding="utf-8",index_col=0)
        if dtype=="normal":
            df=other.re_read_df(df)
        if dtype=="dxw":
            df=df

        # mycol = client[db_name][col_name]
        # df=pd.DataFrame(mycol.find({}))
        # df=df.drop(['_id'], axis=1)
        return df
def _read_ths_volMax(dtype="vol", model="returndf"):
    """Find the newest file(s) in mongo_db/ths_vol and load or name them.

    File names are assumed to sort lexicographically by recency (they appear
    to embed a date — TODO confirm naming scheme).

    :param dtype: ``"vol"`` -> keep files containing ``'zvol'``;
                  ``"pri"`` -> keep files containing ``'zpri'``.
    :param model: ``"returndf"``      -> newest file as a DataFrame
                  ``"TwoMaxdf"``      -> two newest files as DataFrames
                  ``"returnMaxName"`` -> newest file name (str)
                  ``"TwoMaxName"``    -> two newest file names
    :return: depends on *model*; ``None`` for an unrecognized model
        (preserves the original fall-through).
    :raises FileNotFoundError: if mongo_db/ths_vol does not exist.
    """
    base = os.path.dirname(os.path.dirname(__file__))
    dir_ = os.path.join(base, "mongo_db", "ths_vol")
    names = pd.DataFrame(os.listdir(dir_))
    if dtype == "vol":
        names = names[names[0].str.contains('zvol')]
    if dtype == "pri":
        names = names[names[0].str.contains('zpri')]
    # Descending sort: index 0 is the newest.  The original also computed
    # max(list_i), which is redundant — it equals list_i[0] after this sort.
    list_i = sorted(names[0].tolist(), reverse=True)
    newest_path = os.path.join(dir_, list_i[0])
    if model == "returndf":
        df = pd.read_csv(newest_path, encoding="utf-8", index_col=0)
        return other.re_read_df(df)
    if model == "TwoMaxdf":
        df1 = other.re_read_df(pd.read_csv(newest_path, encoding="utf-8", index_col=0))
        second_path = os.path.join(dir_, list_i[1])
        df2 = other.re_read_df(pd.read_csv(second_path, encoding="utf-8", index_col=0))
        return df1, df2
    if model == "returnMaxName":
        return list_i[0]
    if model == "TwoMaxName":
        return list_i[0], list_i[1]
#____________________________________________
# from pymongo import MongoClient
# import os
# import pandas as pd
# path=os.path.dirname(__file__)
# path1="{}/{}".format(path,"X509-cert-8882754722637580045.pem")
#
# # uri = "mongodb+srv://cluster0.x6bey.mongodb.net/myFirstDatabase?authSource=%24external&authMechanism=MONGODB-X509&retryWrites=true&w=majority"
# # client = MongoClient(uri,
# #                      tls=True,
# #                      tlsCertificateKeyFile=path1)
# #读取
# db = client['ths']
# mycol = db['thshy']
# print(mycol)
# df=pd.DataFrame(mycol.find({}))
# df=df.drop(['_id'], axis=1)
# print(df)
#保存
#     #>>>Altas_db.ths_save(ts_get_data, "1.1.数据备份快速下载")
#     mydb=client['{}'.format(db_name)]
#     mycol = mydb['{}'.format(col_name)]
#     mycol.delete_many({})
#     # 将DataFrame存入到mongodb中
#     df = df.to_dict(orient='records')  # 固定写法
#     mycol.insert_many(df)
#     print("保存Atlas/{}/{}中".format(db_name,col_name))
#____________________________________________
#todo redis
import redis
import pickle
def redis_test():
    """Smoke-test a local Redis server at localhost:6379 (db 0).

    Round-trips plain strings and a pickled empty DataFrame through Redis,
    printing the results. Requires a running Redis instance; returns None.
    """
    # redis.Redis(host="localhost",port=6379,db=0)
    pool = redis.ConnectionPool(host='localhost', port=6379, db=0)
    r = redis.Redis(connection_pool=pool)
    r.set("country", "dalian")
    r.set("city", "英国")
    w = r.get("city").decode("utf-8")  # Redis returns bytes; decode to str
    print(w)
    # NOTE(review): `del r` only drops the local reference — the pool's
    # connections are not explicitly closed.
    del r
    df=pd.DataFrame()
    # Second client created directly (without the pool above).
    redis_r = redis.Redis(host="localhost", port=6379, db=0)
    # Store the DataFrame as a pickle blob. NOTE(review): pickle is unsafe
    # on untrusted data — acceptable only for this local self-test.
    df_bytes = pickle.dumps(df)
    redis_r.set('test_df', df_bytes)

    df_bytes_from_redis = redis_r.get('test_df')
    df_from_redis = pickle.loads(df_bytes_from_redis)
    print(df_from_redis)
    # NOTE(review): this final read is assigned but never used or returned.
    w = redis_r.get("city").decode("utf-8")
#____________________________________________
# sqlite 导入  导出  sql代码执行
# import sqlite3
# from sqlalchemy import Column, String, create_engine
# from sqlalchemy_utils import database_exists, create_database
# path=os.path.dirname(os.path.dirname(__file__))
# # 初始化数据库连接:
# engine =create_engine('sqlite:///{}/resource.db'.format(path))
# print(engine)
# from sqlalchemy import Table, Column, Integer, String, MetaData, ForeignKey
# #import tushare as ts
# #df=ts.get_today_all()
# df = pd.DataFrame([[1, "a"], [2, "c"], [3, "p"]], columns=["user_id", "grand"])
# table_name="11"
# pd.io.sql.to_sql(df,table_name,con=engine, schema="main", index=False, index_label=False, if_exists='append', chunksize=1000)
# if not database_exists(engine.url):
#     create_database(engine.url)

# print(engine.url)
if __name__ == '__main__':
    # Intentionally a no-op: this module is meant to be imported for its
    # save/read helpers, not run as a script.
    pass
