import asyncio

from re_common.baselibrary.utils.basemotor import BaseMotor

# Connect to the data_warehouse replica set and select the working collection.
# NOTE(review): credentials are embedded directly in the URI and committed to
# source control — consider moving them to configuration/environment.
MONGO_URI = (
    "mongodb://datahouse:vipdatacenter"
    "@192.168.31.243:32920,192.168.31.208:32920,192.168.31.206:32920"
    "/data_warehouse.base_obj_ref_a?authSource=data_warehouse"
)
DB_NAME = "data_warehouse"
COLLECTION_NAME = "base_obj_ref_a"

bs = BaseMotor()
bs.AsyncIOMotorClient(MONGO_URI, DB_NAME)

# Alternative connection kept from earlier experiments:
# bs.AsyncIOMotorClient("mongodb://192.168.31.30:32417", "htmljson")
bs.get_col(COLLECTION_NAME)

# bs.get_col("test")


# async def insert_file(filename, bs):
#     for line in BaseGzip(100).read_gz_file(filename):
#         dicts = json.loads(line)
#         dicts["_id"] = dicts["id"]
#         del dicts["id"]
#         print(dicts["_id"])
#         try:
#             await bs.insert_one(dicts)
#         except DuplicateKeyError:
#             print("出现重复id")  # "duplicate id encountered"
#             pass
#
#
# lists = []
# for file in BaseDir.get_dir_all_files(r"F:\db3\patnetjson_big"):
#     lists.append(insert_file(file, bs))
#
# print("***********")
# asyncio.get_event_loop().run_until_complete(asyncio.wait(lists))

# sets = set()
# i = 0
#
#
# async def dic_hook(doc):
#     print(doc["_id"])
#
#
# asyncio.get_event_loop().run_until_complete(bs.find(dic_hook, {}, {"_id"}))

# Remove every document in the selected collection ({} matches all documents).
# This is intentionally destructive — the script exists to reset the collection.
# asyncio.get_event_loop() is deprecated when no loop is running (since 3.10);
# create a fresh loop explicitly and make sure it is closed even on failure.
# new_event_loop()/run_until_complete also accepts plain Futures, in case
# BaseMotor.delete_many does not return a coroutine — TODO confirm.
loop = asyncio.new_event_loop()
try:
    loop.run_until_complete(bs.delete_many({}))
finally:
    loop.close()

# print(";".join(sets))

#
# async def insert_file(filename, bs):
#     try:
#         for line in BaseGzip(100).read_gz_file(filename):
#             try:
#                 dicts = json.loads(line)
#                 dicts["_id"] = dicts["cid"] + "_" + dicts["pagenum"]
#                 try:
#                     await bs.insert_one(dicts)
#                 except DuplicateKeyError:
#                     pass
#             except:
#                 pass
#     except Exception as e:
#         print(filename)
#         BaseFile.single_add_file("./errfiles.txt", filename + "\n")
#         print(e)


# async def insert_file(filename, bs):
#     for line in BaseGzip(100).read_gz_file(filename):
#         dicts = json.loads(line)
#         dicts["_id"] = dicts["cid"] + "_" + dicts["pagenum"]
#         try:
#             await bs.insert_one(dicts)
#         except DuplicateKeyError:
#             pass
# def insert_file(filename, bs):
#     for line in BaseGzip(100).read_gz_file(filename):
#         dicts = json.loads(line)
#         dicts["_id"] = dicts["cid"] + "_" + dicts["pagenum"]
#         try:
#             bs.insert_one(dicts)
#         except DuplicateKeyError:
#             pass
# for file in BaseDir.get_dir_all_files(r"\\192.168.31.66\download\test_big_json"):
#    insert_file(file,bs)

# start_time = time.time()
# lists = []
# for file in BaseDir.get_dir_all_files(r"\\192.168.31.123\f\download\wf_zl\download\pages_big_json_over"):
#     lists.append(insert_file(file, bs))
#     # asyncio.get_event_loop().run_until_complete(insert_file(file,bs))
#
# for file in BaseDir.get_dir_all_files(r"\\192.168.31.66\e\download\wf_zl\download\pages_big_json_over"):
#     lists.append(insert_file(file,bs))
#
# print("***********")
# asyncio.get_event_loop().run_until_complete(asyncio.wait(lists))
# print(time.time() - start_time)

# f = open(file="./mongorawid.txt", mode="w", encoding="utf-8")
#
#
# async def dic_hook(doc):
#     print(doc["_id"])
#     # BaseFile.single_add_file("./mongorawid.txt", doc["_id"] + "\n")
#     f.write(doc["_id"] + "\n")
#
#
# asyncio.get_event_loop().run_until_complete(bs.find(dic_hook, {"export_stat": 1}, {"_id": 1}))
# f.close()

# with open("./sqlid.txt", "r", encoding="utf-8") as f:
#     a = set(f.readlines())
# with open("./mongorawid.txt", "r", encoding="utf-8") as f:
#     b = set(f.readlines())
#
# c =a-b
# d = set()
# for line in c:
#     line = line.strip()
#     d.add(line)
#
# sql = "update `pagess` set stat=0 where cid_pagenum in {}".format(tuple(d))
# print(sql)
