import asyncio
import datetime
import gzip
import json

###########################################
# Bootstrap imports for base packages from the same project
import os
import sys

from re_common.vip.baseencodeid import BaseLngid

# Compute the project root (five directory levels above this file) and
# prepend it to sys.path so sibling project packages resolve when this
# script is run directly rather than as an installed package.
filepath = os.path.abspath(__file__)
pathlist = filepath.split(os.sep)
pathlist = pathlist[:-5]  # drop the last 5 components to reach the project top
TopPath = os.sep.join(pathlist)
sys.path.insert(0, TopPath)
# NOTE(review): this insert runs AFTER the `re_common` import above —
# presumably re_common is already importable some other way; confirm the
# ordering is intentional.
print(TopPath)
############################################

from re_common.baselibrary.utils.basefile import BaseFile
from re_common.baselibrary.utils.basemotor import BaseMotor

# Async Mongo client (project wrapper around Motor) targeting the
# htmljson database; the collection holds crawled journal detail pages.
bs = BaseMotor()
bs.AsyncIOMotorClient(
    "mongodb://192.168.31.30:32417/htmljson.cx_journal_detail?authSource=htmljson",
    "htmljson")

bs.get_col("cx_journal_detail")

# Current output file: a gzipped JSON-lines dump. `f`, `path` and `i` are
# module-level state mutated by dic_hook below when it rotates files.
path = r"D:\qw_new_bigjson\chaoxing_html.big_json.gz"
# f = open(file=path, mode="w", encoding="utf-8")
f = gzip.open(path, "wb")  # binary mode; records are utf-8 encoded before write
i = 0  # records written to the current file (drives rotation)


async def dic_hook(doc):
    """Write one Mongo document as a JSON line to the current gzip output.

    Builds a record with the raw id, the derived lngid, the absolute detail
    URL and the raw HTML, appends it as utf-8 JSON-lines, and rotates to a
    fresh output file once 100000 records have been written.

    Mutates module-level state: ``f`` (open gzip handle), ``path`` (current
    output filename) and ``i`` (record counter).
    """
    global f, path, i
    dxid = doc['_id']
    record = {
        "rawid": dxid,
        "lngid": BaseLngid().GetLngid("00006", dxid),
        'url': 'http://qikan.chaoxing.com' + doc['jsondicts']['url'],
        'html': doc['html_detail'],
    }
    f.write((json.dumps(record, ensure_ascii=False) + '\n').encode())
    i += 1
    print(i)
    # Rotate: close the full file and start a new one with a fresh name.
    if i >= 100000:
        f.close()
        path = BaseFile.get_new_filename(path)
        f = gzip.open(path, "wb")
        i = 0


# Stream every matching document through dic_hook. Close the output file in
# a finally-block so the gzip trailer is written even if the cursor
# iteration fails part-way — otherwise the last output file is left
# truncated/corrupt (the original closed it only on the success path).
loop = asyncio.get_event_loop()
try:
    loop.run_until_complete(
        bs.find(dic_hook, {"qwcontentDivId_stat": "1", "pdf_html_stat": "2"}))
finally:
    f.close()
