"""
本程序主要用于生成big文件
"""
import json
import platform
import time

import facade
from pymongo import MongoClient
from xjlibrary.mprocesspoll.process_distributed.BaseTask import BaseTask
from xjlibrary.network.MyIP import GetLocalIPByPrefix, get_local_ip
from xjlibrary.our_file_dir import BaseDir

curPath = BaseDir.get_file_dir_absolute(__file__)
configfile = BaseDir.get_new_path(curPath, "db.ini")
topPath = BaseDir.get_upper_dir(curPath, -3)
bigpath = BaseDir.get_new_path(topPath, "download", "sipogov", "download", "bigjson")
BaseDir.create_dir(bigpath)


class NodeTask(BaseTask):
    """Worker node that registers itself with the distributed task master.

    The master (task_master.py) runs on this same machine, so the node
    binds to the local LAN address, chosen per operating system.
    """

    def __init__(self):
        super().__init__()
        # Windows boxes sit on the 192.168.* LAN; Linux workers use the
        # enp2s0 interface.
        local_ip = (
            GetLocalIPByPrefix("192.168.")
            if platform.system() == "Windows"
            else get_local_ip("enp2s0")
        )
        self.ip = local_ip
        self.server_addr = self.ip
        self.port = 5000
        self.init_node(self.server_addr, self.port)


class BigFileCreate(object):
    """Builds ".big_json" output files by joining patent records.

    For each rawid pulled from the distributed task queue, the raw page
    JSON from MySQL is combined with two MongoDB documents (absfull and
    patentinfo) and appended as one JSON line to the current big file,
    which is rotated once it grows past MAX_FILE_SIZE bytes.
    """

    # Rotate the output file once it exceeds this many bytes (~1.1 GB).
    MAX_FILE_SIZE = 1100000000

    def __init__(self):
        self.logger = facade.get_streamlogger()
        # NOTE(review): credentials are hard-coded; consider moving them
        # into db.ini alongside the MySQL configuration.
        self.connection = MongoClient(
            "mongodb://xujiangrw:vipdatacenter@192.168.30.171:27017",
            # If True and the server is running without journaling, block
            # until the server has synced all data files to disk.
            fsync=False
        )
        self.db = self.connection['cnipa']
        self.mysqlutils = facade.MysqlUtiles(configfile,
                                             "db",
                                             logger=self.logger)
        # Output file is named after the epoch second it was started.
        self.filename = str(int(time.time())) + ".big_json"
        self.node = NodeTask()
        self.init_node()

    def init_node(self):
        """Connect the worker node and attach its task/result queues."""
        self.node.conn()
        self.node.set_task_result_obj()

    def createdb3(self):
        """Open ./inputs.db3 and create the bookkeeping table if missing."""
        self.db3 = facade.Sqlite3Utiles(logger=self.logger). \
            Sqlite3DBConnectFromFilePath("./inputs.db3",
                                         encoding="utf-8")

        sql = """CREATE TABLE IF NOT EXISTS "article" ("rawid" text(100) NOT NULL,"stat" integer(20) NOT NULL DEFAULT 0,PRIMARY KEY ("rawid"));"""
        self.db3.ExeSqlliteSql(sql)

    def insert_data(self):
        """Load distinct rawids from rawid_pubid.csv into the sqlite table."""
        rawids = set()
        with open(r"./rawid_pubid.csv", "r", encoding="GB2312") as f:
            for line in f:
                # maxsplit=1 tolerates extra commas inside the pub_id column
                # (an unbounded split(",") raised ValueError on such lines,
                # even though pub_id is discarded).
                rawid, _pub_id = line.split(",", 1)
                rawids.add(rawid)
        sql = "insert or ignore into article (rawid) values (?)"
        self.db3.ExeSqlliteMany(sql, [(x,) for x in rawids])

    def getrawid(self):
        """Consume rawids from the task queue forever, writing merged JSON.

        Missing source records are logged to ./notinf.txt and skipped
        (without acknowledging the rawid); completed rawids are put on the
        result queue.
        """
        while True:
            rawid = self.node.task.get().strip()
            print(rawid)
            # WARNING(review): rawid is interpolated into the SQL string;
            # switch to a parameterized query if SelectFromDB supports
            # placeholders, to avoid SQL injection via the task queue.
            sql = "select json_info from article where rawid = '{}'".format(rawid)
            rows = self.mysqlutils.SelectFromDB(sql)
            if not rows:
                # Guard added: an absent MySQL row used to raise IndexError
                # on rows[0][0] and kill the worker loop.
                BaseDir.single_add_file("./notinf.txt", "mysql:" + rawid + "\n")
                continue
            pageinfo = rows[0][0]
            absfull = self.db.absfull.find_one({"requestsid": rawid})
            if not absfull:
                BaseDir.single_add_file("./notinf.txt", "absfull:" + rawid + "\n")
                continue
            absfull.pop("_id")
            absfull.pop("showviewlist")

            info = self.db.patentinfo.find_one({"requestsid": rawid})
            if not info:
                BaseDir.single_add_file("./notinf.txt", rawid + "\n")
                continue
            # Copy the mandatory patentinfo fields into the merged record.
            for key in ("patentinfo", "cmsg", "lawmsg", "pmsg", "cpmsg",
                        "ccount", "pcount", "tcount"):
                absfull[key] = info[key]
            # "cpnum" is optional in some patentinfo documents; the bare
            # except previously used here hid every other error as well.
            absfull["cpnum"] = info.get("cpnum", "0")
            absfull["msg"] = pageinfo
            jsonmsg = json.dumps(absfull, ensure_ascii=False)
            filepath = BaseDir.get_new_path(bigpath, self.filename)
            with open(filepath, "a", encoding="utf-8") as f:
                f.write(jsonmsg + "\n")
            if BaseDir.get_file_size(filepath) > self.MAX_FILE_SIZE:
                # Start a fresh output file once the current one is full.
                self.filename = str(int(time.time())) + ".big_json"

            # Acknowledge completion so the master can mark this rawid done.
            self.node.result.put(rawid)


if __name__ == "__main__":
    create = BigFileCreate()
    # create.createdb3()
    while True:
        create.getrawid()
        time.sleep(10)
