# encoding=utf8
from json import dumps
from urllib.parse import quote
from extra import *
from random import randint
from random import choice
from random import shuffle
from openpyxl import *

import logging
import os
import pickle
import requests
import time
import sys
import pymongo


"""全局配置"""


def default_mongoconf(ip="192.168.2.117", port=27017, db="work_object", key=None, col_key="id", usr="", pwd="", authdb=""):
    """Build a default MongoDB configuration dict for the MONGODB wrapper.

    Args:
        ip, port: Mongo server address.
        db: database name.
        key: collection name; when falsy an auto-generated name
            ("work<microsecond timestamp>_<random>") is used.
        col_key: document field that receives a unique index.
        usr, pwd, authdb: optional authentication credentials.

    Returns:
        dict with keys "link", "auth", "authdb", "db", "col", "col_key".
    """
    if not key:
        # int() keeps float-repr artifacts (trailing ".0" / exponent
        # notation) out of the generated collection name.
        key = f"work{int(time.time() * (10 ** 6))}_{randint(1000, 10000)}"
    return {
        "link": [ip, port],
        "auth": (usr, pwd),
        "authdb": authdb,
        "db": db,
        "col": key,
        "col_key": col_key,
    }


class MONGODB(object):
    """Thin wrapper around a pymongo connection.

    Takes a configuration dict shaped like the one returned by
    ``default_mongoconf`` ("link", "auth", "authdb", "db", "col",
    "col_key") and exposes a best-effort ``COL_save`` insert.
    """

    def __init__(self, karg):
        super(MONGODB, self).__init__()
        self.conf = karg
        self.logger = logging.getLogger(type(self).__name__)
        self.db = self.conf["db"]        # database name
        self.col = self.conf["col"]      # collection name
        self.link = self.DB_link(self.conf["col_key"])

    def DB_link(self, key):
        """Connect to Mongo, ensure a unique index on *key*, return the db handle."""
        host, port = self.conf["link"]
        auth_kwargs = {}
        if self.conf.get("authdb", None):
            usr, pwd = self.conf["auth"]
            # Credentials passed to MongoClient replace Database.authenticate(),
            # which was removed in pymongo 4.x.
            auth_kwargs = {"username": usr, "password": pwd,
                           "authSource": self.conf["authdb"]}
        con = pymongo.MongoClient(host, port, **auth_kwargs)

        # list_database_names() replaces the removed database_names().
        if self.db not in con.list_database_names():
            print(f"| {self.db} created ")
        db = con[self.db]
        # create_index() replaces the removed ensure_index(); it is idempotent,
        # so calling it on every connect is safe.
        db[self.col].create_index(key, unique=True)
        return db

    def COL_save(self, data):
        """Insert *data*; duplicates and other errors are logged, not raised."""
        db = self.link
        try:
            # insert_one() replaces the removed Collection.insert().
            db[self.col].insert_one(data)
        except pymongo.errors.DuplicateKeyError as e:
            self.logger.info(e)
        except Exception as e:
            # Best-effort store: log and keep crawling rather than abort.
            self.logger.info(e)

class bot(object):
    """Base crawler bot.

    Owns an ``Anticrawler`` session, hot-reload (pickle) of that session,
    raw-data persistence and generic GET/POST fetch helpers. Subclasses are
    expected to override ``sign_in``, ``get_new_identity``, ``data_parser``,
    ``reload_main_page`` and ``start``.
    """

    def __init__(self, mainurl):
        super(bot, self).__init__()
        self.mainurl = mainurl
        # Anticrawler (from extra) owns the requests session + UA rotation.
        self.crawler = Anticrawler(source=mainurl)
        self.session = self.crawler.session
        self.logger = logging.getLogger(type(self).__name__)
        self.hotreload_file = f"{type(self).__name__}.pkl"
        self.raw_data_file = f"{type(self).__name__}_raw.json"
        self.raw_data = None
        self.raw_data_status = False
        self.maxpage = 0            # paging limit, set by subclasses
        self.retrytimes = 3         # per-request retry budget
        self.current_user = None
        self.randomUA = True        # rotate User-Agent before each check

    def resp_check(self, resp, login=False):
        """Return True for an HTTP 200 response, None otherwise.

        The original intent (per the Chinese docstring) is that 429/403
        should trigger an identity switch; that logic is left to
        subclasses via ``get_new_identity``.
        """
        if self.randomUA:
            self.crawler.random_user_agent()
        netcode = resp.status_code
        if netcode == 200:
            return True

    def sleep_report(self, t):
        """Sleep *t* seconds, logging the wake-up time first."""
        self.logger.info('Sleeping ... Until %s' % time.strftime(
            '%Y-%m-%d %H:%M:%S', time.localtime(time.time() + t)))
        time.sleep(t)

    def get_new_identity(self, broken_user=None, code=0):
        """Acquire a new identity (hook for subclasses)."""
        pass

    def data_parser(self, *arg):
        """Extract/clean/record data (hook for subclasses). Returns a dict."""
        _ = arg
        resdata = {}
        return resdata

    def hotreload_load(self):
        """Restore a pickled session from disk; True on success, None otherwise."""
        if os.path.exists(self.hotreload_file):
            with open(self.hotreload_file, "rb") as f:
                try:
                    self.session = pickle.load(f)
                    self.crawler.session = self.session
                    self.logger.info("Hotreload Compleate")
                    return True
                except Exception as e:
                    self.logger.error(e)

    def hotreload_save(self):
        """Pickle the current session to disk; returns True."""
        with open(self.hotreload_file, "wb") as f:
            pickle.dump(self.session, f)
            self.logger.info("Hotreload File Saved")
            return True

    def raw_data_save(self, data, filename=None):
        """Dump *data* as pretty-printed JSON (UTF-8) to *filename*."""
        filename = filename if filename else self.raw_data_file
        with open(filename, "w", encoding="utf8") as f:
            f.write(dumps(data, indent=4, ensure_ascii=False))
            self.logger.info("Rawdata File Saved")

    def html_data_save(self, html, name="test.html"):
        """Write *html* to *name* for debugging."""
        # Explicit UTF-8: the platform default encoding could raise
        # UnicodeEncodeError on non-ASCII markup (and raw_data_save
        # already uses utf8).
        with open(name, "w", encoding="utf8") as f:
            f.write(html)

    def sign_in(self):
        """Log in and obtain credentials (hook for subclasses)."""
        pass

    def reload_main_page(self):
        """Crawl across pages (hook for subclasses)."""
        pass

    def get_data(self, format_url, format_data=None, types="", params=None, extraheader=None):
        """GET the target URL (optionally formatted with *format_data*) and parse it.

        Returns ``data_parser``'s result on HTTP 200, None otherwise.
        """
        params = params if params else {}
        url = format_url if format_data is None else format_url.format(
            *format_data)
        self.logger.info(url)
        resp = self.session.get(url, params=params, headers=extraheader)
        if self.resp_check(resp):
            return self.data_parser(types, resp)
        self.logger.error(resp.status_code)

    def post_data(self, format_url, format_data=None, types="", params=None, data=None, json=None, extraheader=None):
        """POST to the target URL (optionally formatted with *format_data*) and parse it.

        *extraheader* is new but defaults to None, matching the previous
        behaviour (requests treats headers=None as "no extra headers").
        Returns ``data_parser``'s result on HTTP 200, None otherwise.
        """
        params = params if params else {}
        url = format_url if format_data is None else format_url.format(
            *format_data)
        self.logger.info(url)
        resp = self.session.post(url, params=params, data=data, json=json,
                                 headers=extraheader)
        if self.resp_check(resp):
            return self.data_parser(types, resp)
        self.logger.error(resp.status_code)

    def start(self):
        """Entry point (hook for subclasses)."""
        pass

    def setproxy(self, uri):
        """Route both http and https traffic of the session through *uri*."""
        self.session.proxies = {"https": uri, "http": uri}


def save_worker(data, filename="res.pkl"):
    """Pickle *data* to *filename* (default ``res.pkl``)."""
    with open(filename, "wb") as out:
        pickle.dump(data, out)


def load_worker(filename="res.pkl"):
    """Unpickle and return the object stored in *filename*."""
    with open(filename, "rb") as src:
        return pickle.load(src)
