import configparser
import json
import os
import socket
import threading
import traceback
import requests
import re

# Load cache layout settings from config.ini (read once, at import time).
conf = configparser.ConfigParser()
conf.read('config.ini', encoding="utf-8")
# Root directory of the current, page-partitioned cache tree.
classify_cache_root_dir = conf.get('cache', 'classify_cache_root_dir')
# Comma-separated list of legacy flat cache roots, checked as a read fallback.
check_cache_root_dir = conf.get('cache', 'check_cache_root_dir')
check_cache_root_dir = check_cache_root_dir.split(',')
# Max number of cached files per page directory before a new page is opened.
max_filenum_per_dir = conf.get('cache', 'max_filenum_per_dir')
max_filenum_per_dir = int(max_filenum_per_dir)
# NOTE(review): this session is created but never used in this file — confirm.
session = requests.session()

# Serializes cache-directory creation in cache() across worker threads.
mkdir_lock = threading.Lock()


def thread_it(func, *args):
    """Run ``func(*args)`` in a daemon thread and return the Thread.

    The thread is daemonized so it never blocks interpreter shutdown.
    Uses the ``daemon=`` constructor kwarg instead of the deprecated
    ``setDaemon()`` (removed in recent Python versions).  Returning the
    thread (the original returned None) lets callers join it if needed;
    existing callers that ignore the return value are unaffected.
    """
    t = threading.Thread(target=func, args=args, daemon=True)
    t.start()
    return t


def validateTitle(title):
    """Sanitize *title* for use as a filename.

    Replaces every character in the set  / \\ : * ? " < > |  with an
    underscore, then keeps only the last 100 characters of the result.
    """
    illegal_chars = re.compile(r"[/\\\:\*\?\"\<\>\|]")
    return illegal_chars.sub("_", title)[-100:]


# task name -> number of the page sub-directory currently being filled
task_page_num = dict()
# task name -> file count inside that task's last (highest-numbered) page dir
task_page_file_num = dict()


def get_task_page_num():
    """Scan the classify cache tree and populate the page counters.

    Expected layout: ``classify_cache_root_dir/<task>/<page-number>/<files>``.
    For each task, records the current page number in ``task_page_num`` and
    the file count of the highest-numbered page dir in ``task_page_file_num``
    so that cache() keeps appending where the last run stopped.

    Robustness fix: the original used ``str(len(page_dirs))`` as the last
    page name, which crashes at import (FileNotFoundError) if the page dirs
    are not exactly 1..N — e.g. a stray file or a deleted page.  Here only
    numeric entries count as pages and ``max()`` picks the last one; for a
    contiguous 1..N layout the result is identical.
    """
    global task_page_num, task_page_file_num
    for task_name in os.listdir(classify_cache_root_dir):
        task_dir = os.path.join(classify_cache_root_dir, task_name)
        if not os.path.isdir(task_dir):
            continue  # stray files cannot contain pages
        page_numbers = [int(name) for name in os.listdir(task_dir) if name.isdigit()]
        if not page_numbers:
            continue
        last_page = max(page_numbers)
        task_page_num[task_name] = last_page
        task_page_file_num[task_name] = len(
            os.listdir(os.path.join(task_dir, str(last_page))))


# Preload the page/file counters at import time so cache() appends to the
# existing directory layout instead of starting over at page 1.
get_task_page_num()


# for task_dir in task_page_num:
#     print(task_dir, task_page_num[task_dir])
# for task_dir in task_page_file_num:
#     print(task_dir, task_page_file_num[task_dir])
# exit()


# 客户端
# 用于向服务器请求缓存文件
# 输入为字符串key和cache_name，输出为缓存文件
# Client side of the local cache-index service.
def get_cache(key, cache_name):
    """Ask the cache-index server on 127.0.0.1:16839 for the entry of *key*.

    Sends a JSON request ``{'key', 'cache_name', 'method': 'get_cache'}`` and
    returns the server's response string, or None on an empty response or any
    error (errors are printed, not raised).

    Fixes vs. original: the socket is now closed deterministically via a
    ``with`` block (it used to leak), ``sendall`` replaces ``send`` (which may
    transmit only part of the payload), and the bare ``except`` no longer
    swallows KeyboardInterrupt/SystemExit.
    """
    try:
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as client:
            client.connect(('127.0.0.1', 16839))
            payload = json.dumps({
                'key': key,
                'cache_name': cache_name,
                'method': 'get_cache'
            })
            client.sendall(payload.encode('utf-8'))
            data = client.recv(1024).decode('utf-8')
        if data == '':
            return None
        return data
    except Exception:
        print(traceback.format_exc())
        return None


def add_cache(key, cache_name, file_path):
    """Register *file_path* under (*key*, *cache_name*) with the cache-index
    server on 127.0.0.1:16839.

    Fire-and-forget: sends a JSON ``add_cache`` message and expects no reply;
    any error is printed and swallowed.

    Fixes vs. original: the socket is closed deterministically via ``with``
    (it used to leak), ``sendall`` replaces ``send`` (which may transmit only
    part of the payload), and the bare ``except`` no longer swallows
    KeyboardInterrupt/SystemExit.
    """
    try:
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as client:
            client.connect(('127.0.0.1', 16839))
            payload = json.dumps({
                'key': key,
                'cache_name': cache_name,
                'method': 'add_cache',
                'file_path': file_path
            })
            client.sendall(payload.encode('utf-8'))
    except Exception:
        print(traceback.format_exc())


# 缓存字符串
def cache(url: str,
          method: str = 'get',
          filetype: str = 'txt',
          headers: dict = None,
          params: dict = None,
          json: dict = None,
          cookies: dict = None,
          timeout: int = 10,
          verify: bool = False,
          strict: bool = False,
          min_effective_length: int = None,
          bad_words: list = None,
          cache_name: str = 'cache',
          save_cache_file: str = None,
          skip_req: bool = False
          ):
    """Fetch *url* with on-disk caching.

    Unless *strict*, first consults the cache (index server, then the legacy
    flat cache roots) and returns the cached content if it passes the
    validity checks.  On a miss it performs the request (up to 3 attempts),
    stores the body in the paged cache tree, registers it with the index
    server in a background thread, and returns the body.

    :param url: request URL
    :param method: 'get' for GET; anything else issues a POST
    :param filetype: 'txt' caches/returns text, anything else raw bytes
    :param headers: request headers (passed to requests)
    :param params: request query params (passed to requests)
    :param json: request JSON body (passed to requests; shadows the json
        module inside this function — the module is not used here)
    :param cookies: request cookies (passed to requests)
    :param timeout: request timeout in seconds
    :param verify: verify SSL certificates
    :param strict: skip the cache lookup and always re-request
    :param min_effective_length: cached text shorter than this is re-fetched
    :param bad_words: cached text containing any of these words is re-fetched
    :param cache_name: cache category; used as a sub-directory for sharding
    :param save_cache_file: explicit cache key; defaults to
        url + method + params + json
    :param skip_req: only consult the cache; return None on a miss
    :returns: response text/bytes on success, the raw body on a non-200
        status, or None after 3 failed attempts (or a skip_req miss)
    """
    global task_page_num, task_page_file_num
    if not strict:
        # ---- read from the preloaded paged cache (via the index server) ----
        if save_cache_file is None:
            save_cache_file = url + str(method) + str(params) + str(json)
        save_cache_file = validateTitle(save_cache_file)
        found_cache_file = get_cache(save_cache_file, cache_name)

        # ---- fall back to the legacy flat cache roots (last match wins) ----
        if found_cache_file is None:
            for check_cache_root in check_cache_root_dir:
                candidate = os.path.join(check_cache_root, cache_name, save_cache_file)
                if os.path.exists(candidate):
                    found_cache_file = candidate

        if found_cache_file:
            if filetype == 'txt':
                with open(found_cache_file, "r", encoding='utf-8') as f:
                    cache_content = f.read()
                if len(cache_content) > 0:
                    # Reject cached text that is too short or poisoned.
                    effective = True
                    if min_effective_length is not None and len(cache_content) < min_effective_length:
                        effective = False
                    if bad_words is not None:
                        for word in bad_words:
                            if word in cache_content:
                                effective = False
                                break
                    if effective:
                        return cache_content
            else:
                with open(found_cache_file, "rb") as f:
                    cache_content = f.read()
                if len(cache_content) > 0:
                    return cache_content
    if skip_req:
        return None
    # NOTE(review): an unused hard-coded proxy block (tunnel host + plaintext
    # credentials) was removed here — it was dead code and leaked secrets.
    for _attempt in range(3):
        try:
            if method == 'get':
                a = requests.get(url, headers=headers, params=params, json=json,
                                 cookies=cookies, timeout=timeout, verify=verify)
            else:
                a = requests.post(url, headers=headers, params=params, json=json,
                                  cookies=cookies, timeout=timeout, verify=verify)
            print(a.status_code, len(a.content))
            if a.status_code != 200:
                # Non-200 responses are returned raw and never cached.
                print(a.content)
                return a.content

            # Advance to a fresh page dir once the current one is full.
            task_page_file_num.setdefault(cache_name, 0)
            task_page_num.setdefault(cache_name, 1)
            if task_page_file_num[cache_name] >= max_filenum_per_dir:
                task_page_file_num[cache_name] = 0
                task_page_num[cache_name] += 1

            if save_cache_file is None:
                # strict path: build the same key the read path uses.
                # (Bug fix: the original omitted str(json) here, so entries
                # written under strict=True could never be found later.)
                save_cache_file = url + str(method) + str(params) + str(json)
            save_cache_file = validateTitle(save_cache_file)

            with mkdir_lock:
                save_path = os.path.join(classify_cache_root_dir, cache_name,
                                         str(task_page_num[cache_name]))
                # Replaces three racy exists()+mkdir() pairs in the original.
                os.makedirs(save_path, exist_ok=True)

            save_cache_file_path = os.path.join(save_path, save_cache_file)
            # Register the new entry with the index server off-thread.
            thread_it(add_cache, save_cache_file, cache_name, save_cache_file_path)
            task_page_file_num[cache_name] += 1

            if filetype == 'txt':
                with open(save_cache_file_path, "w", encoding="utf-8") as f:
                    f.write(a.text)
                return a.text
            with open(save_cache_file_path, "wb") as f:
                f.write(a.content)
            return a.content
        except KeyboardInterrupt:
            raise
        except requests.exceptions.ReadTimeout:
            print('请求超时')
        except requests.exceptions.SSLError:
            print('SSL错误')
        except requests.exceptions.ConnectTimeout:
            print('连接超时')
        except Exception:
            print(traceback.format_exc())
    return None


# https___www.bilibili.com_video_BV11N4y1k7i9_
# url = 'https://www.bilibili.com/video/BV11N4y1k7i9'
# a = cache(url, cache_name='视频页面', skip_req=True, save_cache_file='https___www.bilibili.com_video_BV12u4y1Y7LC_')
# print(a)

class Cache:
    """Thin requests-style facade over cache(): exposes .get() and .post()."""

    def get(self, url, filetype='txt', headers=None, params=None, json=None, cookies=None, timeout=10, verify=False, strict=False,
            min_effective_length=None, bad_words=None, cache_name='cache', save_cache_file=None, skip_req=False):
        """Cached HTTP GET; all options are forwarded to cache()."""
        return cache(url, 'get', filetype=filetype, headers=headers, params=params,
                     json=json, cookies=cookies, timeout=timeout, verify=verify,
                     strict=strict, min_effective_length=min_effective_length,
                     bad_words=bad_words, cache_name=cache_name,
                     save_cache_file=save_cache_file, skip_req=skip_req)

    def post(self, url, filetype='txt', headers=None, params=None, json=None, cookies=None, timeout=10, verify=False, strict=False,
             min_effective_length=None, bad_words=None, cache_name='cache', save_cache_file=None, skip_req=False):
        """Cached HTTP POST; all options are forwarded to cache()."""
        return cache(url, 'post', filetype=filetype, headers=headers, params=params,
                     json=json, cookies=cookies, timeout=timeout, verify=verify,
                     strict=strict, min_effective_length=min_effective_length,
                     bad_words=bad_words, cache_name=cache_name,
                     save_cache_file=save_cache_file, skip_req=skip_req)


# Shared module-level instance used by importers of this file.
cache_requests = Cache()
