import socket
import configparser
import json
import os
import threading
import traceback
import time

from tqdm import tqdm

# csv.field_size_limit(500 * 1024 * 1024)
# Service configuration, read once at import time from config.ini (UTF-8).
conf = configparser.ConfigParser()
conf.read('config.ini', encoding="utf-8")

# Root directory holding per-task classification cache pages.
classify_cache_root_dir = conf.get('cache', 'classify_cache_root_dir')
# Comma-separated list of check-cache root directories.
check_cache_root_dir = conf.get('cache', 'check_cache_root_dir').split(',')
# Upper bound on the number of files stored per cache directory.
max_filenum_per_dir = int(conf.get('cache', 'max_filenum_per_dir'))

# In-memory index: cache key -> full path of the cached file.
key_to_file = {}
# Task names whose on-disk indexes still need loading / are already loaded.
tasks_that_require_loading_indexes = set()
task_with_index_already_loaded = set()

# Guard shared state against concurrent mutation from worker threads.
change_key_to_file_lock = threading.Lock()
change_tasks_that_require_loading_indexes_lock = threading.Lock()


def get_key_to_file():
    """Load the cache-file index for every queued task into key_to_file.

    For each task in tasks_that_require_loading_indexes, walks
    classify_cache_root_dir/<task>/<page_dir> and records every file name
    as key -> full path.  Finished tasks are added to
    task_with_index_already_loaded.  Runs once; intended as a daemon
    thread target.
    """
    global key_to_file
    # Snapshot under the lock: listener threads may mutate the set
    # concurrently, and iterating a set while it changes size raises
    # RuntimeError.
    with change_tasks_that_require_loading_indexes_lock:
        tasks = list(tasks_that_require_loading_indexes)
    for task in tasks:
        print(task, '开始加载缓存索引')
        task_dir = os.path.join(classify_cache_root_dir, task)
        # isdir (not exists): a stray file with the task's name would
        # make os.listdir below raise.
        if not os.path.isdir(task_dir):
            continue
        page_dirs = [os.path.join(task_dir, page_dir) for page_dir in os.listdir(task_dir)]
        with tqdm(total=len(page_dirs), desc='加载缓存索引') as pbar:
            for page_dir in page_dirs:
                # Only page directories hold cache entries; skip stray files.
                if os.path.isdir(page_dir):
                    for file in os.listdir(page_dir):
                        with change_key_to_file_lock:
                            key_to_file[file] = os.path.join(page_dir, file)
                pbar.update(1)
        print(task, '缓存索引加载完成')
        task_with_index_already_loaded.add(task)


def save_index():
    """Persist the in-memory indexes to JSON files on disk.

    Each index is written to a *_bak.json temporary file first and then
    atomically swapped into place, so a crash mid-write never leaves a
    missing or corrupt index file.  Failures are logged, never raised.
    """
    global key_to_file, task_with_index_already_loaded
    try:
        print('保存缓存索引...')
        with open('key_to_file_bak.json', 'w', encoding='utf-8') as f:
            with change_key_to_file_lock:
                json.dump(key_to_file, f, ensure_ascii=False)
        # os.replace is atomic: unlike remove+rename there is no window
        # in which key_to_file.json does not exist.
        os.replace('key_to_file_bak.json', 'key_to_file.json')
        print('保存完成')
    except Exception:
        # Exception (not bare except): a Ctrl-C during save must still
        # propagate instead of being swallowed here.
        print(traceback.format_exc())
        print('保存缓存索引失败')
    try:
        print('保存已加载缓存索引...')
        with open('task_with_index_already_loaded_bak.json', 'w', encoding='utf-8') as f:
            # NOTE(review): this set has no dedicated lock; a concurrent
            # add from the loader thread could race with list() here.
            json.dump(list(task_with_index_already_loaded), f, ensure_ascii=False)
        os.replace('task_with_index_already_loaded_bak.json', 'task_with_index_already_loaded.json')
        print('保存完成')
    except Exception:
        print(traceback.format_exc())
        print('保存已加载缓存索引失败')


def init():
    """Restore key_to_file from key_to_file.json, if a previous run saved one.

    An absent or empty file leaves the index untouched.
    """
    global key_to_file, task_with_index_already_loaded
    print('初始化缓存索引')
    if os.path.exists('key_to_file.json'):
        with change_key_to_file_lock:
            with open('key_to_file.json', 'r', encoding='utf-8') as f:
                raw = f.read()
            # Guard against a zero-byte file left by an interrupted save.
            if raw:
                key_to_file = json.loads(raw)
    print('初始化已加载缓存索引')


def read_cache(data, conn):
    """Look up data['key'] in the index and reply with its file path.

    Sends the cached file's path (UTF-8) back on a hit, or an empty
    payload on a miss.
    """
    key = data['key']
    # Snapshot the value under the lock so a concurrent rebind of
    # key_to_file (see init) cannot race with the lookup.
    with change_key_to_file_lock:
        file_path = key_to_file.get(key)
    if file_path is not None:
        print('命中缓存', key)
        # sendall: plain send() may transmit only part of the payload.
        conn.sendall(file_path.encode('utf-8'))
    else:
        conn.sendall(''.encode('utf-8'))


def add_cache(data, conn):
    """Register a new cache entry mapping data['key'] to data['file_path'].

    data['cache_name'] is read for protocol compatibility but not used;
    no response is written back to the client.
    """
    key = data['key']
    cache_name = data['cache_name']  # required field, currently unused
    file_path = data['file_path']
    with change_key_to_file_lock:
        key_to_file[key] = file_path
    print('添加缓存', key)


def handle_request(data, conn):
    """Decode one UTF-8 JSON request and dispatch it by its 'method' field.

    Supported methods: 'get_cache' and 'add_cache'.  The connection is
    closed in all cases — including when decoding or a handler raises —
    so worker threads never leak sockets.
    """
    try:
        request = json.loads(data.decode('utf-8'))
        method = request.get('method')
        if method == 'get_cache':
            read_cache(request, conn)
        elif method == 'add_cache':
            add_cache(request, conn)
        else:
            print('未知请求')
    except Exception:
        # A malformed request must not kill the worker silently while
        # leaking the socket; log and fall through to close().
        print(traceback.format_exc())
    finally:
        conn.close()


# Listener: requests carry a string key (and cache_name); the response is key_to_file[key].
# NOTE(review): this comment originally claimed an unknown cache_name gets added to
# tasks_that_require_loading_indexes — no handler below actually does that; confirm intent.
def listen():
    """Accept connections on 127.0.0.1:16839 and spawn a worker per request.

    Each accepted connection gets one recv() and is handed, together with
    its payload, to handle_request on a daemon thread.  Runs forever;
    intended as a daemon thread target.
    """
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Allow immediate restart without 'Address already in use' while the
    # old socket lingers in TIME_WAIT.
    server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server.bind(('127.0.0.1', 16839))
    server.listen(5)
    while True:
        conn, addr = server.accept()
        # NOTE(review): a single recv(1024) assumes every request fits in
        # one segment — larger requests would be truncated.
        data = conn.recv(1024)
        t = threading.Thread(target=handle_request, args=(data, conn))
        # Attribute form: Thread.setDaemon() is deprecated (matches main()).
        t.daemon = True
        t.start()


def main():
    """Start the index loader and the request listener, then idle.

    Blocks until Ctrl-C (or an unexpected error), flushing the in-memory
    indexes to disk via save_index() on the way out.
    """
    init()
    loader = threading.Thread(target=get_key_to_file)
    loader.daemon = True
    loader.start()

    listener = threading.Thread(target=listen)
    listener.daemon = True
    listener.start()

    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        save_index()
        print('退出程序')
    except Exception:
        print(traceback.format_exc())
        save_index()
        # Bare raise preserves the original traceback; 'raise e' would
        # re-anchor it here.
        raise


# Entry point: run the cache-index server when executed as a script.
if __name__ == '__main__':
    main()
