import threading
import functools
import logging
import daemon
import queue
import json
import time

import LoadRequestArgs
import TransforFile
import GetL1cRaster
import Downloader
import Seacher


class MaxLinesHandler(logging.Handler):
    """Logging handler that appends to *filename*, keeping at most *max_lines* lines.

    After each record is written the file is re-read and, when it has grown
    past ``max_lines``, rewritten with only its most recent ``max_lines``
    lines. With ``max_lines=None`` the file grows without bound (and the
    re-read pass is skipped entirely).
    """

    def __init__(self, filename, max_lines=None):
        super().__init__()
        self.max_lines = max_lines  # None means "no cap"
        self.filename = filename

    def emit(self, record):
        try:
            with open(self.filename, 'a') as file_handle:
                file_handle.write(self.format(record) + '\n')
            if self.max_lines is None:
                # No cap configured: don't pay for reading the whole file back.
                return
            with open(self.filename, 'r') as file_handle:
                lines = file_handle.readlines()
            if len(lines) > self.max_lines:
                with open(self.filename, 'w') as file_handle:
                    file_handle.writelines(lines[-self.max_lines:])
        except Exception:
            # Handlers must never let exceptions escape emit(); route them
            # through the standard logging error machinery instead.
            self.handleError(record)


class SameLogFilter(logging.Filter):
    """Filter that suppresses a record whose message repeats the previous one.

    Only consecutive duplicates are dropped; an intervening different
    message resets the comparison.
    """

    def __init__(self):
        # Fix: the base-class initializer was skipped, leaving the
        # logging.Filter attributes (name/nlen) unset.
        super().__init__()
        self.last_log = None  # message text of the last record that passed

    def filter(self, record):
        current_log = record.getMessage()
        if current_log == self.last_log:
            return False
        self.last_log = current_log
        return True


def setup_logger(name, max_lines, log_file):
    """Create (or fetch) the logger *name* writing INFO+ records to *log_file*.

    The logger gets a MaxLinesHandler capped at *max_lines* lines and a
    SameLogFilter that drops consecutive duplicate messages.

    Fix: ``logging.getLogger`` caches loggers by name, so calling this twice
    with the same *name* used to stack a second handler and duplicate every
    line; an existing handler for the same file is now reused.
    """
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    # Guard against duplicate handlers on the cached logger instance.
    for handler in logger.handlers:
        if isinstance(handler, MaxLinesHandler) and handler.filename == log_file:
            return logger
    # File handler with a line cap.
    file_handler = MaxLinesHandler(filename=log_file, max_lines=max_lines)
    file_handler.setLevel(logging.INFO)
    # Suppress consecutive duplicate messages.
    file_handler.addFilter(SameLogFilter())
    # Timestamped format.
    formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)
    return logger


def error_loop(func, args, **kwargs):
    """Call ``func(*args, **kwargs)`` repeatedly until it returns without error.

    On any exception the failure is reported to stdout and the call retried
    after a 5-minute back-off; the loop exits on the first clean return.
    The broad ``except`` is deliberate: this runs as a worker-thread target
    and must survive any failure.

    Fix: keyword arguments are now accepted and forwarded — run_in_thread
    delivers the decorated call's kwargs here via ``threading.Thread``,
    which previously raised TypeError inside the thread.
    """
    while True:
        try:
            func(*args, **kwargs)
            return
        except Exception as e:
            # Report first, then back off (previously the message was
            # delayed by the full 300 s sleep).
            print(func, e)
            time.sleep(300)


def run_in_thread(name, logpath, daemon=False, maxlines=None):
    """Decorator factory: run the wrapped function on its own retrying thread.

    A dedicated logger (named *name*, writing to *logpath*, capped at
    *maxlines* lines) is created and appended as an extra positional
    argument, so the wrapped ``func`` must accept a logger as its last
    positional parameter. The wrapper starts the thread immediately and
    returns the ``threading.Thread`` object.
    """
    logger = setup_logger(f'{name}', maxlines, logpath)

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Fix: Thread(kwargs=...) delivers keyword arguments to its
            # *target* (error_loop), not to func, which made any kwarg call
            # crash the thread with TypeError. Bind them into func up front.
            target_callable = functools.partial(func, **kwargs) if kwargs else func
            thread = threading.Thread(
                target=error_loop,
                args=(target_callable, (*args, logger)),
                name=name,
                daemon=daemon)
            thread.start()
            return thread
        return wrapper
    return decorator


def core(task_id, qhdm, period, account_index):
    """Wire up and run the full search/download/transfer/raster pipeline.

    Parameters
    ----------
    task_id : str
        Identifier used to name the per-thread log files.
    qhdm : str
        Administrative-region code to process.
    period : list
        ``[start, end]`` ISO date strings.
    account_index : int
        Index into the configured account list.

    Blocks until the loading, search, download and transfer threads finish.
    """
    # Load runtime configuration.
    with open('./config.json', "r") as file:
        params = json.load(file)
    agent = params["agent"]
    accounts = params["accounts"]
    cargs = params["cargs"]
    save_dir = params["save_dir"]
    table_name = params["table_name"]
    interval = params['interval']
    ssh_args = params['ssh_args']
    remote_dir = params['remote_dir']
    remote_ip = params["remote_host_ip"]
    account = accounts[account_index]
    proxies = {
        'http': f"{agent['ip']}:{agent['port']}",
        'https': f"{agent['ip']}:{agent['port']}"}
    cloud = 50     # maximum accepted cloud-cover percentage
    attempt = 15   # download retry attempts

    # Initialization of the request/schedule backend.
    LoadRequestArgs.Initialization(period, save_dir, cargs, table_name)

    # Bounded queues for inter-thread hand-off.
    loading_queue = queue.Queue(maxsize=10000)
    product_queue = queue.Queue(maxsize=10000)
    transfer_queue = queue.Queue(maxsize=10000)
    zipfile_queue = queue.Queue(maxsize=10000)
    messge_queue = queue.Queue(maxsize=10000)

    # Scheduling thread: produces request arguments.
    loading_thread = run_in_thread(
        name='日程线程',
        logpath=f'{task_id}-日程线程.log',
        maxlines=500)(LoadRequestArgs.core)(
            cargs, qhdm, period, cloud, interval, loading_queue)

    # Search thread: resolves requests to products.
    search_thread = run_in_thread(
        name='搜索线程',
        logpath=f'{task_id}-搜索线程.log',
        maxlines=1000)(Seacher.core)(
            cargs, proxies, loading_queue, product_queue)

    # Download thread: fetches the products.
    download_thread = run_in_thread(
        name='下载线程',
        logpath=f'{task_id}-下载线程.log',
        maxlines=None)(Downloader.core)(
            account, save_dir, attempt, agent, product_queue, transfer_queue)

    # Transfer thread: ships archives to the remote host.
    transfor_thread = run_in_thread(
        name='传输线程',
        logpath=f'{task_id}-传输线程.log',
        maxlines=1000)(TransforFile.core)(
            ssh_args, remote_dir, transfer_queue, zipfile_queue)

    # Raster-composition thread: builds L1C rasters from the archives.
    getraster_thread = run_in_thread(
        name='合成线程',
        logpath=f'{task_id}-合成线程.log',
        maxlines=1000)(GetL1cRaster.core)(
            remote_ip, remote_dir, zipfile_queue, messge_queue)

    # Wait for every worker thread to finish (the raster thread was
    # previously started but never joined).
    loading_thread.join()
    search_thread.join()
    download_thread.join()
    transfor_thread.join()
    getraster_thread.join()
    return


def main():
    """Entry point: run the pipeline with the hard-coded task parameters."""
    task_id, qhdm = 'test_41_new', '41'
    period = ['2024-02-01', '2024-04-01']
    account_index = 3
    print(f'任务参数：{task_id} {qhdm} {period} {account_index}')
    core(task_id, qhdm, period, account_index)


if __name__ == "__main__":
    # Example of running a single task directly instead of main():
    # task_id = 'test_14'
    # qhdm = '14'
    # period = ['2024-03-01', '2024-04-01']
    # account_index = 0
    # core(task_id,qhdm,period,account_index)
    main()
    # Alternative: run detached as a Unix daemon (uses the imported
    # python-daemon package; stdout/stderr redirected to files).
    # context = daemon.DaemonContext(
    #     working_directory='/data/ygsfb/projects/sentinel-raster-vector',
    #     stdout=open('主进程_stdout.txt', 'w+'),
    #     stderr=open('主进程_stderr.txt', 'w+'))
    # with context:
    #     main()


    
    



    
  


