import os
import typing
import json
import copy
import queue
import time
import signal
import multiprocessing
from multiprocessing import Queue
from squirrel_core.frame import start_scrapy_job

# Process-wide multiprocessing.Manager, created lazily by get_manager().
_manager = None


def update_scrapy_conf(object_class: str) -> dict:
    """Build the scrapy job configuration dict for *object_class*.

    *object_class* is expected to look like ``"<first>_<second>"`` (e.g.
    ``"shuzixindong_score"``): ``name_first`` is the text before the first
    underscore and ``name_second`` the segment right after it.  When the name
    contains no underscore, ``name_second`` is now ``""`` instead of raising
    IndexError as the previous ``split("_")[1]`` did.

    Returns a fresh dict on every call, so callers may mutate it freely
    (the previous implementation deep-copied a dict literal that was already
    rebuilt on each call — redundant work).
    """
    parts = object_class.split("_")
    name_first = parts[0]
    name_second = parts[1] if len(parts) > 1 else ""
    return {
        'job_config': {
            'proxy_config': {
                'use_proxy': True,
                'proxy_q_name': 'makaka_spider',
                'proxy_max_use_count': 10,
            },
            'crawl_increment': True,
            'concurrent': 32,
            'spider_class': object_class,
            'spider_name': object_class,
            'name_second': name_second,
            'name_first': name_first,
        },
    }


def start(object_class: str, queue: list, params: dict):
    """Configure and launch a scrapy job for *object_class*.

    Runs as the target of a child ``multiprocessing.Process`` (see
    ``scrapy_task``).  Overrides from ``params["spider_config"]`` are applied
    on top of the template returned by ``update_scrapy_conf`` before the job
    is handed to ``start_scrapy_job`` together with the in/out queue pair.

    :param object_class: spider class name, e.g. ``"shuzixindong_score"``.
    :param queue: ``[input_queue, output_queue]`` pair shared with the parent.
    :param params: job parameters; only ``params["spider_config"]`` is read.
    """
    # Read the override section once instead of re-fetching it per key.
    spider_config = params.get("spider_config", {})
    config = update_scrapy_conf(object_class)
    job_config = config["job_config"]
    job_config["env"] = spider_config.get("env", "")
    job_config["crawl_increment"] = spider_config.get("crawl_increment", True)
    job_config["proxy_config"]["use_proxy"] = spider_config.get("use_proxy", True)
    start_scrapy_job(object_class, config=config, queue=queue)


# def _close_queue(queue_list: typing.MutableSequence):
#     for idx in range(len(queue_list)):
#         q = queue_list.pop()
#         if q and isinstance(q, queue.Queue):
#             try:
#                 while q.qsize():
#                     q.get_nowait()
#             except:
#                 pass
#             finally:
#                 del q

def _close_queue(queues):
    for q in queues:
        try:
            while True:
                q.get_nowait()
        except (queue.Empty, OSError, EOFError):
            pass
        finally:
            try:
                q.close()
            except:
                pass


def get_manager():
    """Return the shared multiprocessing.Manager, creating it on first use."""
    global _manager
    if _manager is not None:
        return _manager
    _manager = multiprocessing.Manager()
    return _manager


def scrapy_task(spider_class: str, params: typing.MutableMapping, main_fu=None, timeout=60):
    """Run *spider_class* in a child process and wait for its first result.

    *params* is JSON-serialised onto an input queue; the child process (see
    ``start``) is expected to push JSON payloads onto the output queue.

    :param spider_class: spider class name passed through to the child.
    :param params: job parameters, must be JSON-serialisable.
    :param main_fu: unused; kept for backward compatibility with callers.
    :param timeout: seconds to wait for a result before giving up.
    :returns: the ``'_data'`` dict of the first payload whose ``'_data_type'``
        is 1, augmented with the child's ``"pid"``; ``None`` if the child
        exits, the payload is malformed, or *timeout* elapses first.
    """
    in_q = Queue()
    out_q = Queue()
    _queue = [in_q, out_q]
    in_q.put(json.dumps(params))
    p = multiprocessing.Process(target=start, args=(spider_class, _queue, params))
    p.start()

    deadline = time.time() + timeout
    try:
        while p.is_alive() and time.time() < deadline:
            try:
                # Block briefly on the queue itself rather than polling
                # empty() + sleep: empty() is unreliable across processes
                # and raced with the subsequent get().
                _data = out_q.get(timeout=0.5)
            except queue.Empty:
                continue
            if not _data:
                continue
            try:
                payload = json.loads(_data)
                if payload.get('_data_type') == 1:
                    data_dict = payload['_data']
                    data_dict.update({"pid": p.pid})
                    return data_dict
            except Exception as e:
                print(f"Error processing data: {e}")
                break
        return None
    finally:
        # The previous version left the child running after a timeout
        # (process leak — the cleanup call was commented out).  Always reap
        # the child before draining the queues.  The old signal.alarm(0)
        # calls were dead code: no alarm was ever armed and no SIGALRM
        # handler installed.
        _cleanup_process(p)
        _close_queue(_queue)


def _cleanup_process(process, timeout=5):
    if process and process.is_alive():
        print(f"Cleaning up process {process.pid}...")

        process.terminate()
        process.join(timeout=timeout)

        if process.is_alive():
            print(f"Force killing stubborn process {process.pid}...")
            process.kill()
            process.join(timeout=2)

        if process.is_alive():
            print(f"CRITICAL: Process {process.pid} survived kill!")
        else:
            print(f"Process {process.pid} cleaned up")
