# -*- coding: utf-8 -*-
import json
from multiprocessing import Queue
from squirrel_core.frame import start_scrapy_job


class FrameworkAPI(object):
    """Public entry points for driving the crawler framework."""

    def crawl_debug(self, class_name, name_first, name_second, queue_data=None, *args, **kwargs):
        """Configure and launch a single debug crawl job.

        Mutates the shared ``debug_crawl_config_dict['job_config']`` in place with
        the spider identity, then starts the job with a fresh pair of queues
        (input queue pre-loaded with ``queue_data``).

        :param class_name: spider class identifier passed through to
            ``start_scrapy_job`` and stored as ``spider_class``.
        :param name_first: first half of the spider name.
        :param name_second: second half of the spider name; the full
            ``spider_name`` is ``name_first + '_' + name_second``.
        :param queue_data: optional dict of job input. When a dict, its
            ``spider_config.env`` value (if any) is copied into the job config
            and the dict is JSON-serialized before being enqueued. Any other
            value (including None) is enqueued as-is.
        :param args: unused; accepted for interface compatibility.
        :param kwargs: unused; accepted for interface compatibility.
        """
        job_config_name = 'job_config'
        # Imported lazily; presumably avoids a circular import at module load
        # time -- TODO confirm.
        from squirrel_core.api.api_data import debug_crawl_config_dict
        job_config = debug_crawl_config_dict[job_config_name]
        job_config['name_second'] = name_second
        job_config['name_first'] = name_first
        job_config['spider_name'] = name_first + '_' + name_second
        job_config['spider_class'] = class_name
        # BUG FIX: the original called queue_data.get(...) before the dict
        # check, raising AttributeError whenever queue_data was left as None.
        # Only dicts carry an env and need JSON serialization before enqueue.
        if isinstance(queue_data, dict):
            job_config['env'] = queue_data.get("spider_config", {}).get("env", "")
            queue_data = json.dumps(queue_data)
        else:
            job_config['env'] = ""
        # _queue[0] feeds the job its input; _queue[1] is handed over for the
        # job's output -- exact role is defined by start_scrapy_job.
        _queue = [Queue(), Queue()]
        _queue[0].put(queue_data)
        start_scrapy_job(class_name, config=debug_crawl_config_dict, queue=_queue)
