from importlib import import_module

# from pandas.compat.pyarrow import pa
from .base import BaseCrawler
import inspect
class CrawlerFactory:
    """Factory that instantiates crawler objects from a configuration mapping."""

    @classmethod
    def create_from_config(cls, config):
        """Build crawler instances described by *config*.

        Args:
            config: Mapping of source name -> source settings. Each source
                should contain a "modules" list; each list entry maps a
                module name to its parameters ("parser", and optionally
                "path", "params", "data_process_type").

        Returns:
            list: Successfully constructed crawler instances. Sources or
            modules that fail to import or resolve are skipped with a
            printed warning rather than raising.
        """
        crawlers = []
        for source_name, params in config.items():
            if "modules" not in params:
                # Guard clause: nothing to build for this source.
                print(f"Warning: 'modules' key not found in {source_name} configuration. Skipping...")
                continue
            for module_entry in params["modules"]:
                for module_name, module_params in module_entry.items():
                    try:
                        data_process_type = module_params.get("data_process_type", "")
                        # Parser module lives at src.crawler.<source>.<parser>;
                        # the crawler class name is the parser name capitalized.
                        module_path = f'src.crawler.{source_name}.{module_params["parser"]}'
                        parser_module = import_module(module_path)
                        crawler_class = getattr(parser_module, module_params["parser"].capitalize())

                        # Web sources join base_url + path; API sources use path as-is.
                        url = ""
                        source_type = params.get("source_type")
                        if source_type == "web":
                            url = params.get("base_url", "") + module_params.get("path", "")
                        elif source_type == "api":
                            url = module_params.get("path", "")

                        request_params = module_params.get("params", {})
                        print(f"{module_name} 加载正常")
                        # If the constructor declares a `params` argument, hand the
                        # request params over as one dict; otherwise spread them
                        # as individual keyword arguments.
                        if 'params' in inspect.signature(crawler_class.__init__).parameters:
                            crawlers.append(crawler_class(
                                name=module_name,
                                url=url,
                                data_process_type=data_process_type,
                                params=request_params
                            ))
                        else:
                            crawlers.append(crawler_class(
                                name=module_name,
                                url=url,
                                data_process_type=data_process_type,
                                **request_params
                            ))
                    except ImportError as e:
                        print(f"Error importing module {module_params['parser']}: {str(e)}")
                    except AttributeError:
                        print(f"Crawler class {module_params['parser'].capitalize()} not found.")
                    except KeyError as e:
                        # A required config key (e.g. "parser") is missing; skip
                        # this module instead of crashing the whole factory.
                        print(f"Missing required key {e} in {module_name} configuration. Skipping...")
        return crawlers