# Define here the models for your spider middleware
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html

import logging
from scrapy import signals
from itemadapter import is_item, ItemAdapter
from bs4 import BeautifulSoup
from importlib import import_module
from scrapy.exceptions import NotConfigured
from scrapy.http import HtmlResponse
from selenium.webdriver.support.ui import WebDriverWait
from selenium import webdriver
from urllib.parse import urlparse
import json
import uuid
import asyncio
from faststream.rabbitmq import RabbitmqBroker
from datetime import datetime

logger = logging.getLogger(__name__)

class BotManagerMiddleware:
    """Scrapy downloader middleware that publishes request lifecycle events
    (scheduled / response received / exception) to a RabbitMQ exchange via a
    faststream broker.

    Each event is serialized to JSON and published with a routing key of the
    form ``request.<event>`` on the configured exchange.
    """

    def __init__(self, broker_url, exchange_name, *args, **kwargs):
        # AMQP connection URL and target exchange for all published messages.
        self.broker_url = broker_url
        self.exchange_name = exchange_name
        # The broker is created lazily on first publish so that middleware
        # construction performs no network activity.
        self.broker = None

    @classmethod
    def from_crawler(cls, crawler):
        """Build the middleware from crawler settings.

        Also hooks ``spider_closed`` so the broker connection is released
        when the crawl ends (previously the connection was leaked).
        """
        broker_url = crawler.settings.get('RABBITMQ_URL', 'amqp://guest:guest@localhost:5672/')
        exchange_name = crawler.settings.get('RABBITMQ_EXCHANGE', 'scrapy_messages')

        middleware = cls(broker_url, exchange_name)
        crawler.signals.connect(middleware.spider_closed, signal=signals.spider_closed)
        return middleware

    def spider_closed(self, spider=None):
        """Disconnect from RabbitMQ when the spider finishes (best effort)."""
        if self.broker is None:
            return
        try:
            self._get_loop().run_until_complete(self.broker.close())
        except Exception:
            # Cleanup must never break spider shutdown.
            logger.warning("Failed to close RabbitMQ broker cleanly", exc_info=True)
        finally:
            self.broker = None

    def _get_loop(self):
        """Return a usable event loop, creating a new one when necessary.

        When a brand-new loop has to be created, any existing broker is
        discarded: its connections were bound to the old (closed) loop and
        every publish through them would fail.
        """
        try:
            loop = asyncio.get_event_loop()
            if loop.is_closed():
                raise RuntimeError("event loop is closed")
        except RuntimeError:
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            self.broker = None  # old connections are tied to the dead loop
        return loop

    async def _ensure_connection(self):
        """Lazily create and connect the broker on the current event loop."""
        if self.broker is None:
            self.broker = RabbitmqBroker(self.broker_url)
            await self.broker.connect()

    def publish_msg(self, routing_key, payload):
        """Synchronously publish *payload* (a JSON string) under *routing_key*.

        Bridges Scrapy's synchronous middleware API to the async broker by
        driving the publish coroutine to completion on a private event loop.
        NOTE(review): this blocks the crawler for the duration of the publish
        and assumes Scrapy is NOT running on the asyncio reactor
        (``run_until_complete`` raises on an already-running loop) — confirm.
        """
        loop = self._get_loop()
        loop.run_until_complete(self._publish_msg(routing_key, payload))

    async def _publish_msg(self, routing_key, payload):
        """Publish *payload* to the configured exchange, connecting if needed."""
        await self._ensure_connection()
        await self.broker.publish(
            payload,
            routing_key=routing_key,
            exchange=self.exchange_name,
        )

    def process_request(self, request, spider):
        """Report that a request has been scheduled; never blocks the request."""
        logger.info("process_spider_input request %s", request)
        logger.info("request_scheduled spider %s", request)
        payload = {
            'id': request.meta.get('id', ''),
            'task_id': request.meta.get('task_id', ''),
            'spider_name': spider.name,
            'url': request.url,
            'update_at': datetime.now().isoformat(),
            'request_status': "request_scheduled",
        }
        self.publish_msg("request.scheduled", json.dumps(payload))
        return None  # continue normal downloader processing

    def process_exception(self, request, exception, spider):
        """Report a download exception; returns None so Scrapy keeps handling it."""
        url = request.url
        payload = {
            'id': request.meta.get('id', ''),
            'task_id': request.meta.get('task_id', ''),
            'spider_name': spider.name,
            'url': request.url,
            'site': urlparse(url).netloc,
            'request_status': 'request_exception',
            'error_msg': str(exception),
            'result_encoding': request.encoding
        }
        self.publish_msg("request.exception", json.dumps(payload))

    def process_response(self, request, response, spider):
        """Report a received response, then pass it through unchanged."""
        logger.info("request_received spider %s", response)
        url = request.url

        # Non-text responses (images, PDFs, ...) expose neither .xpath nor
        # .text; guard so the middleware does not crash on them.
        if hasattr(response, 'xpath'):
            page_title = response.xpath('//title/text()').extract_first()
            page_title = page_title.strip() if page_title else ''
            response_text = response.text
        else:
            page_title = ''
            response_text = ''

        payload = {
            'id': request.meta.get('id', ''),
            'task_id': request.meta.get('task_id', ''),
            'spider_name': spider.name,
            'url': request.url,
            'scrapy_date': datetime.now().isoformat(),
            'page_title': page_title,
            'site': urlparse(url).netloc,
            'request_status': 'response_received',
            'response_text': response_text,
            'result_encoding': request.encoding
        }
        self.publish_msg("request.response", json.dumps(payload))
        return response


# class SiteStrategyMiddleware:

#     def __init__(self, redis_conn):
#         self.redis_conn = redis_conn


#     @classmethod
#     def from_crawler(cls, crawler):
#         redis_url = crawler.settings.get('MANAGER_REDIS_URL', 'redis://localhost:6379')
#         redis_conn = redis.from_url(redis_url)

#         middleware = cls(redis_conn)

#         return middleware
    
       
#     def process_request(self, request, spider):

#         id = uuid.uuid4()
#         payload = {
#             'spider_name': spider.name,
#             'id': str(id),
#         }
#         request.meta.update(payload)
    
#         site_strategies = self.redis_conn.get('site_strategies')
#         if site_strategies:
#             site = urlparse(request.url).netloc
#             d = json.loads(site_strategies)
#             if site in d:
#                 request.meta.update({
#                     d[site]: True
#                 })
#         return None  # returning None lets the request continue to the next middleware
    



# class SeleniumMiddleware:

#     @classmethod
#     def from_crawler(cls, crawler):
#         driver_name = crawler.settings.get('SELENIUM_DRIVER_NAME', 'chrome')
#         # command_executor = crawler.settings.get('SELENIUM_COMMAND_EXECUTOR', 'http://craken-selenium-standalone-chrome:4444/wd/hub')
#         command_executor = crawler.settings.get('SELENIUM_COMMAND_EXECUTOR', '')
#         screen_resolution = crawler.settings.get('SELENIUM_SCREE_RESOLUTION', '1920x1080')
        

#         method_name = f"{driver_name.capitalize()}Options"
#         options = getattr(webdriver, method_name)()
#         options.set_capability('se:screenResolution', screen_resolution)

#         middleware = cls()
#         middleware.driver = webdriver.Remote(
#             command_executor=command_executor,
#             options=options
#         )

#         crawler.signals.connect(middleware.spider_closed, signals.spider_closed)

#         return middleware


#     def process_request(self, request, spider):
#         """Process a request using the selenium driver if applicable"""
#         if not request.meta.get('selenium', False):
#             return None

#         # TODO: timeout error
#         self.driver.get(request.url)

#         implicitly_wait = spider.crawler.settings.get('SELENIUM_IMPLICITLY_WAIT', 10)
        
#         wait = WebDriverWait(self.driver, implicitly_wait)

#         wait_until = request.meta.get('wait_until', None)
#         if wait_until:
#             wait.until(
#                 wait_until
#             )

#         page_source = self.driver.page_source

#         # Expose the driver via the "meta" attribute
#         request.meta.update({'driver': self.driver})

#         return HtmlResponse(
#             self.driver.current_url,
#             body=page_source,
#             encoding=request.encoding,
#             request=request
#         )
    

    


#     def spider_closed(self):
#         """Shutdown the driver when spider is closed"""
#         self.driver.quit()
