"""
Base scraper class with common functionality
"""
import time
import random
import logging
from abc import ABC, abstractmethod
from typing import List, Dict, Optional
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException, NoSuchElementException
import undetected_chromedriver as uc
from fake_useragent import UserAgent

from ..config import config, USER_AGENTS
from ..models import RestaurantData, DatabaseManager


class BaseScraper(ABC):
    """Abstract base class for all platform scrapers.

    Provides shared machinery: Chrome driver lifecycle, throttling delays,
    null-safe element lookup/extraction, retry with exponential backoff, and
    the `scrape_city` orchestration loop. Subclasses implement the three
    platform-specific hooks: `navigate_to_city`, `get_restaurant_urls`, and
    `extract_restaurant_data`.
    """

    def __init__(self, platform_name: str, database_manager: DatabaseManager):
        """
        Args:
            platform_name: Identifier of the platform this scraper targets
                (also used to tag saved records and the logger name).
            database_manager: Persistence layer for restaurants and
                scraping-session bookkeeping.
        """
        self.platform_name = platform_name
        self.db_manager = database_manager
        self.driver = None  # populated by setup_driver()
        self.wait = None    # WebDriverWait bound to self.driver
        self.logger = logging.getLogger(f"{__name__}.{platform_name}")
        self.ua = UserAgent()

    def setup_driver(self):
        """Initialize an undetected Chrome driver with basic settings.

        Raises:
            Exception: Re-raised after logging if driver creation fails.
        """
        options = Options()

        # Basic options
        if config.HEADLESS_MODE:
            options.add_argument('--headless')
        options.add_argument('--no-sandbox')
        options.add_argument('--disable-dev-shm-usage')

        # Mobile-like viewport (375x812 — iPhone X dimensions)
        options.add_argument('--window-size=375,812')

        try:
            self.driver = uc.Chrome(options=options)
            self.wait = WebDriverWait(self.driver, config.PAGE_LOAD_TIMEOUT)
            self.logger.info(f"Chrome driver initialized for {self.platform_name}")
        except Exception as e:
            self.logger.error(f"Failed to initialize driver: {e}")
            raise

    def close_driver(self):
        """Close the browser driver. Idempotent: safe to call more than once."""
        if self.driver:
            try:
                self.driver.quit()
                self.logger.info("Driver closed successfully")
            except Exception as e:
                self.logger.error(f"Error closing driver: {e}")
            finally:
                # Reset references so a second call (scrape_city's `finally`
                # followed by __exit__) is a no-op instead of quitting a
                # dead session and logging a spurious error.
                self.driver = None
                self.wait = None

    def random_delay(self, min_delay: Optional[float] = None, max_delay: Optional[float] = None):
        """Sleep for a random interval to throttle request rate.

        Args:
            min_delay: Lower bound in seconds; config.REQUEST_DELAY_MIN if None.
            max_delay: Upper bound in seconds; config.REQUEST_DELAY_MAX if None.
        """
        # `is None` (not truthiness) so an explicit 0 delay is honored.
        if min_delay is None:
            min_delay = config.REQUEST_DELAY_MIN
        if max_delay is None:
            max_delay = config.REQUEST_DELAY_MAX
        time.sleep(random.uniform(min_delay, max_delay))

    def safe_find_element(self, by: str, value: str, timeout: int = 10) -> Optional[object]:
        """Find a single element, returning None instead of raising on timeout.

        Args:
            by: Locator strategy — a `By.*` constant (these are plain strings).
            value: Locator value (selector/xpath/etc.).
            timeout: Seconds to wait for the element to be present.

        Returns:
            The WebElement, or None if it did not appear within `timeout`.
        """
        try:
            return WebDriverWait(self.driver, timeout).until(
                EC.presence_of_element_located((by, value))
            )
        except TimeoutException:
            self.logger.warning(f"Element not found: {value}")
            return None

    def safe_find_elements(self, by: str, value: str, timeout: int = 10) -> List[object]:
        """Find all matching elements, returning [] instead of raising.

        Waits until at least one match is present, then collects all matches.

        Args:
            by: Locator strategy — a `By.*` constant (these are plain strings).
            value: Locator value.
            timeout: Seconds to wait for the first match.

        Returns:
            List of WebElements; empty if none appeared within `timeout`.
        """
        try:
            WebDriverWait(self.driver, timeout).until(
                EC.presence_of_element_located((by, value))
            )
            return self.driver.find_elements(by, value)
        except TimeoutException:
            self.logger.warning(f"Elements not found: {value}")
            return []

    def safe_get_text(self, element) -> str:
        """Return the element's stripped text; "" for None or on any error."""
        try:
            return element.text.strip() if element else ""
        except Exception as e:
            self.logger.warning(f"Error extracting text: {e}")
            return ""

    def safe_get_attribute(self, element, attribute: str) -> str:
        """Return the element's attribute value; "" for None or on any error."""
        try:
            return element.get_attribute(attribute) if element else ""
        except Exception as e:
            self.logger.warning(f"Error extracting attribute {attribute}: {e}")
            return ""

    def retry_operation(self, operation, max_retries: Optional[int] = None, *args, **kwargs):
        """Call `operation(*args, **kwargs)`, retrying with exponential backoff.

        Waits 2**attempt seconds (plus up to 1s jitter) between attempts.

        Args:
            operation: Callable to invoke.
            max_retries: Total attempt count; config.MAX_RETRIES if None.

        Returns:
            Whatever `operation` returns on its first successful attempt.

        Raises:
            Exception: The last attempt's exception, once retries are exhausted.
        """
        # `is None` (not truthiness) so an explicit 0 is not silently replaced.
        if max_retries is None:
            max_retries = config.MAX_RETRIES

        for attempt in range(max_retries):
            try:
                return operation(*args, **kwargs)
            except Exception as e:
                if attempt == max_retries - 1:
                    self.logger.error(f"Operation failed after {max_retries} attempts: {e}")
                    raise
                else:
                    wait_time = (2 ** attempt) + random.uniform(0, 1)
                    self.logger.warning(f"Attempt {attempt + 1} failed, retrying in {wait_time:.2f}s: {e}")
                    time.sleep(wait_time)

    @abstractmethod
    def navigate_to_city(self, city: str) -> bool:
        """Navigate the driver to the platform page for `city`.

        Returns:
            True on success, False otherwise.
        """
        pass

    @abstractmethod
    def get_restaurant_urls(self, limit: Optional[int] = None) -> List[str]:
        """Collect restaurant URLs from the current city page.

        Args:
            limit: Optional cap on how many URLs to return.
        """
        pass

    @abstractmethod
    def extract_restaurant_data(self, url: str) -> Optional[RestaurantData]:
        """Extract restaurant data from `url`; None if extraction fails."""
        pass

    def scrape_city(self, city: str, limit: Optional[int] = None) -> List[RestaurantData]:
        """Scrape all restaurants for a city and persist the results.

        Creates a scraping session, drives the subclass hooks, saves each
        restaurant as it is extracted, and records session outcome. The
        driver is always closed on exit.

        Args:
            city: City to scrape.
            limit: Optional cap on the number of restaurants.

        Returns:
            The list of successfully extracted RestaurantData objects.

        Raises:
            Exception: Re-raised (after marking the session failed) if setup,
                navigation, or URL collection fails.
        """
        # Imported before the `try` so the `except` branch can always reference
        # `datetime` — previously this import sat mid-try and any earlier
        # failure turned into a NameError inside the exception handler.
        from datetime import datetime

        self.logger.info(f"Starting scraping for {city} on {self.platform_name}")

        # Create scraping session
        session_id = self.db_manager.create_scraping_session(self.platform_name, city)

        try:
            # Setup driver
            self.setup_driver()

            # Navigate to city
            if not self.navigate_to_city(city):
                raise Exception(f"Failed to navigate to {city}")

            # Get restaurant URLs
            restaurant_urls = self.get_restaurant_urls(limit)
            self.logger.info(f"Found {len(restaurant_urls)} restaurants to scrape")

            # Extract data from each restaurant; per-restaurant failures are
            # counted but do not abort the run.
            scraped_data = []
            errors_count = 0

            for i, url in enumerate(restaurant_urls, 1):
                try:
                    self.logger.info(f"Scraping restaurant {i}/{len(restaurant_urls)}: {url}")

                    restaurant_data = self.retry_operation(self.extract_restaurant_data, url=url)
                    if restaurant_data:
                        restaurant_data.city = city
                        restaurant_data.platform = self.platform_name
                        scraped_data.append(restaurant_data)

                        # Save to database immediately so a later crash
                        # doesn't lose already-extracted records.
                        self.db_manager.save_restaurant(restaurant_data)

                    self.random_delay()

                except Exception as e:
                    errors_count += 1
                    self.logger.error(f"Error scraping restaurant {url}: {e}")

            # Mark session completed (or failed if nothing was scraped)
            self.db_manager.update_scraping_session(
                session_id,
                completed_at=datetime.now(),
                restaurants_scraped=len(scraped_data),
                errors_count=errors_count,
                status='completed' if scraped_data else 'failed'
            )

            self.logger.info(f"Scraping completed: {len(scraped_data)} restaurants, {errors_count} errors")
            return scraped_data

        except Exception as e:
            self.logger.error(f"Scraping failed for {city}: {e}")
            self.db_manager.update_scraping_session(
                session_id,
                completed_at=datetime.now(),
                status='failed',
                error_message=str(e)
            )
            raise

        finally:
            self.close_driver()

    def __enter__(self):
        """Context-manager entry; returns self for `with Scraper(...) as s:`."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context-manager exit; ensures the driver is closed."""
        self.close_driver()
