"""
OAI-PMH Client Implementation

Robust OAI-PMH client using the sickle library with rate limiting,
error handling, and resumption token support for ArXiv harvesting.
"""

import time
import logging
from datetime import datetime, timezone
from typing import Generator, Optional, Dict, Any
import random

from sickle import Sickle
from sickle.oaiexceptions import (
    OAIError, BadArgument, BadResumptionToken, 
    BadVerb, CannotDisseminateFormat, IdDoesNotExist,
    NoMetadataFormats, NoRecordsMatch, NoSetHierarchy
)
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry

from utils.exceptions import NetworkError, RateLimitError, ScrapingError


class OAIClient:
    """
    OAI-PMH client with robust error handling and rate limiting.

    Features:
    - Configurable rate limiting between requests (default: 3 seconds)
    - Exponential backoff retry logic with jitter
    - Resumption token handling (via sickle's lazy iterators)
    - Network failure recovery
    - Request timeout handling
    """

    def __init__(self, base_url: str, rate_limit_delay: float = 3.0,
                 max_retries: int = 5, timeout: int = 30):
        """Initialize OAI client.

        Args:
            base_url: OAI-PMH base URL
            rate_limit_delay: Minimum delay between requests in seconds
            max_retries: Maximum number of retries per request
            timeout: Request timeout in seconds
        """
        self.base_url = base_url
        self.rate_limit_delay = rate_limit_delay
        self.max_retries = max_retries
        self.timeout = timeout

        self.logger = logging.getLogger(__name__)

        # Initialize sickle client with a custom session so HTTP-level
        # retries and the User-Agent header apply to every OAI request.
        self.session = self._create_session()
        self.sickle = Sickle(base_url, session=self.session, timeout=timeout)

        # Monotonic timestamp of the last outbound request (0.0 = never).
        # time.monotonic() is immune to wall-clock adjustments (NTP, DST),
        # which would otherwise break the rate limiter.
        self._last_request_time = 0.0

        self.logger.info("OAI client initialized: %s", base_url)

    def _create_session(self) -> requests.Session:
        """Create a requests session with an HTTP-level retry strategy.

        Returns:
            Session with transient-failure retries mounted for http:// and
            https:// and a descriptive User-Agent header.
        """
        session = requests.Session()

        # urllib3 >= 1.26 renamed ``method_whitelist`` to ``allowed_methods``
        # and removed the old name entirely in urllib3 2.0.  Try the modern
        # keyword first; fall back for very old urllib3 releases.
        retry_kwargs = dict(
            total=self.max_retries,
            status_forcelist=[429, 500, 502, 503, 504],
            backoff_factor=1,
            respect_retry_after_header=True,
        )
        try:
            retry_strategy = Retry(allowed_methods=["GET"], **retry_kwargs)
        except TypeError:  # urllib3 < 1.26
            retry_strategy = Retry(method_whitelist=["GET"], **retry_kwargs)

        adapter = HTTPAdapter(max_retries=retry_strategy)
        session.mount("http://", adapter)
        session.mount("https://", adapter)

        # Identify ourselves politely to the OAI-PMH provider.
        session.headers.update({
            'User-Agent': 'ArXiv-Scraper-Service/1.0 (research paper aggregation)'
        })

        return session

    def _rate_limit(self) -> None:
        """Sleep as needed so requests are at least rate_limit_delay apart."""
        elapsed = time.monotonic() - self._last_request_time

        if elapsed < self.rate_limit_delay:
            sleep_time = self.rate_limit_delay - elapsed
            self.logger.debug("Rate limiting: sleeping for %.2f seconds", sleep_time)
            time.sleep(sleep_time)

        self._last_request_time = time.monotonic()

    def _backoff_sleep(self, attempt: int, error: Exception, label: str) -> None:
        """Sleep for an exponentially growing interval with jitter.

        Args:
            attempt: Zero-based attempt number (drives the 2**attempt growth)
            error: The exception that triggered the retry (for the log)
            label: Short error-category description for the log message
        """
        backoff_time = (2 ** attempt) + random.uniform(0, 1)
        self.logger.warning("%s on attempt %d, retrying in %.2f seconds: %s",
                            label, attempt + 1, backoff_time, error)
        time.sleep(backoff_time)

    def _retry_with_backoff(self, func, *args, **kwargs):
        """Execute func with rate limiting and exponential-backoff retries.

        Network and generic OAI-PMH errors are retried up to max_retries
        times.  Protocol errors that cannot succeed on retry (bad arguments,
        bad resumption token) fail immediately.  IdDoesNotExist and
        NoSetHierarchy are re-raised untouched so callers can handle them.

        Args:
            func: Callable performing the OAI request
            *args: Positional arguments for func
            **kwargs: Keyword arguments for func

        Returns:
            func's return value, or [] when no records match.

        Raises:
            NetworkError: Network failure persisted through all retries
            ScrapingError: Configuration error or persistent OAI/unknown error
            IdDoesNotExist: Requested identifier does not exist
            NoSetHierarchy: Repository does not support sets
        """
        for attempt in range(self.max_retries + 1):
            try:
                self._rate_limit()
                return func(*args, **kwargs)

            except (requests.exceptions.Timeout, requests.exceptions.ConnectionError) as e:
                if attempt == self.max_retries:
                    raise NetworkError(f"Network error after {self.max_retries} retries: {e}")
                self._backoff_sleep(attempt, e, "Network error")

            except BadResumptionToken as e:
                self.logger.error("Bad resumption token, cannot continue: %s", e)
                raise ScrapingError(f"Bad resumption token: {e}")

            except NoRecordsMatch:
                self.logger.info("No records match the criteria")
                return []  # Empty harvest is an expected, non-error outcome

            except (IdDoesNotExist, NoSetHierarchy):
                # Both subclass OAIError, so they MUST be caught before the
                # generic handler below: retrying them is pointless, and
                # wrapping them in ScrapingError would hide them from the
                # dedicated handlers in get_record() and list_sets().
                raise

            except (BadArgument, BadVerb, CannotDisseminateFormat) as e:
                # These are configuration errors; retrying cannot help.
                raise ScrapingError(f"OAI-PMH configuration error: {e}")

            except OAIError as e:
                if attempt == self.max_retries:
                    raise ScrapingError(f"OAI-PMH error after {self.max_retries} retries: {e}")
                self._backoff_sleep(attempt, e, "OAI-PMH error")

            except Exception as e:
                if attempt == self.max_retries:
                    raise ScrapingError(f"Unexpected error after {self.max_retries} retries: {e}")
                self._backoff_sleep(attempt, e, "Unexpected error")

    def list_records(self, metadata_prefix: str = 'oai_dc',
                    set_spec: Optional[str] = None,
                    from_date: Optional[str] = None,
                    until_date: Optional[str] = None) -> Generator:
        """
        List records from OAI-PMH endpoint with resumption token support.

        Args:
            metadata_prefix: Metadata format (default: oai_dc)
            set_spec: Set specification for selective harvesting
            from_date: Start date (ISO format)
            until_date: End date (ISO format)

        Yields:
            Record objects from OAI-PMH response (deleted records skipped)
        """
        self.logger.info("Starting record harvest: prefix=%s, set=%s, from=%s, until=%s",
                         metadata_prefix, set_spec, from_date, until_date)

        def _list_records():
            # 'from' is a Python keyword, so the date arguments must be
            # passed via dict unpacking rather than as plain kwargs.
            return self.sickle.ListRecords(
                metadataPrefix=metadata_prefix,
                set=set_spec,
                **({'from': from_date} if from_date else {}),
                **({'until': until_date} if until_date else {})
            )

        try:
            # Only the initial request goes through the retry wrapper; sickle
            # follows resumption tokens lazily during iteration below.
            records_iterator = self._retry_with_backoff(_list_records)

            record_count = 0

            for record in records_iterator:
                # Skip records the repository has flagged as deleted.
                if hasattr(record, 'deleted') and record.deleted:
                    self.logger.debug("Skipping deleted record: %s",
                                      getattr(record, 'identifier', 'unknown'))
                    continue

                record_count += 1

                # Log progress periodically
                if record_count % 100 == 0:
                    self.logger.info("Harvested %d records...", record_count)

                yield record

            self.logger.info("Record harvest completed: %d records harvested", record_count)

        except Exception as e:
            self.logger.error("Record harvesting failed: %s", e)
            raise

    def get_record(self, identifier: str, metadata_prefix: str = 'oai_dc'):
        """
        Get a single record by identifier.

        Args:
            identifier: OAI identifier
            metadata_prefix: Metadata format

        Returns:
            Record object, or None if the identifier does not exist
        """
        self.logger.debug("Fetching single record: %s", identifier)

        def _get_record():
            return self.sickle.GetRecord(
                identifier=identifier,
                metadataPrefix=metadata_prefix
            )

        try:
            return self._retry_with_backoff(_get_record)
        except IdDoesNotExist:
            # Reachable because _retry_with_backoff re-raises this untouched.
            self.logger.warning("Record not found: %s", identifier)
            return None
        except Exception as e:
            self.logger.error("Error fetching record %s: %s", identifier, e)
            raise

    def identify(self) -> Dict[str, Any]:
        """
        Get repository information via the Identify verb.

        Returns:
            Dictionary with repository name, base URL, protocol version,
            earliest datestamp, deleted-record policy, granularity and
            admin email(s).

        Raises:
            ScrapingError: If the Identify request fails.
        """
        def _identify():
            return self.sickle.Identify()

        try:
            identify_response = self._retry_with_backoff(_identify)

            info = {
                'repository_name': identify_response.repositoryName,
                'base_url': identify_response.baseURL,
                'protocol_version': identify_response.protocolVersion,
                'earliest_datestamp': identify_response.earliestDatestamp,
                'deleted_record': identify_response.deletedRecord,
                'granularity': identify_response.granularity,
                'admin_email': getattr(identify_response, 'adminEmail', [])
            }

            self.logger.info("Repository info: %s", info['repository_name'])
            return info

        except Exception as e:
            self.logger.error("Error getting repository info: %s", e)
            raise ScrapingError(f"Failed to get repository information: {e}")

    def list_sets(self) -> Generator:
        """
        List available sets.

        Yields:
            Dicts with 'spec', 'name' and 'description' keys; yields nothing
            if the repository does not support sets.
        """
        def _list_sets():
            return self.sickle.ListSets()

        try:
            sets_iterator = self._retry_with_backoff(_list_sets)

            for set_obj in sets_iterator:
                yield {
                    'spec': set_obj.setSpec,
                    'name': set_obj.setName,
                    'description': getattr(set_obj, 'setDescription', '')
                }

        except NoSetHierarchy:
            # Reachable because _retry_with_backoff re-raises this untouched.
            self.logger.info("Repository does not support sets")
            return
        except Exception as e:
            self.logger.error("Error listing sets: %s", e)
            raise

    def list_metadata_formats(self, identifier: Optional[str] = None) -> Generator:
        """
        List available metadata formats.

        Args:
            identifier: Optional record identifier to scope the listing

        Yields:
            Dicts with 'prefix', 'schema' and 'namespace' keys.
        """
        def _list_metadata_formats():
            return self.sickle.ListMetadataFormats(
                identifier=identifier
            )

        try:
            formats_iterator = self._retry_with_backoff(_list_metadata_formats)

            for format_obj in formats_iterator:
                yield {
                    'prefix': format_obj.metadataPrefix,
                    'schema': format_obj.schema,
                    'namespace': format_obj.metadataNamespace
                }

        except Exception as e:
            self.logger.error("Error listing metadata formats: %s", e)
            raise

    def health_check(self) -> bool:
        """
        Perform health check by calling Identify.

        Returns:
            True if the repository responded, False otherwise.
        """
        try:
            self.identify()
            return True
        except Exception as e:
            self.logger.error("Health check failed: %s", e)
            return False