"""
Main application entry point for food delivery platform scraper
"""
import argparse
import logging
import sys
from pathlib import Path
from typing import List, Optional

# Add src to path for imports
sys.path.append(str(Path(__file__).parent))

from config import config, PLATFORMS
from models import DatabaseManager
from scrapers.eleme_scraper import ElemeScraper


def setup_logging():
    """Configure root logging to write to both the log file and stdout."""
    log_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    # Mirror every record to the configured file and to the console.
    log_handlers = [
        logging.FileHandler(config.LOG_FILE),
        logging.StreamHandler(sys.stdout),
    ]
    logging.basicConfig(
        level=getattr(logging, config.LOG_LEVEL),
        format=log_format,
        handlers=log_handlers,
    )


def create_scraper(platform: str, db_manager: DatabaseManager):
    """Factory function to create appropriate scraper"""
    if platform == 'eleme':
        return ElemeScraper(db_manager)
    elif platform == 'meituan':
        # TODO: Implement MeituanScraper in Phase 2
        raise NotImplementedError("Meituan scraper not yet implemented")
    elif platform == 'dianping':
        # TODO: Implement DianpingScraper in Phase 3
        raise NotImplementedError("Dianping scraper not yet implemented")
    else:
        raise ValueError(f"Unsupported platform: {platform}")


def scrape_platform(platform: str, cities: List[str], limit: Optional[int] = None):
    """Scrape the given platform for each city and persist the results.

    Args:
        platform: Platform key ('eleme', 'meituan', 'dianping').
        cities: City names to scrape, processed in order.
        limit: Optional cap on restaurants per city (None = no cap).

    Returns:
        Total number of restaurants scraped across all cities.

    Raises:
        NotImplementedError / ValueError: propagated from create_scraper
            for unimplemented or unknown platforms.
    """
    logger = logging.getLogger(__name__)
    logger.info("Starting scraping for platform: %s", platform)

    # Ensure the schema exists before any scraper writes to it.
    db_manager = DatabaseManager(config.DATABASE_URL)
    db_manager.create_tables()

    scraper = create_scraper(platform, db_manager)

    total_scraped = 0

    for city in cities:
        try:
            logger.info("Scraping %s for city: %s", platform, city)

            # The scraper is a context manager; enter/exit per city so a
            # failure in one city cannot leak browser/session resources.
            with scraper:
                scraped_data = scraper.scrape_city(city, limit)
                total_scraped += len(scraped_data)

                logger.info("Successfully scraped %d restaurants from %s",
                            len(scraped_data), city)

        except Exception:
            # One failed city must not abort the whole run; log with
            # traceback and move on to the next city.
            logger.exception("Failed to scrape %s on %s", city, platform)
            continue

    logger.info("Scraping completed. Total restaurants scraped: %d", total_scraped)
    return total_scraped


def list_scraped_data(platform: Optional[str] = None, city: Optional[str] = None,
                      limit: int = 10):
    """Print scraped restaurant records to stdout.

    Args:
        platform: Optional platform filter; None means all platforms.
        city: Optional city filter; None means all cities.
        limit: Maximum number of records to print (default 10).
    """
    db_manager = DatabaseManager(config.DATABASE_URL)
    restaurants = db_manager.get_restaurants(platform=platform, city=city, limit=limit)

    print("\n=== Scraped Restaurant Data ===")
    print(f"Found {len(restaurants)} restaurants")
    print("-" * 80)

    for restaurant in restaurants:
        # Optional fields fall back to 'N/A' so every record prints uniformly.
        print(f"Name: {restaurant.name}")
        print(f"Category: {restaurant.category or 'N/A'}")
        print(f"City: {restaurant.city}")
        print(f"Location: {restaurant.location or 'N/A'}")
        print(f"Price Range: {restaurant.price_range or 'N/A'}")
        print(f"Phone: {restaurant.phone or 'N/A'}")
        print(f"Platform: {restaurant.platform}")
        print(f"Scraped: {restaurant.scraped_at}")
        print("-" * 80)


def main():
    """Command-line entry point.

    Sub-commands:
        scrape -- scrape restaurant data for one platform
        list   -- print previously scraped records
        test   -- smoke-run one city capped at 5 restaurants
        info   -- show current configuration
    """
    parser = argparse.ArgumentParser(description="Food Delivery Platform Scraper")

    subparsers = parser.add_subparsers(dest='command', help='Available commands')

    # Scrape command
    scrape_parser = subparsers.add_parser('scrape', help='Scrape restaurant data')
    scrape_parser.add_argument('platform', choices=['eleme', 'meituan', 'dianping'],
                              help='Platform to scrape')
    scrape_parser.add_argument('--cities', nargs='+', default=['北京'],
                              help='Cities to scrape (default: 北京)')
    scrape_parser.add_argument('--limit', type=int,
                              help='Maximum restaurants per city')

    # List command
    list_parser = subparsers.add_parser('list', help='List scraped data')
    list_parser.add_argument('--platform', choices=['eleme', 'meituan', 'dianping'],
                            help='Filter by platform')
    list_parser.add_argument('--city', help='Filter by city')
    list_parser.add_argument('--limit', type=int, default=10,
                            help='Maximum results to show (default: 10)')

    # Test command
    test_parser = subparsers.add_parser('test', help='Test scraper functionality')
    test_parser.add_argument('platform', choices=['eleme', 'meituan', 'dianping'],
                            help='Platform to test')
    test_parser.add_argument('--city', default='北京', help='City to test (default: 北京)')

    # Info command takes no extra arguments, so the parser object is not kept.
    subparsers.add_parser('info', help='Show system information')

    args = parser.parse_args()

    # Configure logging before any command handler runs.
    setup_logging()
    logger = logging.getLogger(__name__)

    if args.command == 'scrape':
        try:
            scrape_platform(args.platform, args.cities, args.limit)
        except KeyboardInterrupt:
            logger.info("Scraping interrupted by user")
        except Exception as e:
            logger.error(f"Scraping failed: {e}")
            sys.exit(1)

    elif args.command == 'list':
        try:
            list_scraped_data(args.platform, args.city, args.limit)
        except Exception as e:
            logger.error(f"Failed to list data: {e}")
            sys.exit(1)

    elif args.command == 'test':
        try:
            logger.info(f"Testing {args.platform} scraper for {args.city}")
            # Smoke test: cap at 5 restaurants to keep the run short.
            scrape_platform(args.platform, [args.city], limit=5)
            logger.info("Test completed successfully")
        except Exception as e:
            logger.error(f"Test failed: {e}")
            sys.exit(1)

    elif args.command == 'info':
        print("\n=== Food Delivery Platform Scraper ===")
        print(f"Supported platforms: {', '.join(PLATFORMS.keys())}")
        print(f"Target cities: {', '.join(config.TARGET_CITIES)}")
        print(f"Database: {config.DATABASE_URL}")
        print(f"Log level: {config.LOG_LEVEL}")
        print(f"Headless mode: {config.HEADLESS_MODE}")
        print(f"Max restaurants per city: {config.MAX_RESTAURANTS_PER_CITY}")

    else:
        # No sub-command given: show usage instead of doing nothing silently.
        parser.print_help()


# Allow the module to be executed directly as a CLI script.
if __name__ == "__main__":
    main()
