from typing import Dict, Any
import psycopg2
from psycopg2.extras import Json
import structlog
from datetime import datetime
import os


class PostgreSQLPipeline:
    """Scrapy item pipeline that records crawl-run bookkeeping in PostgreSQL.

    Lifecycle:
      * ``open_spider``  -- connects, ensures the ``crawler_runs`` table and
        its indexes exist, and inserts a ``'running'`` row whose id is kept
        in ``self.run_id``.
      * ``process_item`` -- stamps ``crawler_configs.last_run_at`` /
        ``last_success_at`` for the item's source and, when the item carries
        an ``"error"`` key, also records it in ``last_error``.
      * ``close_spider`` -- finalizes the run row with Scrapy's collected
        stats and closes the cursor/connection.

    Items are passed through unchanged; storage of item content is assumed
    to happen in another pipeline.
    """

    def __init__(self):
        self.logger = structlog.get_logger(self.__class__.__name__)
        # Connection state; populated in open_spider.
        self.connection = None
        self.cursor = None
        # id of this run's crawler_runs row; set in open_spider. Initialized
        # here so close_spider never hits AttributeError if open_spider
        # failed partway through.
        self.run_id = None
        # Local counters for this pipeline's own work (distinct from
        # Scrapy's crawler stats, which are read in close_spider).
        self.stats = {
            "processed": 0,
            "errors": 0,
        }

    @classmethod
    def from_crawler(cls, crawler):
        """Scrapy factory hook; no crawler settings are consumed here."""
        return cls()

    def open_spider(self, spider):
        """Connect to PostgreSQL, ensure schema, and open a run record.

        Raises on any failure (Scrapy aborts the spider); a half-open
        connection is closed before re-raising so nothing leaks.
        """
        try:
            # NOTE(review): hard-coded fallback credentials are a security
            # smell — acceptable only for local dev; production must set the
            # POSTGRES_* environment variables.
            self.connection = psycopg2.connect(
                host=os.getenv("POSTGRES_HOST", "postgres"),
                port=int(os.getenv("POSTGRES_PORT", 5432)),
                database=os.getenv("POSTGRES_DB", "ai_writing"),
                user=os.getenv("POSTGRES_USER", "admin"),
                password=os.getenv("POSTGRES_PASSWORD", "admin123")
            )
            self.cursor = self.connection.cursor()

            # Create crawler_runs table if not exists
            self.cursor.execute("""
                CREATE TABLE IF NOT EXISTS crawler_runs (
                    id SERIAL PRIMARY KEY,
                    source_name VARCHAR(100) NOT NULL,
                    spider_name VARCHAR(100) NOT NULL,
                    start_time TIMESTAMP NOT NULL,
                    end_time TIMESTAMP,
                    status VARCHAR(20),
                    items_scraped INTEGER DEFAULT 0,
                    items_failed INTEGER DEFAULT 0,
                    requests_made INTEGER DEFAULT 0,
                    errors_count INTEGER DEFAULT 0,
                    error_messages TEXT[],
                    metadata JSONB,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                );
                
                CREATE INDEX IF NOT EXISTS idx_crawler_runs_source ON crawler_runs(source_name);
                CREATE INDEX IF NOT EXISTS idx_crawler_runs_start ON crawler_runs(start_time);
            """)

            # Start a new run record; RETURNING id gives us the row to
            # finalize in close_spider.
            self.cursor.execute("""
                INSERT INTO crawler_runs (source_name, spider_name, start_time, status)
                VALUES (%s, %s, %s, 'running')
                RETURNING id
            """, (spider.name, spider.name, datetime.now()))

            self.run_id = self.cursor.fetchone()[0]
            self.connection.commit()

            self.logger.info(
                "PostgreSQL pipeline connected",
                spider=spider.name,
                run_id=self.run_id
            )

        except Exception as e:
            self.logger.error(
                "Failed to connect to PostgreSQL",
                error=str(e),
                spider=spider.name
            )
            # Don't leak a half-open connection when setup failed after
            # psycopg2.connect succeeded (e.g. DDL or INSERT error).
            if self.connection is not None:
                try:
                    self.connection.close()
                except Exception:
                    pass  # best-effort cleanup; original error is re-raised
                self.connection = None
                self.cursor = None
            raise

    def process_item(self, item: Dict[str, Any], spider):
        """Stamp crawler_configs for this item's source; always returns item.

        Database errors are logged and counted but never dropped into the
        pipeline — this bookkeeping is best-effort by design.
        """
        try:
            self.stats["processed"] += 1

            # Update crawler config last run time. One timestamp for both
            # columns so they cannot disagree by microseconds.
            source = item.get("source", spider.name)
            now = datetime.now()
            self.cursor.execute("""
                UPDATE crawler_configs
                SET last_run_at = %s, last_success_at = %s
                WHERE source_name = %s
            """, (now, now, source))

            # Log important events (optional)
            if item.get("error"):
                self.cursor.execute("""
                    UPDATE crawler_configs
                    SET last_error = %s
                    WHERE source_name = %s
                """, (str(item["error"]), source))

            self.connection.commit()

        except Exception as e:
            self.stats["errors"] += 1
            # Roll back the failed transaction so the connection stays usable
            # for subsequent items; rollback itself may fail if the connection
            # is gone, so guard it.
            try:
                self.connection.rollback()
            except Exception:
                pass
            self.logger.error(
                "Failed to update PostgreSQL",
                error=str(e),
                spider=spider.name
            )

        return item

    def close_spider(self, spider):
        """Finalize the run record with Scrapy stats and release resources."""
        if not self.connection:
            return
        try:
            # Only finalize if open_spider actually created a run row.
            if self.run_id is not None:
                stats = spider.crawler.stats.get_stats()

                self.cursor.execute("""
                    UPDATE crawler_runs
                    SET end_time = %s, 
                        status = %s,
                        items_scraped = %s,
                        items_failed = %s,
                        requests_made = %s,
                        errors_count = %s,
                        metadata = %s
                    WHERE id = %s
                """, (
                    datetime.now(),
                    "completed",
                    stats.get("item_scraped_count", 0),
                    stats.get("item_dropped_count", 0),
                    stats.get("downloader/request_count", 0),
                    stats.get("log_count/ERROR", 0),
                    Json({
                        "finish_reason": stats.get("finish_reason", "unknown"),
                        "duration": stats.get("elapsed_time_seconds", 0),
                        "response_count": stats.get("response_received_count", 0)
                    }),
                    self.run_id
                ))

                self.connection.commit()

                self.logger.info(
                    "PostgreSQL pipeline statistics",
                    spider=spider.name,
                    run_id=self.run_id,
                    **self.stats
                )

        except Exception as e:
            # Leave the connection clean before closing it.
            try:
                self.connection.rollback()
            except Exception:
                pass
            self.logger.error(
                "Failed to update run statistics",
                error=str(e),
                spider=spider.name
            )
        finally:
            # cursor can be None if open_spider failed between connect()
            # and cursor() — guard before closing.
            if self.cursor is not None:
                self.cursor.close()
            self.connection.close()