import csv
import json
import logging
import os
import sqlite3
import time
from datetime import datetime
from typing import Any, Dict, List, Optional, Union

import pandas as pd
from sqlalchemy import (
    Column,
    DateTime,
    Integer,
    MetaData,
    String,
    Table,
    Text,
    create_engine,
    text,
)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

# Module-level logger; handlers/level are configured by the application.
logger = logging.getLogger(__name__)

# Shared declarative base for all ORM models defined in this module.
Base = declarative_base()

class CrawlData(Base):
    """ORM model for one crawled page, stored in the 'crawl_data' table."""

    __tablename__ = 'crawl_data'

    id = Column(Integer, primary_key=True)
    url = Column(String(500))
    title = Column(String(200))
    content = Column(Text)
    # NOTE(review): naive local time, not UTC — confirm this is intended.
    timestamp = Column(DateTime, default=datetime.now)
    # Bug fix: 'metadata' is a reserved attribute name on SQLAlchemy
    # declarative classes (it shadows Base.metadata and raises
    # InvalidRequestError at class creation, making the module unimportable).
    # The Python attribute is renamed to `meta`; the database column keeps
    # its original name 'metadata'. Content is a JSON string of extra data.
    meta = Column('metadata', Text)

    def __init__(self, **kwargs):
        # Backward compatibility: existing callers construct records with
        # the keyword `metadata=...`; map it onto the renamed attribute.
        if 'metadata' in kwargs:
            kwargs['meta'] = kwargs.pop('metadata')
        super().__init__(**kwargs)

class DataStorage:
    """Persist crawl results to JSON/CSV/Excel files and a SQL database.

    Settings are read from ``config['storage_settings']``:

    - ``output_dir``: directory for file output (default ``'output'``)
    - ``database.connection_string``: SQLAlchemy URL
      (default ``'sqlite:///crawler_data.db'``)

    Database setup failures are logged but not raised, so file-based
    storage keeps working even when the database is unavailable.
    """

    def __init__(self, config=None):
        self.config = config
        self.storage_config = config.get('storage_settings', {}) if config else {}
        self.output_dir = self.storage_config.get('output_dir', 'output')
        self.ensure_output_dir()

        # Database handles; remain None if _setup_database() fails.
        self.db_engine = None
        self.db_session = None
        self._setup_database()

    def ensure_output_dir(self):
        """Create the output directory if it does not already exist."""
        if not os.path.exists(self.output_dir):
            # exist_ok guards against a race between the check and creation
            # (e.g. two crawler processes starting at once).
            os.makedirs(self.output_dir, exist_ok=True)
            logger.info(f"Created output directory: {self.output_dir}")

    def _setup_database(self):
        """Initialize the SQLAlchemy engine, schema, and a session.

        On failure, logs the error and leaves ``db_engine``/``db_session``
        as None; callers check those before using the database.
        """
        try:
            db_config = self.storage_config.get('database', {})
            connection_string = db_config.get('connection_string', 'sqlite:///crawler_data.db')

            self.db_engine = create_engine(connection_string)
            Base.metadata.create_all(self.db_engine)

            Session = sessionmaker(bind=self.db_engine)
            self.db_session = Session()

            logger.info(f"Database initialized: {connection_string}")

        except Exception as e:
            logger.error(f"Failed to setup database: {str(e)}")

    def save_to_json(self, data: Union[Dict, List], filename: str, indent: int = 2) -> bool:
        """Save *data* to a JSON file under the output directory.

        If the file already exists and *data* is a single dict record, the
        record is appended to the file's existing content, so the file
        accumulates a list of records over repeated calls.

        Returns True on success, False on failure.
        """
        try:
            filepath = os.path.join(self.output_dir, filename)

            # Merge a single dict record into the existing file's contents.
            # (The original comment said "data is a list"; the code has
            # always checked for dict — the comment was wrong, not the code.)
            if os.path.exists(filepath) and isinstance(data, dict):
                try:
                    with open(filepath, 'r', encoding='utf-8') as f:
                        existing_data = json.load(f)

                    if isinstance(existing_data, list):
                        existing_data.append(data)
                        data = existing_data
                    elif isinstance(existing_data, dict):
                        # Promote the existing single record to a list.
                        data = [existing_data, data]

                except (json.JSONDecodeError, FileNotFoundError):
                    # Unreadable existing file: fall through and overwrite.
                    pass

            with open(filepath, 'w', encoding='utf-8') as f:
                json.dump(data, f, indent=indent, ensure_ascii=False)

            logger.info(f"Data saved to JSON: {filepath}")
            return True

        except Exception as e:
            logger.error(f"Failed to save JSON file: {str(e)}")
            return False

    def save_to_csv(self, data: Union[List[Dict], pd.DataFrame], filename: str, mode: str = 'w') -> bool:
        """Save *data* (list of dicts or DataFrame) to a CSV file.

        With ``mode='a'`` the header row is suppressed when the file
        already exists, so repeated appends do not duplicate headers.

        Returns True on success, False on failure or empty input.
        """
        try:
            filepath = os.path.join(self.output_dir, filename)

            if isinstance(data, pd.DataFrame):
                df = data
            elif isinstance(data, list) and len(data) > 0:
                df = pd.DataFrame(data)
            else:
                logger.warning("No data to save to CSV")
                return False

            # Only write the header when creating (or overwriting) the file.
            write_header = True
            if mode == 'a' and os.path.exists(filepath):
                write_header = False

            df.to_csv(filepath, index=False, mode=mode, header=write_header, encoding='utf-8')
            logger.info(f"Data saved to CSV: {filepath}")
            return True

        except Exception as e:
            logger.error(f"Failed to save CSV file: {str(e)}")
            return False

    def save_to_database(self, data: Dict[str, Any], table_name: str = 'crawl_data') -> bool:
        """Persist one record to the database.

        For the default table the predefined ``CrawlData`` model is used;
        any other table name is created dynamically from the record's keys.

        Returns True on success, False on failure.
        """
        try:
            if not self.db_session:
                logger.error("Database session not initialized")
                return False

            if table_name == 'crawl_data':
                # Use the predefined CrawlData model. The `metadata` keyword
                # is kept for interface stability; extra data is serialized
                # to a JSON string.
                record = CrawlData(
                    url=data.get('url'),
                    title=data.get('title'),
                    content=data.get('content'),
                    metadata=json.dumps(data.get('metadata', {}), ensure_ascii=False)
                )
                self.db_session.add(record)
            else:
                # Create the table on demand and insert the row directly.
                self._save_to_dynamic_table(data, table_name)

            self.db_session.commit()
            logger.info(f"Data saved to database table: {table_name}")
            return True

        except Exception as e:
            logger.error(f"Failed to save to database: {str(e)}")
            # Guard: the session may be None if the failure happened early.
            if self.db_session:
                self.db_session.rollback()
            return False

    def _save_to_dynamic_table(self, data: Dict[str, Any], table_name: str):
        """Create *table_name* on demand and insert one row of *data*.

        Column types are inferred per key: short strings become
        VARCHAR(500); long strings and all non-string values (serialized
        to JSON text) become TEXT.
        """
        metadata = MetaData()

        # Build column definitions from the record's keys.
        columns = [Column('id', Integer, primary_key=True)]
        for key, value in data.items():
            if isinstance(value, str) and len(value) <= 500:
                columns.append(Column(key, String(500)))
            else:
                # Long strings and non-string values are stored as text.
                columns.append(Column(key, Text))

        table = Table(table_name, metadata, *columns)
        metadata.create_all(self.db_engine)  # no-op if the table exists

        # Serialize non-string values to JSON text for storage.
        insert_data = {
            key: value if isinstance(value, str) else json.dumps(value, ensure_ascii=False)
            for key, value in data.items()
        }

        # Bug fix: Engine.execute() was removed in SQLAlchemy 1.4/2.0.
        # Use an explicit transactional connection instead (also works on 1.x).
        with self.db_engine.begin() as conn:
            conn.execute(table.insert().values(**insert_data))

    def load_from_json(self, filename: str) -> Optional[Union[Dict, List]]:
        """Load and return data from a JSON file, or None on failure."""
        try:
            filepath = os.path.join(self.output_dir, filename)
            with open(filepath, 'r', encoding='utf-8') as f:
                data = json.load(f)
            logger.info(f"Data loaded from JSON: {filepath}")
            return data
        except Exception as e:
            logger.error(f"Failed to load JSON file: {str(e)}")
            return None

    def load_from_csv(self, filename: str) -> Optional[pd.DataFrame]:
        """Load a CSV file into a DataFrame, or None on failure."""
        try:
            filepath = os.path.join(self.output_dir, filename)
            df = pd.read_csv(filepath, encoding='utf-8')
            logger.info(f"Data loaded from CSV: {filepath}")
            return df
        except Exception as e:
            logger.error(f"Failed to load CSV file: {str(e)}")
            return None

    def query_database(self, query: str, params: Dict = None) -> Optional[List[Dict]]:
        """Run a raw SQL query and return rows as a list of dicts.

        *params* binds ``:name`` placeholders in *query*. Returns None on
        error or when the engine is not initialized.
        """
        try:
            if not self.db_engine:
                logger.error("Database engine not initialized")
                return None

            # Bug fix: Engine.execute() was removed in SQLAlchemy 1.4/2.0,
            # and raw SQL strings must be wrapped in text() for bound
            # parameters. Use a short-lived connection per query.
            with self.db_engine.connect() as conn:
                result = conn.execute(text(query), params or {})
                data = [dict(row) for row in result.mappings()]

            logger.info(f"Database query executed, returned {len(data)} rows")
            return data

        except Exception as e:
            logger.error(f"Database query failed: {str(e)}")
            return None

    def export_to_excel(self, data: Union[List[Dict], pd.DataFrame], filename: str, sheet_name: str = 'Sheet1') -> bool:
        """Export *data* to an Excel workbook (openpyxl engine).

        Returns True on success, False on failure or empty input.
        """
        try:
            filepath = os.path.join(self.output_dir, filename)

            if isinstance(data, pd.DataFrame):
                df = data
            elif isinstance(data, list) and len(data) > 0:
                df = pd.DataFrame(data)
            else:
                logger.warning("No data to export to Excel")
                return False

            with pd.ExcelWriter(filepath, engine='openpyxl') as writer:
                df.to_excel(writer, sheet_name=sheet_name, index=False)

            logger.info(f"Data exported to Excel: {filepath}")
            return True

        except Exception as e:
            logger.error(f"Failed to export Excel file: {str(e)}")
            return False

    def get_storage_stats(self) -> Dict[str, Any]:
        """Return statistics about stored files and database records.

        The dict contains ``output_dir``, a ``files`` list (name, size,
        modified datetime) and ``database_records`` (CrawlData row count).
        Partial failures are logged and leave defaults in place.
        """
        stats = {
            'output_dir': self.output_dir,
            'files': [],
            'database_records': 0
        }

        # Collect per-file stats from the output directory.
        try:
            for filename in os.listdir(self.output_dir):
                filepath = os.path.join(self.output_dir, filename)
                if os.path.isfile(filepath):
                    stats['files'].append({
                        'name': filename,
                        'size': os.path.getsize(filepath),
                        'modified': datetime.fromtimestamp(os.path.getmtime(filepath))
                    })
        except Exception as e:
            logger.error(f"Error getting file stats: {str(e)}")

        # Count rows in the main crawl table.
        try:
            if self.db_session:
                count = self.db_session.query(CrawlData).count()
                stats['database_records'] = count
        except Exception as e:
            logger.error(f"Error getting database stats: {str(e)}")

        return stats

    def cleanup_old_files(self, days: int = 30) -> bool:
        """Delete files in the output directory older than *days* days.

        Returns True when the scan completed, False on error.
        """
        try:
            current_time = time.time()
            max_age_seconds = days * 24 * 3600
            removed_count = 0

            for filename in os.listdir(self.output_dir):
                filepath = os.path.join(self.output_dir, filename)
                if os.path.isfile(filepath):
                    file_time = os.path.getmtime(filepath)
                    if (current_time - file_time) > max_age_seconds:
                        os.remove(filepath)
                        removed_count += 1
                        # Bug fix: previously logged a literal "(unknown)"
                        # placeholder instead of the removed file's name.
                        logger.info(f"Removed old file: {filename}")

            logger.info(f"Cleanup completed, removed {removed_count} files")
            return True

        except Exception as e:
            logger.error(f"Cleanup failed: {str(e)}")
            return False

    def close(self):
        """Close the database session and dispose of the engine."""
        if self.db_session:
            self.db_session.close()
            logger.info("Database session closed")
        if self.db_engine:
            self.db_engine.dispose()
            logger.info("Database engine disposed")