# -*- coding: utf-8 -*-
"""
数据存储管理模块
"""

import logging
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional

from sqlalchemy.exc import IntegrityError

from src.data.data_cleaner import DataCleaner
from src.data.data_deduplicator import DataDeduplicator
from src.database.connection import db_manager
from src.database.init_data import get_city_by_name, get_business_type_by_name
from src.database.models import RealEstateLead, CrawlTask, DataQualityLog


class DataStorageManager:
    """Data storage manager (task 1.3.2).

    Pipelines crawled real-estate leads through cleaning and deduplication,
    upserts them into the database keyed by a data fingerprint, and provides
    crawl-task, quality-log, query and maintenance helpers.
    """

    def __init__(self):
        self.logger = logging.getLogger(__name__)
        self.data_cleaner = DataCleaner()
        self.data_deduplicator = DataDeduplicator()

    def save_leads_data(self, leads_data: List[Dict[str, Any]], task_id: Optional[int] = None) -> Dict[str, int]:
        """Clean, deduplicate and persist a batch of lead records.

        Args:
            leads_data: Raw lead dicts produced by the crawler.
            task_id: Crawl-task id; accepted for interface compatibility but
                not used by the current implementation.

        Returns:
            Counter dict with keys ``total``/``cleaned``/``deduplicated``/
            ``new``/``updated``/``failed``/``final_count``.  If the whole
            batch fails, every record is reported under ``failed``.
        """
        try:
            self.logger.info(f"开始处理 {len(leads_data)} 条线索数据")

            # Step 1: data cleaning.
            cleaned_data = self.data_cleaner.clean_data_batch(leads_data)
            self.logger.info(f"数据清洗完成，有效数据 {len(cleaned_data)} 条")

            # Step 2: in-batch deduplication.
            deduplicated_data = self.data_deduplicator.deduplicate_data(cleaned_data)
            self.logger.info(f"数据去重完成，最终数据 {len(deduplicated_data)} 条")

            stats = {
                'total': len(leads_data),
                'cleaned': len(cleaned_data),
                'deduplicated': len(deduplicated_data),
                'new': 0,
                'updated': 0,
                'failed': 0
            }

            # Step 3: upsert into the database, keyed by data fingerprint.
            with db_manager.get_session() as session:
                for i, lead_data in enumerate(deduplicated_data):
                    try:
                        # Fingerprint identifies logically-identical leads
                        # across crawl runs.
                        fingerprint = self.data_deduplicator.generate_fingerprint(lead_data)
                        lead_data['data_fingerprint'] = fingerprint

                        # Check whether a record with this fingerprint exists.
                        existing_lead = session.query(RealEstateLead).filter_by(
                            data_fingerprint=fingerprint
                        ).first()

                        if existing_lead:
                            # Refresh mutable fields of the stored record.
                            self._update_lead_record(existing_lead, lead_data)
                            stats['updated'] += 1
                            self.logger.debug(f"更新记录: {fingerprint}")
                        else:
                            new_lead = self._create_lead_record(lead_data)
                            if new_lead:
                                session.add(new_lead)
                                stats['new'] += 1
                                self.logger.debug(f"新增记录: {fingerprint}")
                            else:
                                # BUG FIX: construction failures were silently
                                # dropped before; count them as failed so the
                                # stats add up to the deduplicated total.
                                stats['failed'] += 1

                        # Commit every 100 rows to bound transaction size.
                        if (i + 1) % 100 == 0:
                            session.commit()
                            self.logger.info(f"已处理 {i + 1}/{len(deduplicated_data)} 条数据")

                    except Exception as e:
                        self.logger.error(f"保存第 {i+1} 条数据失败: {e}")
                        stats['failed'] += 1
                        session.rollback()
                        continue

                # Final commit for the tail of the batch.
                session.commit()

            # Aggregate count of rows actually written.
            stats['final_count'] = stats['new'] + stats['updated']

            self.logger.info(f"数据保存完成: 新增 {stats['new']}, 更新 {stats['updated']}, 失败 {stats['failed']}, 最终 {stats['final_count']}")
            return stats

        except Exception as e:
            self.logger.error(f"保存线索数据失败: {e}")
            return {
                'total': len(leads_data),
                'cleaned': 0,
                'deduplicated': 0,
                'new': 0,
                'updated': 0,
                'failed': len(leads_data),
                'final_count': 0
            }

    def _create_lead_record(self, lead_data: Dict[str, Any]) -> Optional["RealEstateLead"]:
        """Build a new ``RealEstateLead`` ORM object from a lead dict.

        Returns None (and logs) if construction raises, so the caller can
        count the record as failed instead of aborting the batch.
        """
        try:
            # Single timestamp so crawl_time/created_at/updated_at agree.
            now = datetime.now()
            return RealEstateLead(
                city=lead_data.get('city', ''),
                contact_person=lead_data.get('contact_person', ''),
                contact_info=lead_data.get('contact_info', ''),
                business_type=lead_data.get('business_type', ''),
                acceptable_rent=lead_data.get('acceptable_rent', ''),
                building_area=lead_data.get('building_area', ''),
                # Crawler emits 'publish_time'; the model column is 'publish_date'.
                publish_date=lead_data.get('publish_time'),
                detail_url=lead_data.get('detail_url', ''),
                crawl_time=now,
                data_source=lead_data.get('data_source', 'pupuwang'),
                data_status='active',
                data_fingerprint=lead_data.get('data_fingerprint', ''),
                created_at=now,
                updated_at=now
            )
        except Exception as e:
            self.logger.error(f"创建线索记录失败: {e}")
            return None

    def _update_lead_record(self, existing_lead: "RealEstateLead", lead_data: Dict[str, Any]) -> None:
        """Copy the mutable fields from ``lead_data`` onto ``existing_lead``.

        Fields missing from ``lead_data`` keep their stored values;
        ``updated_at`` is always bumped to now.
        """
        existing_lead.contact_person = lead_data.get('contact_person', existing_lead.contact_person)
        existing_lead.contact_info = lead_data.get('contact_info', existing_lead.contact_info)
        existing_lead.acceptable_rent = lead_data.get('acceptable_rent', existing_lead.acceptable_rent)
        existing_lead.building_area = lead_data.get('building_area', existing_lead.building_area)
        existing_lead.data_status = lead_data.get('data_status', existing_lead.data_status)
        existing_lead.updated_at = datetime.now()

        # Only overwrite the crawl time when the new record carries one.
        if lead_data.get('crawl_time'):
            existing_lead.crawl_time = lead_data['crawl_time']

    def create_crawl_task(self, task_name: str, city: str, business_type: str) -> int:
        """Create a crawl-task row and return its id (0 on failure)."""
        try:
            with db_manager.get_session() as session:
                # Resolve city / business-type names to foreign-key ids;
                # unknown names are stored as NULL rather than rejected.
                city_obj = get_city_by_name(session, city)
                business_obj = get_business_type_by_name(session, business_type)

                task = CrawlTask(
                    task_name=task_name,
                    city_id=city_obj.id if city_obj else None,
                    business_type_id=business_obj.id if business_obj else None,
                    status='pending',
                    start_time=datetime.now()
                )

                session.add(task)
                session.commit()

                self.logger.info(f"创建爬虫任务: {task_name} (ID: {task.id})")
                return task.id

        except Exception as e:
            self.logger.error(f"创建爬虫任务失败: {e}")
            return 0

    def update_crawl_task_status(self, task_id: int, status: str, message: str = "") -> bool:
        """Set a task's status (always stamping ``end_time``); True on success.

        NOTE(review): near-duplicate of :meth:`update_task_status`; kept for
        backward compatibility — consider consolidating callers onto one.
        """
        try:
            with db_manager.get_session() as session:
                task = session.query(CrawlTask).filter_by(id=task_id).first()
                if task:
                    task.status = status
                    task.end_time = datetime.now()
                    if message:
                        task.error_message = message
                    session.commit()
                    self.logger.info(f"更新任务状态: {task_id} -> {status}")
                    return True
                else:
                    self.logger.warning(f"任务不存在: {task_id}")
                    return False
        except Exception as e:
            self.logger.error(f"更新任务状态失败: {e}")
            return False

    def update_task_status(self, task_id: int, status: str, **kwargs) -> bool:
        """Set a task's status plus arbitrary extra columns via ``kwargs``.

        ``end_time`` is stamped only when the status becomes 'completed'.
        Unknown kwargs (no matching column) are silently ignored.
        """
        try:
            with db_manager.get_session() as session:
                task = session.query(CrawlTask).filter_by(id=task_id).first()

                if not task:
                    self.logger.error(f"任务不存在: {task_id}")
                    return False

                task.status = status

                # Stamp completion time only for terminal success.
                if status == 'completed':
                    task.end_time = datetime.now()

                # Apply any extra columns the caller supplied.
                for key, value in kwargs.items():
                    if hasattr(task, key):
                        setattr(task, key, value)

                session.commit()
                self.logger.info(f"更新任务状态: {task_id} -> {status}")
                return True

        except Exception as e:
            self.logger.error(f"更新任务状态失败: {e}")
            return False

    def save_quality_log(self, task_id: int, quality_report: Dict[str, Any]) -> bool:
        """Persist a data-quality log row for a crawl task; True on success."""
        try:
            with db_manager.get_session() as session:
                total = quality_report.get('total_records', 0)
                valid = quality_report.get('valid_records', 0)
                quality_log = DataQualityLog(
                    crawl_task_id=task_id,
                    total_records=total,
                    valid_records=valid,
                    invalid_records=total - valid,
                    # TODO(review): the remaining breakdown fields are stubbed
                    # at 0 — wire them up from the dedup / field-completeness
                    # results when available.
                    duplicate_records=0,
                    missing_contact_person=0,
                    missing_contact_info=0,
                    missing_rent_info=0,
                    missing_area_info=0,
                    outdated_records=0
                )

                session.add(quality_log)
                session.commit()

                self.logger.info(f"保存质量日志: 任务 {task_id}")
                return True

        except Exception as e:
            self.logger.error(f"保存质量日志失败: {e}")
            return False

    def get_leads_data(self, city: Optional[str] = None, business_type: Optional[str] = None,
                       limit: Optional[int] = None) -> List[Dict[str, Any]]:
        """Query leads as plain dicts, newest first.

        Args:
            city: Optional exact-match city filter.
            business_type: Optional exact-match business-type filter.
            limit: Optional cap on the number of rows returned.

        Returns:
            List of lead dicts; empty list on error.
        """
        try:
            with db_manager.get_session() as session:
                query = session.query(RealEstateLead)

                if city:
                    query = query.filter(RealEstateLead.city == city)

                if business_type:
                    query = query.filter(RealEstateLead.business_type == business_type)

                # Newest first.
                query = query.order_by(RealEstateLead.created_at.desc())

                if limit:
                    query = query.limit(limit)

                leads = query.all()

                # Detach ORM rows into plain dicts for callers.
                results = [
                    {
                        'id': lead.id,
                        'city': lead.city,
                        'contact_person': lead.contact_person,
                        'contact_info': lead.contact_info,
                        'business_type': lead.business_type,
                        'acceptable_rent': lead.acceptable_rent,
                        'building_area': lead.building_area,
                        'publish_date': lead.publish_date,
                        'detail_url': lead.detail_url,
                        'crawl_time': lead.crawl_time,
                        'data_source': lead.data_source,
                        'data_status': lead.data_status,
                        'created_at': lead.created_at,
                        'updated_at': lead.updated_at
                    }
                    for lead in leads
                ]

                self.logger.info(f"获取线索数据: {len(results)} 条")
                return results

        except Exception as e:
            self.logger.error(f"获取线索数据失败: {e}")
            return []

    def get_task_statistics(self, task_id: int) -> Dict[str, Any]:
        """Return a progress/result snapshot for one crawl task ({} if absent)."""
        try:
            with db_manager.get_session() as session:
                task = session.query(CrawlTask).filter_by(id=task_id).first()

                if not task:
                    return {}

                return {
                    'task_id': task.id,
                    'task_name': task.task_name,
                    'status': task.status,
                    'start_time': task.start_time,
                    'end_time': task.end_time,
                    'total_pages': task.total_pages,
                    'processed_pages': task.processed_pages,
                    'total_records': task.total_records,
                    'new_records': task.new_records,
                    'updated_records': task.updated_records,
                    'error_message': task.error_message
                }

        except Exception as e:
            self.logger.error(f"获取任务统计失败: {e}")
            return {}

    def cleanup_old_data(self, days: int = 90) -> int:
        """Delete leads created more than ``days`` days ago.

        Returns:
            Number of rows deleted (0 on error).
        """
        try:
            # BUG FIX: ``timedelta`` was referenced without being imported,
            # so this method previously raised NameError on every call.
            cutoff_date = datetime.now() - timedelta(days=days)

            with db_manager.get_session() as session:
                deleted_count = session.query(RealEstateLead).filter(
                    RealEstateLead.created_at < cutoff_date
                ).delete()

                session.commit()

                self.logger.info(f"清理旧数据: 删除 {deleted_count} 条记录")
                return deleted_count

        except Exception as e:
            self.logger.error(f"清理旧数据失败: {e}")
            return 0

    def get_database_stats(self) -> Dict[str, int]:
        """Return whole-database counters for leads/tasks ({} on error)."""
        try:
            with db_manager.get_session() as session:
                return {
                    'total_leads': session.query(RealEstateLead).count(),
                    'active_leads': session.query(RealEstateLead).filter_by(data_status='active').count(),
                    'total_tasks': session.query(CrawlTask).count(),
                    'completed_tasks': session.query(CrawlTask).filter_by(status='completed').count(),
                    'cities_count': session.query(RealEstateLead.city).distinct().count(),
                    'business_types_count': session.query(RealEstateLead.business_type).distinct().count()
                }

        except Exception as e:
            self.logger.error(f"获取数据库统计失败: {e}")
            return {}
