#!/usr/bin/env python3
"""
滴滴车主端智能客服数据收集执行脚本
"""

import os
import sys
import json
import logging
from datetime import datetime

# 添加项目路径
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))

from ai.services.data_crawler import DataCrawler
from ai.services.data_manager import DataManager
from ai.services.elasticsearch_service import ElasticsearchService
from ai.services.smart_customer_service import SmartCustomerService

# 配置日志
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler('Didi/ai/data/collection.log'),
        logging.StreamHandler()
    ]
)
logger = logging.getLogger(__name__)

def main():
    """Run the full data-collection pipeline.

    Steps: initialize the crawler/storage/search services, probe the
    Elasticsearch connection once, run the full crawl, sync the processed
    data into Elasticsearch (when reachable), then log knowledge-base
    statistics and ES health.

    Raises:
        Exception: any error from the pipeline is logged with a full
            traceback and re-raised so the process exits non-zero.
    """
    logger.info("开始执行数据收集流程...")

    try:
        # 1. Initialize services.
        logger.info("初始化服务...")
        crawler = DataCrawler()
        # NOTE(review): data_manager is never used below — kept because its
        # constructor may have side effects; confirm and drop if not.
        data_manager = DataManager()
        es_service = ElasticsearchService()
        smart_service = SmartCustomerService()

        # 2. Probe the Elasticsearch connection once and cache the result;
        #    ping() is a network round trip and the original code pinged twice.
        logger.info("检查Elasticsearch连接...")
        es_available = bool(es_service.es and es_service.es.ping())
        if es_available:
            logger.info("Elasticsearch连接成功")
        else:
            logger.warning("Elasticsearch连接失败，将使用向量搜索作为备选")

        # 3. Run the crawl.
        logger.info("开始数据爬取...")
        crawl_result = crawler.run_full_crawl()

        if crawl_result.get('status') == 'success':
            logger.info(f"数据爬取完成: {crawl_result}")

            # 4. Sync the processed data into ES (skipped when unreachable).
            if es_available:
                logger.info("开始同步数据到Elasticsearch...")

                # Load the processed data the crawler wrote to disk.
                raw_data_path = crawl_result.get('raw_data_path')
                if raw_data_path and os.path.exists(raw_data_path):
                    with open(raw_data_path, 'r', encoding='utf-8') as f:
                        processed_data = json.load(f)

                    es_success = es_service.reindex_data(processed_data)
                    if es_success:
                        logger.info("数据同步到Elasticsearch成功")
                    else:
                        logger.error("数据同步到Elasticsearch失败")
                else:
                    logger.error("找不到处理后的数据文件")

            # 5. Log knowledge-base statistics.
            logger.info("获取统计信息...")
            stats = smart_service.get_knowledge_statistics()
            logger.info(f"知识库统计: {json.dumps(stats, ensure_ascii=False, indent=2)}")

            # 6. Log Elasticsearch cluster health.
            es_health = smart_service.get_es_health_status()
            logger.info(f"ES健康状态: {json.dumps(es_health, ensure_ascii=False, indent=2)}")

            logger.info("数据收集流程执行完成！")

        else:
            logger.error(f"数据爬取失败: {crawl_result}")

    except Exception as e:
        # logger.exception records the full traceback; plain error() did not.
        logger.exception(f"执行过程中发生错误: {e}")
        raise

# Script entry point: run the pipeline only when executed directly,
# not when this module is imported.
if __name__ == "__main__":
    main() 