#!/usr/bin/env python3
"""
JH子系统AI聊天功能启动和测试脚本
"""

import asyncio
import sys
import os

# Add the project root (the parent of this script's directory) to sys.path
# so that `src.*` imports resolve when this file is run as a standalone script.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

async def test_ollama_service():
    """Verify the Ollama service is reachable and a usable model is selected.

    Returns:
        True when the service responds, at least one model is installed,
        and either the qwen model or a fallback model is selected;
        False on any failure (including an unreachable service).
    """
    print("🔍 测试Ollama服务连接...")

    try:
        from src.services.ollama_service import OllamaService

        async with OllamaService() as svc:
            # Guard: bail out early if the daemon is not answering.
            if not await svc.is_available():
                print("❌ Ollama服务不可用")
                print("请确保Ollama已经启动并运行在 http://localhost:11434")
                return False

            print("✅ Ollama服务连接成功")

            available = await svc.list_models()
            if not available:
                print("❌ 没有找到可用的模型")
                print("请使用 'ollama pull qwen2.5' 下载Qwen模型")
                return False

            names = [entry.get('name', '') for entry in available]
            print(f"📋 可用模型: {', '.join(names)}")

            # Prefer the qwen model; otherwise fall back to the first model found.
            if await svc.check_model_exists():
                print(f"✅ Qwen模型可用: {svc.model_name}")
            else:
                print("⚠️  未找到qwen模型，将尝试使用其他可用模型")
                svc.model_name = available[0].get('name', 'qwen2.5:latest')
                print(f"📝 切换到模型: {svc.model_name}")

            return True

    except Exception as e:
        # Best-effort smoke test: report and signal failure instead of crashing.
        print(f"❌ 测试Ollama服务失败: {e}")
        return False

async def test_chat_service():
    """Exercise the AI chat service with a single round-trip test message.

    Returns:
        True when the chat service answers with a dict containing a
        "response" key; False on any failure.
    """
    print("\n🤖 测试AI聊天服务...")

    # Pre-bind so the finally clause is safe even when the import or
    # construction below fails. Previously this raised NameError from
    # `finally` (masking the intended `return False`) whenever setup failed.
    ollama_service = None
    try:
        from src.services.ollama_service import OllamaService, JHChatService

        ollama_service = OllamaService()
        chat_service = JHChatService(ollama_service)

        # Send a simple test conversation.
        test_message = "你好，我想了解如何优化简历"
        print(f"📨 发送测试消息: {test_message}")

        response = await chat_service.chat(test_message)

        if response and "response" in response:
            print("✅ AI聊天服务工作正常")
            print(f"🤖 AI回复: {response['response'][:100]}...")
            return True
        else:
            print("❌ AI聊天服务返回异常")
            return False

    except Exception as e:
        print(f"❌ 测试聊天服务失败: {e}")
        return False
    finally:
        # Close the HTTP client only if the service was actually created.
        if ollama_service is not None:
            await ollama_service.client.aclose()

async def test_jh_data_service():
    """Smoke-test the JH job-data service: initialize the DB, read statistics.

    Returns:
        True when initialization succeeds (an empty dataset still counts
        as success); False when any step raises.
    """
    print("\n📊 测试JH数据服务...")

    try:
        from src.jh_subsystem.job_data_service import job_data_service

        await job_data_service.initialize_database()
        print("✅ JH数据库初始化成功")

        overview = await job_data_service.get_overview_statistics()
        if not overview:
            # No job data yet is not an error — the service itself works.
            print("⚠️  暂无职位数据")
            return True

        print(f"📈 职位数据统计: {overview}")
        return True

    except Exception as e:
        print(f"❌ 测试JH数据服务失败: {e}")
        return False

def check_dependencies():
    """Report whether every required third-party package is importable.

    Prints a per-package status line; on failure prints a pip install
    hint listing the missing packages.

    Returns:
        True when all packages import cleanly, False otherwise.
    """
    print("🔧 检查依赖项...")

    required_packages = ['httpx', 'fastapi', 'uvicorn', 'pydantic']

    missing_packages = []
    for name in required_packages:
        try:
            __import__(name)
        except ImportError:
            print(f"❌ {name} (缺失)")
            missing_packages.append(name)
        else:
            print(f"✅ {name}")

    if not missing_packages:
        return True

    print(f"\n请安装缺失的包: pip install {' '.join(missing_packages)}")
    return False

def print_setup_instructions():
    """Print the step-by-step setup guide for the JH AI chat feature."""
    banner = "=" * 60
    # One entry per output line; printed verbatim below.
    lines = [
        "",
        banner,
        "🚀 JH AI聊天功能设置说明",
        banner,
        "",
        "1. 安装Ollama:",
        "   - 访问 https://ollama.ai 下载安装",
        "   - 或使用: curl -fsSL https://ollama.ai/install.sh | sh",
        "",
        "2. 下载Qwen模型:",
        "   ollama pull qwen2.5",
        "   # 或者其他中文模型",
        "   ollama pull qwen:latest",
        "",
        "3. 启动Ollama服务:",
        "   ollama serve",
        "",
        "4. 安装Python依赖:",
        "   pip install httpx fastapi uvicorn pydantic",
        "",
        "5. 启动BoLe HR系统:",
        "   python run_server.py",
        "",
        "6. 访问JH子系统:",
        "   http://localhost:8000/frontend/jh_subsystem/jh_dashboard.html",
        "",
        banner,
    ]
    for line in lines:
        print(line)

async def main():
    """Run all JH chat-feature checks in order and print a summary.

    Flow: dependency check -> Ollama connectivity -> data service ->
    chat service. Aborts early (with setup instructions) when a hard
    prerequisite fails.
    """
    print("🎯 JH子系统AI聊天功能测试")
    print("="*50)

    # Dependencies are a hard prerequisite for everything else.
    if not check_dependencies():
        print_setup_instructions()
        return

    # Ollama must be reachable before the chat service can be tested.
    ollama_ok = await test_ollama_service()
    if not ollama_ok:
        print("\n❌ Ollama服务测试失败")
        print_setup_instructions()
        return

    data_ok = await test_jh_data_service()
    chat_ok = await test_chat_service()

    # Summary table (labels padded so status columns line up).
    print("\n" + "="*50)
    print("📋 测试结果总结:")
    for label, ok in (
        ("Ollama服务: ", ollama_ok),
        ("数据服务:   ", data_ok),
        ("聊天服务:   ", chat_ok),
    ):
        print(f"  {label}{'✅ 正常' if ok else '❌ 失败'}")

    if all((ollama_ok, data_ok, chat_ok)):
        print("\n🎉 所有服务测试通过！")
        print("您现在可以在前端界面使用AI聊天功能了。")
        print("访问: http://localhost:8000/frontend/jh_subsystem/jh_dashboard.html")
    else:
        print("\n⚠️  部分服务存在问题，请检查上述错误信息。")

# Script entry point: run the async test suite on the default event loop.
if __name__ == "__main__":
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        # Ctrl-C: exit with a friendly message instead of a traceback.
        print("\n\n👋 测试中断") 