#!/usr/bin/env python3
"""
Check Ollama AI service for ls8shell
"""

import sys
from pathlib import Path

# Put this script's directory at the front of sys.path so that sibling
# modules (e.g. ai_engine, imported later in this file) resolve before
# any same-named packages elsewhere on the path.
project_root = Path(__file__).parent
sys.path.insert(0, str(project_root))

def check_ai_services():
    """Check Ollama availability and, if available, smoke-test the AI engine.

    Probes the local Ollama HTTP API, prints a human-readable status report,
    and — when the service is up — exercises the project's AI engine with a
    sample command-generation and chat request.

    Returns:
        bool: True if the Ollama service responded successfully, else False.
    """
    print("🤖 Checking Ollama AI Service...")
    print("=" * 50)

    ollama_available = _check_ollama()

    print("=" * 50)

    if ollama_available:
        print("🎉 Ollama service is available!")
        _smoke_test_ai_engine()
    else:
        _print_setup_instructions()

    return ollama_available


def _check_ollama():
    """Query the local Ollama tags endpoint; print status and return availability."""
    try:
        # requests is imported lazily so a missing dependency is reported
        # as "not available" instead of crashing the whole script.
        import requests
        response = requests.get("http://localhost:11434/api/tags", timeout=5)
        if response.status_code == 200:
            models = response.json().get('models', [])
            print(f"✅ Ollama: Running with {len(models)} models")
            for model in models[:5]:  # Show first 5 models
                print(f"   - {model.get('name', 'Unknown')}")
            return True
        print("❌ Ollama: Service not responding")
    except Exception as e:
        # Broad catch is deliberate: connection errors, timeouts, bad JSON,
        # or a missing requests package should all degrade to "not available".
        print(f"❌ Ollama: Not available ({e})")
    return False


def _smoke_test_ai_engine():
    """Instantiate the project AI engine and run one command + one chat request."""
    print("\nTesting AI engine initialization...")
    try:
        from ai_engine import AICommandGenerator
        ai_engine = AICommandGenerator()

        # Test command generation ("列文件" = "list files")
        test_result = ai_engine.generate_command("列文件", "linux")
        print(f"✅ AI Engine: {test_result}")

        # Test chat response
        chat_result = ai_engine.generate_chat_response("Hello")
        print(f"✅ Chat Response: {chat_result}")
    except Exception as e:
        # Best-effort smoke test: report the failure rather than aborting.
        print(f"❌ AI Engine Error: {e}")


def _print_setup_instructions():
    """Print step-by-step instructions for installing and starting Ollama."""
    print("❌ Ollama service not available!")
    print("\n💡 Setup instructions:")
    print("1. Install Ollama: https://ollama.ai/")
    print("2. Pull model: ollama pull llama2:7b-chat")
    print("3. Start service: ollama serve")
    print("4. Verify setup: python check_ai_services.py")

# Run the diagnostic only when executed as a script, not when imported.
if __name__ == "__main__":
    check_ai_services()