# Ollama 配置测试脚本
# Windows PowerShell 7.5 兼容版本

param(
    [switch]$Verbose
)

# Fail fast on any unhandled error; surface Write-Verbose output only when
# the caller passed -Verbose.
$ErrorActionPreference = "Stop"
if ($Verbose) {
    $VerbosePreference = "Continue"
} else {
    $VerbosePreference = "SilentlyContinue"
}

# 颜色输出函数
# Print a single message to the host in the requested console color.
# Params:
#   Message - text to print
#   Color   - console color name (defaults to White)
function Write-ColorOutput {
    param(
        [string]$Message,
        [string]$Color = "White"
    )
    Write-Host -ForegroundColor $Color -Object $Message
}

# Print a success line: green text with a check-mark prefix.
function Write-Success {
    param([string]$Message)
    Write-ColorOutput -Message "✅ $Message" -Color "Green"
}

# Print an error line: red text with a cross prefix.
# NOTE: deliberately shadows the built-in Write-Error cmdlet within this
# script so failures share the same colored, emoji-prefixed formatting.
function Write-Error {
    param([string]$Message)
    Write-ColorOutput -Message "❌ $Message" -Color "Red"
}

# Print a warning line: yellow text with a warning-sign prefix.
# NOTE: deliberately shadows the built-in Write-Warning cmdlet within this
# script for consistent formatting with the other helpers.
function Write-Warning {
    param([string]$Message)
    Write-ColorOutput -Message "⚠️  $Message" -Color "Yellow"
}

# Print an informational line: cyan text with an info-sign prefix.
function Write-Info {
    param([string]$Message)
    Write-ColorOutput -Message "ℹ️  $Message" -Color "Cyan"
}

# Resolve the script's own directory. Prefer the automatic $PSScriptRoot
# (PowerShell 3+); fall back to $MyInvocation for unusual invocation modes.
$ScriptDir = if ($PSScriptRoot) { $PSScriptRoot } else { Split-Path -Parent $MyInvocation.MyCommand.Path }
Set-Location $ScriptDir

Write-ColorOutput "🧪 测试 Ollama 配置..." "Magenta"

# Load environment variables from a .env file of KEY=VALUE lines.
# Lines starting with '#' (even when indented) are comments.
$envFile = Join-Path $ScriptDir ".env"
if (Test-Path $envFile) {
    Write-Info "加载环境配置..."
    Get-Content $envFile | ForEach-Object {
        $line = $_.Trim()
        # Skip blanks, comments, and lines without '='. The previous regex
        # '^[^#].*=' incorrectly treated indented comment lines such as
        # "  # key=val" as assignments.
        if ($line -and -not $line.StartsWith('#') -and $line.Contains('=')) {
            $key, $value = $line.Split('=', 2)
            $key = $key.Trim()
            # Strip optional surrounding quotes so KEY="val" yields val.
            $value = $value.Trim().Trim('"').Trim("'")
            [Environment]::SetEnvironmentVariable($key, $value)
            Write-Verbose "设置环境变量: $key = $value"
        }
    }
}

# Resolve the Ollama endpoint and model name, falling back to defaults
# when the corresponding environment variables are unset.
$ollamaBaseUrl = $env:OLLAMA_BASE_URL
if ([string]::IsNullOrEmpty($ollamaBaseUrl)) {
    Write-Warning "OLLAMA_BASE_URL 未设置，使用默认值: http://localhost:11434"
    $ollamaBaseUrl = "http://localhost:11434"
}

$ollamaModelName = $env:OLLAMA_MODEL_NAME
if ([string]::IsNullOrEmpty($ollamaModelName)) {
    Write-Warning "OLLAMA_MODEL_NAME 未设置，使用默认值: qwen2.5:7b"
    $ollamaModelName = "qwen2.5:7b"
}

Write-Info "服务地址: $ollamaBaseUrl"
Write-Info "模型名称: $ollamaModelName"

# Verify the Ollama HTTP service answers on its tags endpoint.
Write-Info "🌐 测试服务连接..."
try {
    $tagsResponse = Invoke-WebRequest -Uri "$ollamaBaseUrl/api/tags" -TimeoutSec 10
    # Guard clause: anything other than 200 is treated as a hard failure.
    if ($tagsResponse.StatusCode -ne 200) {
        Write-Error "Ollama 服务响应异常: $($tagsResponse.StatusCode)"
        exit 1
    }
    Write-Success "Ollama 服务连接正常"
}
catch {
    Write-Error "无法连接到 Ollama 服务: $_"
    Write-Info "💡 请确保 Ollama 服务正在运行:"
    Write-Info "   - 如果使用本地安装: ollama serve"
    Write-Info "   - 如果使用 Docker: docker-compose up ollama"
    exit 1
}

# Verify the configured model appears in Ollama's installed-model list.
Write-Info "📦 检查模型安装..."
try {
    # Invoke-RestMethod parses the JSON body for us.
    $tagsData = Invoke-RestMethod -Uri "$ollamaBaseUrl/api/tags" -TimeoutSec 10

    # Ollama names untagged pulls "<name>:latest", so accept both the exact
    # configured name and its ":latest"-qualified form (e.g. a user setting
    # OLLAMA_MODEL_NAME=qwen2.5 should match an installed "qwen2.5:latest").
    $wantedWithTag = if ($ollamaModelName.Contains(':')) {
        $ollamaModelName
    } else {
        "${ollamaModelName}:latest"
    }
    $modelExists = [bool]($tagsData.models | Where-Object {
        $_.name -eq $ollamaModelName -or $_.name -eq $wantedWithTag
    })

    if ($modelExists) {
        Write-Success "模型 $ollamaModelName 已安装"
    } else {
        Write-Error "模型 $ollamaModelName 未安装"
        Write-Info "💡 安装方法:"
        Write-Info "   - 本地安装: ollama pull $ollamaModelName"
        Write-Info "   - 或者运行: .\init-models.ps1"
        exit 1
    }
} catch {
    Write-Error "检查模型安装失败: $_"
    exit 1
}

# Run a single non-streaming generation to prove the model can answer.
Write-Info "🧠 测试模型推理..."
$testPrompt = "请简单介绍一下你自己"

try {
    # Low temperature and a 100-token cap keep this smoke test fast
    # and deterministic-ish.
    $payload = @{
        model   = $ollamaModelName
        prompt  = $testPrompt
        stream  = $false
        options = @{
            temperature = 0.1
            num_predict = 100
        }
    } | ConvertTo-Json

    $inferResponse = Invoke-WebRequest -Uri "$ollamaBaseUrl/api/generate" `
        -Method POST `
        -Body $payload `
        -ContentType "application/json" `
        -TimeoutSec 30

    # Guard clauses replace the original nested if/else ladder.
    if ($inferResponse.StatusCode -ne 200) {
        Write-Error "模型推理请求失败: $($inferResponse.StatusCode)"
        exit 1
    }

    $generatedText = ($inferResponse.Content | ConvertFrom-Json).response
    if (-not $generatedText) {
        Write-Error "模型返回空响应"
        exit 1
    }

    Write-Success "模型推理测试成功"
    Write-Info "📝 模型响应预览:"
    # Show at most the first 100 characters of the reply.
    if ($generatedText.Length -gt 100) {
        Write-ColorOutput "   $($generatedText.Substring(0, 100))..." "Gray"
    } else {
        Write-ColorOutput "   $generatedText" "Gray"
    }
}
catch {
    Write-Error "模型推理测试失败: $_"
    exit 1
}

# Probe the AI Agent backend's health endpoint. This check is best-effort:
# an unreachable backend only produces a warning, never a hard exit.
Write-Info "🤖 测试 AI Agent 后端..."
try {
    $healthResponse = Invoke-WebRequest -Uri "http://localhost:8000/health" -TimeoutSec 10
    if ($healthResponse.StatusCode -eq 200) {
        Write-Success "AI Agent 后端连接正常"
    }
    else {
        Write-Warning "AI Agent 后端响应异常: $($healthResponse.StatusCode)"
    }
}
catch {
    Write-Warning "无法连接到 AI Agent 后端: $_"
    Write-Info "💡 请先启动后端服务:"
    Write-Info "   - 开发环境: cd backend; python -m uvicorn main:app --reload"
    Write-Info "   - 或者运行: .\start.ps1"
}

Write-ColorOutput "" "Magenta"
Write-Success "所有测试通过！Ollama 配置正确。"
Write-ColorOutput "" "Magenta"
# Print the entry-point URLs of the freshly verified stack.
$endpointLines = @(
    "🚀 现在可以访问前端界面:",
    "   http://localhost:3000",
    "🔧 后端 API:",
    "   http://localhost:8000",
    "📊 API 文档:",
    "   http://localhost:8000/docs"
)
foreach ($entry in $endpointLines) {
    Write-Info $entry
}
