#!/bin/bash

# WFH CLI - interactive model selection tool

# Abort on any error, including failures inside pipelines
# (plain `set -e` ignores non-final pipeline stages).
set -eo pipefail

# ANSI colour codes (literal escape text; rendered via `echo -e` / `printf '%b'`).
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly CYAN='\033[0;36m'
readonly MAGENTA='\033[0;35m'
readonly NC='\033[0m'

# Render the application banner in cyan, then reset the terminal colour.
print_header() {
    printf '%b\n' "$CYAN"
    # Quoted delimiter: the banner is emitted literally, with no expansion.
    cat << 'EOF'
╔══════════════════════════════════════════════════════╗
║          🌟 WFH CLI - 模型选择工具 🌟              ║
╚══════════════════════════════════════════════════════╝
EOF
    printf '%b\n' "$NC"
}

# Print an informational message prefixed with a blue "ℹ" marker.
print_info() {
    printf '%b\n' "${BLUE}ℹ${NC} $1"
}

# Print a success message prefixed with a green "✓" marker.
print_success() {
    printf '%b\n' "${GREEN}✓${NC} $1"
}

# Print a warning message prefixed with a yellow "⚠" marker.
print_warning() {
    printf '%b\n' "${YELLOW}⚠${NC} $1"
}

# Print an error message prefixed with a red "✗" marker.
print_error() {
    printf '%b\n' "${RED}✗${NC} $1"
}

# 显示模型列表
show_models() {
    echo ""
    echo -e "${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
    echo -e "${YELLOW}可用的模型提供商：${NC}"
    echo -e "${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
    echo ""
    echo -e "${GREEN}1)${NC} DeepSeek ${MAGENTA}(推荐 - 性价比之王)${NC}"
    echo "   - deepseek-chat: 综合能力强，月成本 ¥5-20"
    echo "   - deepseek-reasoner: 推理能力极强"
    echo ""
    echo -e "${GREEN}2)${NC} Qwen ${MAGENTA}(通义千问 - 中文能力最强)${NC}"
    echo "   - qwen-plus: 性能与成本平衡，月成本 ¥30-100"
    echo "   - qwen-turbo: 速度快，价格低"
    echo "   - qwen-max: 性能最强"
    echo "   - qwen-long: 100万tokens 超长上下文"
    echo ""
    echo -e "${GREEN}3)${NC} Kimi ${MAGENTA}(Moonshot - 超长上下文)${NC}"
    echo "   - moonshot-v1-32k: 32K 上下文，月成本 ¥50-200"
    echo "   - moonshot-v1-128k: 128K 超长上下文"
    echo ""
    echo -e "${GREEN}4)${NC} Doubao ${MAGENTA}(豆包 - 字节跳动)${NC}"
    echo "   - doubao-pro-32k: 响应快，月成本 ¥10-50"
    echo "   - doubao-lite-4k: 价格极低"
    echo ""
    echo -e "${GREEN}5)${NC} GLM ${MAGENTA}(智谱 AI - 多模态)${NC}"
    echo "   - glm-4: 综合能力强，月成本 ¥20-100"
    echo "   - glm-4-flash: 免费版本（有限额）"
    echo ""
    echo -e "${GREEN}6)${NC} OpenAI ${MAGENTA}(国际标准)${NC}"
    echo "   - gpt-4-turbo: 最成熟的模型"
    echo ""
    echo -e "${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
}

# 设置模型配置
set_model() {
    local provider=$1
    local model=$2
    local api_key_var=$3
    
    # 检查配置文件
    local config_file="$HOME/.gemini/.env"
    
    if [ ! -f "$config_file" ]; then
        mkdir -p "$HOME/.gemini"
        touch "$config_file"
    fi
    
    # 读取现有的 API Key
    source "$config_file" 2>/dev/null || true
    
    local current_key="${!api_key_var}"
    
    # 更新或写入配置
    if grep -q "^LLM_PROVIDER=" "$config_file"; then
        sed -i '' "s|^LLM_PROVIDER=.*|LLM_PROVIDER=$provider|" "$config_file"
    else
        echo "LLM_PROVIDER=$provider" >> "$config_file"
    fi
    
    if [ -n "$model" ]; then
        if grep -q "^LLM_MODEL=" "$config_file"; then
            sed -i '' "s|^LLM_MODEL=.*|LLM_MODEL=$model|" "$config_file"
        else
            echo "LLM_MODEL=$model" >> "$config_file"
        fi
    fi
    
    # 确保搜索引擎配置存在
    if ! grep -q "^SEARCH_ENGINE=" "$config_file"; then
        echo "SEARCH_ENGINE=duckduckgo" >> "$config_file"
    fi
    
    print_success "已设置模型提供商: $provider"
    if [ -n "$model" ]; then
        print_success "已设置模型版本: $model"
    fi
    
    # 检查 API Key
    if [ -z "$current_key" ]; then
        echo ""
        print_warning "未检测到 $api_key_var"
        echo ""
        read -p "请输入 $api_key_var: " new_key
        
        if [ -n "$new_key" ]; then
            if grep -q "^$api_key_var=" "$config_file"; then
                sed -i '' "s|^$api_key_var=.*|$api_key_var=$new_key|" "$config_file"
            else
                echo "$api_key_var=$new_key" >> "$config_file"
            fi
            print_success "API Key 已保存"
        fi
    else
        print_success "$api_key_var 已配置"
    fi
}

# 主菜单
main_menu() {
    print_header
    show_models
    
    echo ""
    read -p "$(echo -e ${CYAN}请选择模型提供商 [1-6]:${NC} )" choice
    
    case $choice in
        1)
            echo ""
            echo -e "${YELLOW}DeepSeek 模型版本：${NC}"
            echo "  1) deepseek-chat (推荐)"
            echo "  2) deepseek-reasoner (推理增强)"
            echo ""
            read -p "$(echo -e ${CYAN}选择版本 [1-2, 默认 1]:${NC} )" model_choice
            
            case ${model_choice:-1} in
                1) set_model "deepseek" "deepseek-chat" "DEEPSEEK_API_KEY" ;;
                2) set_model "deepseek" "deepseek-reasoner" "DEEPSEEK_API_KEY" ;;
                *) set_model "deepseek" "" "DEEPSEEK_API_KEY" ;;
            esac
            ;;
        2)
            echo ""
            echo -e "${YELLOW}Qwen 模型版本：${NC}"
            echo "  1) qwen-plus (推荐 - 平衡)"
            echo "  2) qwen-turbo (速度快)"
            echo "  3) qwen-max (性能最强)"
            echo "  4) qwen-long (超长上下文)"
            echo ""
            read -p "$(echo -e ${CYAN}选择版本 [1-4, 默认 1]:${NC} )" model_choice
            
            case ${model_choice:-1} in
                1) set_model "qwen" "qwen-plus" "QWEN_API_KEY" ;;
                2) set_model "qwen" "qwen-turbo" "QWEN_API_KEY" ;;
                3) set_model "qwen" "qwen-max" "QWEN_API_KEY" ;;
                4) set_model "qwen" "qwen-long" "QWEN_API_KEY" ;;
                *) set_model "qwen" "" "QWEN_API_KEY" ;;
            esac
            ;;
        3)
            echo ""
            echo -e "${YELLOW}Kimi 模型版本：${NC}"
            echo "  1) moonshot-v1-32k (推荐)"
            echo "  2) moonshot-v1-8k (短上下文)"
            echo "  3) moonshot-v1-128k (超长上下文)"
            echo ""
            read -p "$(echo -e ${CYAN}选择版本 [1-3, 默认 1]:${NC} )" model_choice
            
            case ${model_choice:-1} in
                1) set_model "kimi" "moonshot-v1-32k" "KIMI_API_KEY" ;;
                2) set_model "kimi" "moonshot-v1-8k" "KIMI_API_KEY" ;;
                3) set_model "kimi" "moonshot-v1-128k" "KIMI_API_KEY" ;;
                *) set_model "kimi" "" "KIMI_API_KEY" ;;
            esac
            ;;
        4)
            echo ""
            echo -e "${YELLOW}Doubao 模型版本：${NC}"
            echo "  1) doubao-pro-32k (推荐)"
            echo "  2) doubao-lite-4k (超低价)"
            echo "  3) doubao-pro-128k (长上下文)"
            echo ""
            read -p "$(echo -e ${CYAN}选择版本 [1-3, 默认 1]:${NC} )" model_choice
            
            case ${model_choice:-1} in
                1) set_model "doubao" "doubao-pro-32k" "DOUBAO_API_KEY" ;;
                2) set_model "doubao" "doubao-lite-4k" "DOUBAO_API_KEY" ;;
                3) set_model "doubao" "doubao-pro-128k" "DOUBAO_API_KEY" ;;
                *) set_model "doubao" "" "DOUBAO_API_KEY" ;;
            esac
            ;;
        5)
            echo ""
            echo -e "${YELLOW}GLM 模型版本：${NC}"
            echo "  1) glm-4 (推荐)"
            echo "  2) glm-4-flash (免费)"
            echo "  3) glm-4-air (轻量)"
            echo ""
            read -p "$(echo -e ${CYAN}选择版本 [1-3, 默认 1]:${NC} )" model_choice
            
            case ${model_choice:-1} in
                1) set_model "glm" "glm-4" "GLM_API_KEY" ;;
                2) set_model "glm" "glm-4-flash" "GLM_API_KEY" ;;
                3) set_model "glm" "glm-4-air" "GLM_API_KEY" ;;
                *) set_model "glm" "" "GLM_API_KEY" ;;
            esac
            ;;
        6)
            set_model "openai" "gpt-4-turbo" "OPENAI_API_KEY"
            ;;
        *)
            print_error "无效的选择"
            exit 1
            ;;
    esac
    
    echo ""
    print_success "配置已保存到: $HOME/.gemini/.env"
    echo ""
    
    # 询问是否立即启动
    read -p "$(echo -e ${CYAN}是否立即启动 WFH CLI? [Y/n]:${NC} )" launch
    
    if [[ ! $launch =~ ^[Nn]$ ]]; then
        echo ""
        print_success "正在启动 WFH CLI..."
        echo ""
        wfh-cli
    else
        echo ""
        print_info "稍后可以运行以下命令启动:"
        echo "  wfh-cli"
    fi
}

# Entry point: run the interactive menu.
main_menu

