package builtin
import (
	"bufio"
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
	"os"
	"strings"
	"time"

	"pwsh-go/internal/algorithms"
	"pwsh-go/internal/config"
	"pwsh-go/internal/powershell"
)
// Tunables for the built-in ai command.
const (
	debugMode   = false            // set true to make dLog emit verbose logging
	pingTimeout = 60 * time.Second // overall budget for connection probes
	chatTimeout = 60 * time.Second // per-attempt budget for chat requests
	chatRetries = 3                // extra attempts after the first failed chat request
)
// dLog writes a formatted log line, but only when debugMode is enabled.
func dLog(format string, args ...interface{}) {
	if !debugMode {
		return
	}
	log.Printf(format, args...)
}
// ping issues a minimal POST to url with the given Authorization header,
// reporting only transport-level reachability; the response payload is
// discarded. A 5-second context bounds the whole request.
func ping(url, token string) error {
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, strings.NewReader(`{}`))
	if err != nil {
		return err
	}
	req.Header.Set("Authorization", token)
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	// Bug fix: the body was previously never closed, leaking the connection.
	resp.Body.Close()
	return nil
}

// probe verifies that the configured backend is reachable and responding,
// bounded by pingTimeout. botID is only meaningful for the "coze" type and
// is ignored for "openai".
func probe(modelType, endpoint, token, botID string) error {
	ctx, cancel := context.WithTimeout(context.Background(), pingTimeout)
	defer cancel()

	switch modelType {
	case "coze":
		return probeCoze(ctx, endpoint, token, botID)
	case "openai":
		return probeOpenAI(ctx, endpoint, token)
	default:
		// Include the offending value so misconfiguration is diagnosable.
		return fmt.Errorf("unknown model type: %s", modelType)
	}
}

// probeCoze validates a Coze connection by sending a one-shot streaming chat
// request ("你好") and waiting for a conversation.chat.completed SSE event.
// It returns nil only when that event is observed before ctx expires.
func probeCoze(ctx context.Context, endpoint, token, botID string) error {
    // Minimal v3 /chat payload; user_id is an arbitrary fixed identifier.
    body := map[string]interface{}{
        "bot_id": botID,
        "user_id": "123456789",
        "stream": true,
        "additional_messages": []map[string]string{
            {"content": "你好", "content_type": "text", "role": "user", "type": "question"},
        },
    }
    bodyBytes, err := json.Marshal(body)
    if err != nil {
        return err
    }
    dLog("Coze Probe Request Body: %s", string(bodyBytes))
    req, err := http.NewRequestWithContext(ctx, "POST", endpoint, bytes.NewBuffer(bodyBytes))
    if err != nil {
        return err
    }
    req.Header.Set("Content-Type", "application/json")
    req.Header.Set("Authorization", "Bearer " + token)
    req.Header.Set("Accept", "text/event-stream")
    // Mirror the context deadline onto the client as an overall timeout.
    client := &http.Client{}
    if deadline, ok := ctx.Deadline(); ok {
        client.Timeout = time.Until(deadline)
    }
    resp, err := client.Do(req)
    if err != nil {
        return err
    }
    defer resp.Body.Close()
    dLog("Coze Probe Response Status: %d", resp.StatusCode)
    if resp.StatusCode != http.StatusOK {
        // Read the error payload for debug logging only; callers match on the
        // "status" substring of the returned error text.
        bodyBytes, _ := ioutil.ReadAll(resp.Body)
        dLog("Coze Probe Error Response: %s", string(bodyBytes))
        return fmt.Errorf("status %d", resp.StatusCode)
    }
    // Scan the SSE stream line by line; frames arrive as an "event:" line
    // followed by a "data:" line.
    scanner := bufio.NewScanner(resp.Body)
    var eventType string
    for scanner.Scan() {
        line := scanner.Text()
        dLog("Coze Probe SSE Line: %s", line)
        if strings.HasPrefix(line, "event:") {
            eventType = strings.TrimSpace(strings.TrimPrefix(line, "event:"))
        } else if strings.HasPrefix(line, "data:") {
            data := strings.TrimSpace(strings.TrimPrefix(line, "data:"))
            var eventData map[string]interface{}
            if err := json.Unmarshal([]byte(data), &eventData); err != nil {
                // Skip non-JSON data lines (e.g. keep-alives).
                continue
            }
            // NOTE(review): completion is only detected when the event's data
            // payload parses as JSON — presumably the API always sends one;
            // confirm against the Coze SSE spec.
            if eventType == "conversation.chat.completed" {
                return nil
            }
        }
    }
    if err := scanner.Err(); err != nil {
        return err
    }
    return fmt.Errorf("no completed event received")
}

// probeOpenAI sends one minimal, non-streaming chat completion ("你好") to an
// OpenAI-compatible endpoint and verifies a non-empty reply comes back.
// The model name is picked from the globally configured provider alias.
func probeOpenAI(ctx context.Context, endpoint, token string) error {
	model := "gpt-3.5-turbo"
	switch currentModel.Alias {
	case "deepseek":
		model = "deepseek-chat"
	case "qwen":
		model = "qwen-plus"
	}

	payload := map[string]interface{}{
		"model":  model,
		"stream": false,
		"messages": []map[string]string{
			{"role": "system", "content": "You are a helpful assistant."},
			{"role": "user", "content": "你好"},
		},
	}
	raw, err := postJSON(ctx, endpoint, "Bearer "+token, payload)
	if err != nil {
		return err
	}

	var parsed struct {
		Choices []struct {
			Message struct {
				Content string `json:"content"`
			} `json:"message"`
		} `json:"choices"`
	}
	if err := json.Unmarshal(raw, &parsed); err != nil {
		return err
	}
	if len(parsed.Choices) == 0 || parsed.Choices[0].Message.Content == "" {
		return fmt.Errorf("no response content")
	}
	return nil
}

// postJSON marshals body, POSTs it to url with the given Authorization value,
// and returns the raw response bytes. Non-200 responses become errors whose
// text starts with "status" (callers match on that substring).
func postJSON(ctx context.Context, url, auth string, body interface{}) ([]byte, error) {
	b, err := json.Marshal(body)
	if err != nil {
		return nil, err
	}
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(b))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", auth)

	// Mirror the context deadline onto the client as a belt-and-braces
	// overall timeout.
	client := &http.Client{}
	if deadline, ok := ctx.Deadline(); ok {
		client.Timeout = time.Until(deadline)
	}
	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("status %d", resp.StatusCode)
	}
	return ioutil.ReadAll(resp.Body)
}

// Message represents a chat message exchanged with the backend; Role is
// "system", "user" or "assistant".
type Message struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

// activeClient is the function to call the AI API; nil when disconnected.
var activeClient func([]Message) (string, error)

// currentModel holds the current connection info. Headers carries the
// "Authorization" bearer value and, for Coze, an optional "Bot-ID" entry.
var currentModel struct {
	Alias    string
	Type     string
	Endpoint string
	Headers  map[string]string
}

// Global optimization components, created in init().
var globalAIOptimizer *algorithms.AIOptimizer
var globalInferenceAccel *algorithms.InferenceAccelerator

// inChat indicates if we are in an interactive chat session.
var inChat bool

// conversationID tracks the active Coze conversation so follow-up turns stay
// in the same server-side thread; cleared on connect/disconnect.
var conversationID string

// AiCmd is the built-in ai command. It manages connections to
// chat-completion backends and runs interactive sessions.
type AiCmd struct{}

// Name returns the command name.
func (AiCmd) Name() string { return "ai" }

// Usage returns the usage string.
func (AiCmd) Usage() string {
	return "ai openai|coze ... | chat | status | switch ... | disconnect | agent | enhanced-agent"
}

// Run executes the ai command, dispatching on the first argument and passing
// the remaining arguments to the matching handler.
func (a AiCmd) Run(args []string) error {
	if len(args) == 0 {
		return fmt.Errorf("usage: %s", a.Usage())
	}
	sub, rest := args[0], args[1:]
	switch sub {
	case "openai":
		return handleOpenai(rest)
	case "coze":
		return handleCoze(rest)
	case "chat":
		return startChat()
	case "status":
		return showStatus()
	case "switch":
		return handleSwitch(rest)
	case "disconnect":
		return disconnect()
	case "agent":
		return startAgent()
	case "enhanced-agent":
		return startEnhancedAgent()
	}
	return fmt.Errorf("unknown subcommand: %s", sub)
}

// handleOpenai connects to an OpenAI-compatible provider.
// Usage: ai openai <provider> <token> [endpoint]
// deepseek and qwen have built-in endpoints; ollama and vllm require an
// explicit endpoint. On success the probe result is announced, the client
// is activated, and the connection is persisted.
func handleOpenai(args []string) error {
	if len(args) < 2 {
		return fmt.Errorf("usage: ai openai <provider> <token> [endpoint]")
	}
	provider := args[0]
	token := args[1]
	endpoint := ""
	if len(args) > 2 {
		endpoint = args[2]
	}
	switch provider {
	case "deepseek":
		if endpoint == "" {
			endpoint = "https://api.deepseek.com/v1/chat/completions"
		}
	case "qwen":
		if endpoint == "" {
			endpoint = "https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions"
		}
	case "ollama", "vllm":
		// Self-hosted backends have no sensible default endpoint.
		if endpoint == "" {
			return fmt.Errorf("endpoint required for %s", provider)
		}
	default:
		return fmt.Errorf("unsupported provider: %s", provider)
	}
	currentModel = struct {
		Alias    string
		Type     string
		Endpoint string
		Headers  map[string]string
	}{
		Alias:    provider,
		Type:     "openai",
		Endpoint: endpoint,
		Headers:  map[string]string{"Authorization": "Bearer " + token},
	}
	fmt.Println("正在连接... 按 Ctrl+C 取消。超时时间：1分钟")
	if err := probe("openai", endpoint, token, ""); err != nil {
		return fmt.Errorf("✗ Failed (%v)", err)
	}
	// Bug fix: this previously printed "✓ Connected coze" for every
	// OpenAI-compatible provider.
	fmt.Printf("✓ Connected %s\n", provider)
	activeClient = openaiChat
	return saveConfig()
}

// handleCoze connects to the Coze v3 chat API.
// Usage: ai coze <token> [bot_id]
// The probe outcome is reported to the user (with a special-cased message for
// HTTP status failures and deadline timeouts) and any error is also returned.
func handleCoze(args []string) error {
	if len(args) < 1 {
		return fmt.Errorf("usage: ai coze <token> [bot_id]")
	}
	token := args[0]
	botID := ""
	if len(args) > 1 {
		botID = args[1]
	}
	endpoint := "https://api.coze.cn/v3/chat"
	currentModel = struct {
		Alias    string
		Type     string
		Endpoint string
		Headers  map[string]string
	}{
		Alias:    "coze",
		Type:     "coze",
		Endpoint: endpoint,
		Headers:  map[string]string{"Authorization": "Bearer " + token},
	}
	if botID != "" {
		// The bot ID rides along in the headers map so cozeChat can reuse it.
		currentModel.Headers["Bot-ID"] = botID
	}
	fmt.Println("正在连接... 按 Ctrl+C 取消。超时时间：1分钟")
	if err := probe("coze", endpoint, token, botID); err != nil {
		// Print a user-facing diagnosis, then surface the error to the caller.
		switch {
		case strings.Contains(err.Error(), "status"):
			fmt.Printf("✗ Failed (%s)\n", err)
		case strings.Contains(err.Error(), "deadline"):
			fmt.Println("✗ Failed (timeout)")
		default:
			fmt.Printf("✗ Failed (%v)\n", err)
		}
		return err
	}
	fmt.Println("✓ Connected coze")
	conversationID = "" // start a fresh conversation for the new connection
	activeClient = cozeChat
	return saveConfig()
}

// handleSwitch re-connects to a different backend. It is a thin dispatcher
// over the same handlers used for the initial connect.
func handleSwitch(args []string) error {
	if len(args) == 0 {
		return fmt.Errorf("usage: ai switch openai|coze ...")
	}
	target, rest := args[0], args[1:]
	switch target {
	case "openai":
		return handleOpenai(rest)
	case "coze":
		return handleCoze(rest)
	}
	return fmt.Errorf("unknown switch type: %s", target)
}

// startChat runs an interactive REPL against the active AI client. Blank
// lines are ignored and "/exit" leaves the session. The full exchange is
// replayed to the client on every turn as conversation history.
func startChat() error {
	if activeClient == nil {
		return fmt.Errorf("no active connection. Use 'ai openai|coze' first")
	}
	inChat = true
	defer func() { inChat = false }()

	stdin := bufio.NewReader(os.Stdin)
	var history []Message
	for {
		fmt.Print(">>> ")
		raw, err := stdin.ReadString('\n')
		if err != nil {
			return err
		}
		input := strings.TrimSpace(raw)
		switch input {
		case "/exit":
			return nil
		case "":
			continue
		}
		history = append(history, Message{Role: "user", Content: input})
		reply, err := activeClient(history)
		if err != nil {
			fmt.Printf("Error: %v\n", err)
			continue
		}
		fmt.Println(reply)
		fmt.Println()
		history = append(history, Message{Role: "assistant", Content: reply})
	}
}

// showStatus prints the current connection details, or a notice when no
// backend is connected. It never returns an error.
func showStatus() error {
	if currentModel.Alias == "" {
		fmt.Println("Not connected.")
		return nil
	}
	fmt.Printf("Connected: %s (%s) %s\n", currentModel.Alias, currentModel.Type, currentModel.Endpoint)
	return nil
}

// disconnect tears down the active connection, clears all connection state
// (persisting the cleared state), and confirms to the user.
func disconnect() error {
	activeClient = nil
	conversationID = ""
	// Zero every field of the anonymous connection struct.
	currentModel.Alias = ""
	currentModel.Type = ""
	currentModel.Endpoint = ""
	currentModel.Headers = nil
	if err := saveConfig(); err != nil {
		return err
	}
	fmt.Println("✓ Disconnected.")
	return nil
}

// openaiChat sends the conversation to the configured OpenAI-compatible
// endpoint, retrying transport failures up to chatRetries extra times.
// Malformed or empty responses fail immediately without retry.
func openaiChat(messages []Message) (string, error) {
	for attempt := 0; attempt <= chatRetries; attempt++ {
		content, retryable, err := openaiChatOnce(messages)
		if err == nil {
			return content, nil
		}
		if !retryable || attempt == chatRetries {
			return "", err
		}
	}
	return "", fmt.Errorf("✗ Timeout after retries")
}

// openaiChatOnce performs a single chat-completion request. The bool result
// reports whether the failure is worth retrying (transport errors are;
// malformed or empty responses are not).
//
// Bug fix: cancel was previously deferred inside the retry loop of
// openaiChat, so every attempt's context leaked until the whole call
// returned. One request per function lets defer fire at the right time.
func openaiChatOnce(messages []Message) (string, bool, error) {
	ctx, cancel := context.WithTimeout(context.Background(), chatTimeout)
	defer cancel()

	modelName := "gpt-3.5-turbo"
	switch currentModel.Alias {
	case "deepseek":
		modelName = "deepseek-chat"
	case "qwen":
		modelName = "qwen-plus"
	}
	// Prepend the system prompt to the caller-supplied history.
	chatMessages := []Message{{Role: "system", Content: "You are a helpful assistant."}}
	chatMessages = append(chatMessages, messages...)
	body := map[string]interface{}{
		"model":    modelName,
		"messages": chatMessages,
		"stream":   false,
	}
	bodyBytes, err := postJSON(ctx, currentModel.Endpoint, currentModel.Headers["Authorization"], body)
	if err != nil {
		return "", true, err // transport-level failure: retryable
	}
	var result struct {
		Choices []struct {
			Message struct {
				Content string `json:"content"`
			} `json:"message"`
		} `json:"choices"`
	}
	if err := json.Unmarshal(bodyBytes, &result); err != nil {
		return "", false, err
	}
	if len(result.Choices) == 0 || result.Choices[0].Message.Content == "" {
		return "", false, fmt.Errorf("no response content")
	}
	return result.Choices[0].Message.Content, false, nil
}

// cozeChat sends the newest user message to the Coze v3 chat API as a
// streaming request and assembles the assistant's reply from
// conversation.message.delta SSE events. Only the last entry of messages is
// sent; earlier context lives server-side in the conversation identified by
// the package-level conversationID, which this function also updates.
func cozeChat(messages []Message) (string, error) {
    ctx, cancel := context.WithTimeout(context.Background(), pingTimeout)
    defer cancel()
    // Coze tracks conversation state itself, so forward only the latest turn.
    lastMessage := messages[len(messages)-1]
    additionalMessages := []map[string]string{{
        "content": lastMessage.Content,
        "content_type": "text",
        "role": "user",
        "type": "question",
    }}
    body := map[string]interface{}{
        "bot_id": currentModel.Headers["Bot-ID"],
        "user_id": "123456789",
        "stream": true,
        "additional_messages": additionalMessages,
    }
    if conversationID != "" {
        // Continue the existing server-side conversation thread.
        body["conversation_id"] = conversationID
    }
    bodyBytes, err := json.Marshal(body)
    if err != nil {
        return "", err
    }
    dLog("Coze Chat Request Body: %s", string(bodyBytes))
    req, err := http.NewRequestWithContext(ctx, "POST", currentModel.Endpoint, bytes.NewBuffer(bodyBytes))
    if err != nil {
        return "", err
    }
    req.Header.Set("Content-Type", "application/json")
    req.Header.Set("Authorization", currentModel.Headers["Authorization"])
    req.Header.Set("Accept", "text/event-stream")
    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        return "", err
    }
    defer resp.Body.Close()
    dLog("Coze Chat Response Status: %d", resp.StatusCode)
    if resp.StatusCode != http.StatusOK {
        // Include the raw error payload in the returned error for diagnosis.
        bodyBytes, _ := ioutil.ReadAll(resp.Body)
        dLog("Coze Chat Error Response: %s", string(bodyBytes))
        return "", fmt.Errorf("Chat API error: %d - %s", resp.StatusCode, string(bodyBytes))
    }
    // Parse the SSE stream: remember the most recent "event:" type, then act
    // on the matching "data:" payload.
    scanner := bufio.NewScanner(resp.Body)
    var content string
    var eventType string
    for scanner.Scan() {
        line := scanner.Text()
        dLog("Coze SSE Line: %s", line)
        if strings.HasPrefix(line, "event:") {
            eventType = strings.TrimPrefix(line, "event:")
            eventType = strings.TrimSpace(eventType)
        } else if strings.HasPrefix(line, "data:") {
            data := strings.TrimPrefix(line, "data:")
            data = strings.TrimSpace(data)
            var eventData map[string]interface{}
            if err := json.Unmarshal([]byte(data), &eventData); err != nil {
                // Skip non-JSON data lines (e.g. keep-alives).
                continue
            }
            if eventType == "conversation.chat.created" {
                // Capture the conversation ID so follow-up turns reuse it.
                if convID, ok := eventData["conversation_id"].(string); ok {
                    conversationID = convID
                }
            } else if eventType == "conversation.message.delta" {
    // Accumulate only assistant "answer" deltas into the reply text.
    if role, ok := eventData["role"].(string); ok && role == "assistant" {
        if typ, ok := eventData["type"].(string); ok && typ == "answer" {
            if delta, ok := eventData["content"].(string); ok {
                content += delta
            }
        }
    }
            } else if eventType == "conversation.chat.completed" {
                break
            }
        }
    }
    if err := scanner.Err(); err != nil {
        return "", err
    }
    if content != "" {
        return content, nil
    }
    return "", fmt.Errorf("no valid response")
}

// saveConfig persists the current connection (alias, type, endpoint and
// headers — note this includes the bearer token) via the config package.
func saveConfig() error {
	cfg := &config.AIConfig{}
	cfg.Current.Alias = currentModel.Alias
	cfg.Current.Type = currentModel.Type
	cfg.Current.Endpoint = currentModel.Endpoint
	cfg.Current.Headers = currentModel.Headers
	return config.SaveAIConfig(cfg)
}

// startEnhancedAgent starts the enhanced AI agent mode with tool-calling
// capabilities. Requires an active AI connection and a working PowerShell
// backend.
func startEnhancedAgent() error {
	if activeClient == nil {
		return fmt.Errorf("❌ 请先连接 AI 服务 (使用 'ai openai' 或 'ai coze')")
	}
	be, err := powershell.NewBackend()
	if err != nil {
		return fmt.Errorf("failed to create PowerShell backend: %w", err)
	}
	return NewEnhancedAgent(be).Start()
}

// init creates the global optimization components and, when a saved
// configuration exists, re-activates the last AI connection.
func init() {
	// Global optimization components.
	globalAIOptimizer = algorithms.NewAIOptimizer()
	globalInferenceAccel = algorithms.NewInferenceAccelerator()

	// Restore the previously saved connection, if any.
	cfg, err := config.LoadAIConfig()
	if err != nil || cfg.Current.Alias == "" {
		return
	}
	currentModel.Alias = cfg.Current.Alias
	currentModel.Type = cfg.Current.Type
	currentModel.Endpoint = cfg.Current.Endpoint
	currentModel.Headers = cfg.Current.Headers
	switch currentModel.Type {
	case "openai":
		activeClient = openaiChat
	case "coze":
		activeClient = cozeChat
	}
}