package ollama

import (
	"context"
	"errors"
	"fmt"
	"log/slog"
	"net"
	"net/http"
	"net/url"
	"quipus/internal/llm"
	"quipus/internal/utils"
	"strconv"
	"strings"
	"time"

	mark3labs "github.com/mark3labs/mcp-go/mcp"
	"github.com/ollama/ollama/api"
	OLLAMA "github.com/ollama/ollama/api"
	"github.com/ollama/ollama/envconfig"
)

// OllamaClient is an LLM client implementation backed by the Ollama
// HTTP API.
type OllamaClient struct {
	client *OLLAMA.Client // underlying Ollama API client
	model  string         // default model used when a request omits one
}

// OllamaAuthInfo carries a secret key for Ollama deployments that
// require authentication.
// NOTE(review): not referenced in this file — presumably unmarshaled
// from JSON configuration elsewhere; confirm against callers.
type OllamaAuthInfo struct {
	SecretKey string `json:"secret_key"`
}

// parseUrl resolves baseUrl into a *url.URL for the Ollama API client.
// An empty baseUrl falls back to the host configured via the Ollama
// environment (envconfig.Host). A URL without a scheme defaults to
// "http" with port 11434; explicit http/https schemes default to ports
// 80/443. An invalid or out-of-range port is replaced by the scheme's
// default port with a warning.
func parseUrl(baseUrl string) *url.URL {
	if len(baseUrl) == 0 {
		return envconfig.Host()
	}

	defaultPort := "11434"

	scheme, hostport, ok := strings.Cut(baseUrl, "://")
	switch {
	case !ok:
		scheme, hostport = "http", baseUrl
	case scheme == "http":
		defaultPort = "80"
	case scheme == "https":
		defaultPort = "443"
	}

	// Split off any path component before parsing host/port.
	hostport, path, _ := strings.Cut(hostport, "/")
	host, port, err := net.SplitHostPort(hostport)
	if err != nil {
		// No explicit port present: keep the whole hostport as the
		// host (honoring bracketed IPv6 literals) and use the default.
		host, port = "127.0.0.1", defaultPort
		if ip := net.ParseIP(strings.Trim(hostport, "[]")); ip != nil {
			host = ip.String()
		} else if hostport != "" {
			host = hostport
		}
	}

	if n, err := strconv.ParseInt(port, 10, 32); err != nil || n > 65535 || n < 0 {
		slog.Warn("invalid port, using default", "port", port, "default", defaultPort)
		port = defaultPort
	}

	return &url.URL{
		Scheme: scheme,
		Host:   net.JoinHostPort(host, port),
		Path:   path,
	}
}

// NewClient builds an OllamaClient from the given configuration. The
// configured BaseUrl is parsed (falling back to the Ollama environment
// default when empty) and requests are sent via http.DefaultClient.
func NewClient(config *llm.LLMClientConfig) (*OllamaClient, error) {
	// Named "base" to avoid shadowing the imported net/url package.
	base := parseUrl(config.BaseUrl)
	client := OLLAMA.NewClient(base, http.DefaultClient)
	return &OllamaClient{
		client: client,
		model:  config.Model,
	}, nil
}

// Chat streams a chat completion from Ollama.
//
// Each streamed content chunk is forwarded on req.RspChan; when the
// stream finishes, the final message is delivered on
// req.CompleteMessageChan and req.CompleteChan is signaled (even on
// error). A nil req is a no-op. The model falls back to the client
// default when req.Model is empty.
func (c *OllamaClient) Chat(ctx context.Context, req *llm.ChatReq) error {
	if req == nil {
		return nil
	}

	msgs := req.Msgs
	if len(msgs) < 1 {
		return errors.New("message is null")
	}

	model := req.Model
	if len(model) < 1 {
		model = c.model
		if len(model) < 1 {
			return errors.New("model is null")
		}
	}

	messages := make([]OLLAMA.Message, 0, len(msgs))
	for _, m := range msgs {
		messages = append(messages, OLLAMA.Message{
			Role:    m.Role,
			Content: m.Content,
		})
	}

	reqMsg := &OLLAMA.ChatRequest{
		Model:    model,
		Messages: messages,
	}

	if len(req.Tools) > 0 {
		slog.Info("add tools to ollama request")
		reqMsg.Tools = convertTool(req.Tools)
		slog.Info("tools:", slog.Any("", reqMsg.Tools))
	}

	respFunc := func(resp OLLAMA.ChatResponse) error {
		// Forward the chunk, bailing out if the caller has gone away.
		select {
		case req.RspChan <- resp.Message.Content:
		case <-ctx.Done():
			slog.Info("ollama recv quit signal ")
			return nil
		}

		if resp.Done {
			msg := &OllamaMessage{Message: resp.Message}
			select {
			case req.CompleteMessageChan <- msg:
			case <-ctx.Done():
				slog.Info("ollama recv quit signal ")
				return nil
			}
			slog.Debug("ollama done", slog.Any("message", msg))
		}
		return nil
	}

	// Always signal completion so the consumer can stop waiting.
	// NOTE(review): this send blocks if nothing reads CompleteChan —
	// assumed the caller always receives; confirm.
	defer func() {
		req.CompleteChan <- true
	}()

	e := c.client.Chat(ctx, reqMsg, respFunc)
	if e != nil {
		slog.Error("ollama chat error", slog.Any("error", e))
	}
	return e
}

// GetProviderName returns the provider identifier for this client.
func (c *OllamaClient) GetProviderName() string {
	return "ollama"
}

// Embedding computes an embedding vector for req.Prompt using the
// requested model, falling back to the client default when req.Model
// is empty.
func (c *OllamaClient) Embedding(ctx context.Context, req *llm.EmbeddingReq) ([]float64, error) {
	// Guard against nil requests (Chat and Complete both do); the
	// original would have dereferenced nil at req.Model.
	if req == nil {
		return nil, errors.New("request is null")
	}

	model := req.Model
	if len(model) < 1 {
		model = c.model
		if len(model) < 1 {
			return nil, errors.New("model is null")
		}
	}

	msg := &OLLAMA.EmbeddingRequest{
		Model:  model,
		Prompt: req.Prompt,
	}

	response, err := c.client.Embeddings(ctx, msg)
	if err != nil {
		return nil, err
	}

	return response.Embedding, nil
}

// Complete performs a non-streaming chat completion and returns the
// final message. A nil req yields (nil, nil). The model falls back to
// the client default when req.Model is empty; tools, if provided, are
// converted to the Ollama tool schema.
func (c *OllamaClient) Complete(ctx context.Context, req *llm.CompleteReq) (llm.Message, error) {
	if req == nil {
		return nil, nil
	}

	msgs := req.Msgs
	if len(msgs) < 1 {
		return nil, errors.New("message is null")
	}

	model := req.Model
	if len(model) < 1 {
		model = c.model
		if len(model) < 1 {
			return nil, errors.New("model is null")
		}
	}

	messages := make([]OLLAMA.Message, 0, len(msgs))
	for _, m := range msgs {
		messages = append(messages, OLLAMA.Message{
			Role:    m.Role,
			Content: m.Content,
		})
	}

	reqMsg := &OLLAMA.ChatRequest{
		Model:    model,
		Messages: messages,
		Options:  req.Options,
		Stream:   utils.BoolPtr(false),
	}

	if len(req.Tools) > 0 {
		reqMsg.Tools = convertTool(req.Tools)
	}

	// With streaming disabled the callback fires once; capture the
	// final message when the response is marked done.
	var response OLLAMA.Message
	respFunc := func(resp OLLAMA.ChatResponse) error {
		if resp.Done {
			response = resp.Message
		}
		return nil
	}

	if err := c.client.Chat(ctx, reqMsg, respFunc); err != nil {
		return nil, err
	}

	return &OllamaMessage{Message: response}, nil
}

// convertTool maps mcp-go tool definitions onto the Ollama tool
// schema (copied from the mark3labs mcp host).
func convertTool(tools []mark3labs.Tool) OLLAMA.Tools {
	var out OLLAMA.Tools
	for i := range tools {
		src := &tools[i]
		fn := OLLAMA.ToolFunction{
			Name:        src.Name,
			Description: src.Description,
		}
		// Fill the anonymous Parameters struct field by field rather
		// than restating its full type in a composite literal.
		fn.Parameters.Type = src.InputSchema.Type
		fn.Parameters.Required = src.InputSchema.Required
		fn.Parameters.Properties = convertProperties(src.InputSchema.Properties)
		out = append(out, OLLAMA.Tool{Type: "function", Function: fn})
	}
	return out
}

// getString fetches m[key] as a string, returning "" when the key is
// absent or holds a non-string value.
func getString(m map[string]interface{}, key string) string {
	s, _ := m[key].(string)
	return s
}

// convertProperties converts a JSON-schema "properties" map into the
// anonymous struct shape expected by the Ollama tool schema. Entries
// whose value is not an object are skipped; non-string enum entries
// are ignored.
func convertProperties(props map[string]interface{}) map[string]struct {
	Type        string   `json:"type"`
	Description string   `json:"description"`
	Enum        []string `json:"enum,omitempty"`
} {
	// Alias the anonymous struct once so it is not restated below.
	type property = struct {
		Type        string   `json:"type"`
		Description string   `json:"description"`
		Enum        []string `json:"enum,omitempty"`
	}

	out := make(map[string]property, len(props))
	for name, raw := range props {
		propMap, ok := raw.(map[string]interface{})
		if !ok {
			// Not a JSON-schema object; skip it.
			continue
		}

		p := property{
			Type:        getString(propMap, "type"),
			Description: getString(propMap, "description"),
		}
		if enumRaw, ok := propMap["enum"].([]interface{}); ok {
			for _, e := range enumRaw {
				if s, ok := e.(string); ok {
					p.Enum = append(p.Enum, s)
				}
			}
		}
		out[name] = p
	}

	return out
}

// OllamaMessage adapts an Ollama API message to the llm.Message
// interface.
type OllamaMessage struct {
	Message    OLLAMA.Message
	ToolCallID string // Store tool call ID separately since Ollama API doesn't have this field
}

// GetRole returns the message role (copied from mcphost).
func (m *OllamaMessage) GetRole() string {
	return m.Message.Role
}

// GetContent returns the message text with surrounding whitespace
// trimmed.
func (m *OllamaMessage) GetContent() string {
	// For tool responses and regular messages, just return the content string
	return strings.TrimSpace(m.Message.Content)
}

// GetToolCalls wraps each Ollama tool call in an OllamaToolCall
// adapter. Returns nil when the message carries no tool calls.
func (m *OllamaMessage) GetToolCalls() []llm.ToolCall {
	var wrapped []llm.ToolCall
	for i := range m.Message.ToolCalls {
		wrapped = append(wrapped, NewOllamaToolCall(m.Message.ToolCalls[i]))
	}
	return wrapped
}

// GetUsage reports prompt/completion token counts; always (0, 0) for
// this adapter.
func (m *OllamaMessage) GetUsage() (int, int) {
	return 0, 0 // Ollama doesn't provide token usage info
}

// IsToolResponse reports whether this message carries a tool's output
// (role == "tool").
func (m *OllamaMessage) IsToolResponse() bool {
	return m.Message.Role == "tool"
}

// GetToolResponseID returns the locally-stored tool call ID this
// message responds to (empty when unset).
func (m *OllamaMessage) GetToolResponseID() string {
	return m.ToolCallID
}

// OllamaToolCall adapts an Ollama tool call to the llm.ToolCall
// interface, attaching a locally generated ID (the Ollama API does not
// provide one).
type OllamaToolCall struct {
	call api.ToolCall
	id   string // Store a unique ID for the tool call
}

// NewOllamaToolCall wraps call with a generated ID of the form
// "tc_<function name>_<unix nanoseconds>".
func NewOllamaToolCall(call api.ToolCall) *OllamaToolCall {
	id := fmt.Sprintf("tc_%s_%d", call.Function.Name, time.Now().UnixNano())
	return &OllamaToolCall{call: call, id: id}
}

// GetName returns the called function's name.
func (t *OllamaToolCall) GetName() string {
	return t.call.Function.Name
}

// GetArguments returns the call's argument map as provided by Ollama.
func (t *OllamaToolCall) GetArguments() map[string]interface{} {
	return t.call.Function.Arguments
}

// GetID returns the ID generated for this tool call at construction.
func (t *OllamaToolCall) GetID() string {
	return t.id
}
