package service

import (
	"bufio"
	"bytes"
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"net/http"
	"os"
	"strings"

	"agentdesign/models"
)

const (
	// defaultDeepSeekBaseURL is the API root used when DEEPSEEK_BASE_URL is unset.
	defaultDeepSeekBaseURL = "https://api.deepseek.com"
	// defaultDeepSeekModel is the model used when DEEPSEEK_MODEL is unset and a
	// request does not name one.
	defaultDeepSeekModel   = "deepseek-chat"
)

// PromptService proxies chat completion requests to DeepSeek's
// OpenAI-compatible API and streams response deltas back to the caller.
type PromptService struct {
	httpClient   *http.Client // no client timeout; streaming lifetime is bounded by request contexts
	apiKey       string       // bearer token, read from DEEPSEEK_API_KEY
	baseURL      string       // API root without trailing slash
	defaultModel string       // model used when a request does not specify one
}

// NewPromptServiceFromEnv configures the prompt service using environment variables.
//
// It reads DEEPSEEK_API_KEY, DEEPSEEK_BASE_URL (trailing slashes trimmed),
// and DEEPSEEK_MODEL, falling back to package defaults for the latter two.
func NewPromptServiceFromEnv() *PromptService {
	svc := &PromptService{
		// Streaming requests can run indefinitely; cancellation is driven by
		// per-request contexts rather than a client-wide timeout.
		httpClient:   &http.Client{Timeout: 0},
		apiKey:       os.Getenv("DEEPSEEK_API_KEY"),
		baseURL:      strings.TrimRight(os.Getenv("DEEPSEEK_BASE_URL"), "/"),
		defaultModel: os.Getenv("DEEPSEEK_MODEL"),
	}
	if svc.baseURL == "" {
		svc.baseURL = defaultDeepSeekBaseURL
	}
	if svc.defaultModel == "" {
		svc.defaultModel = defaultDeepSeekModel
	}
	return svc
}

// StreamChatCompletion sends a chat completion request and streams partial tokens to the provided handler.
//
// The handler is invoked once per streamed delta: role chunks, then token
// chunks, and finally a single Done chunk. A non-nil error returned by the
// handler aborts the stream and is propagated to the caller. The request is
// cancelled when ctx is done.
func (p *PromptService) StreamChatCompletion(
	ctx context.Context,
	input models.PromptCompletionInput,
	handler func(models.PromptStreamChunk) error,
) error {
	if handler == nil {
		return errors.New("stream handler cannot be nil")
	}
	if p.apiKey == "" {
		return errors.New("DEEPSEEK_API_KEY is not configured")
	}
	if len(input.Messages) == 0 {
		return errors.New("no prompt messages provided")
	}

	payload := openAIChatRequest{
		Model:       p.pickModel(input.Model),
		Messages:    convertMessages(input.Messages),
		Stream:      true,
		Temperature: input.Temperature,
		MaxTokens:   input.MaxTokens,
		TopP:        input.TopP,
	}

	body, err := json.Marshal(payload)
	if err != nil {
		return err
	}

	req, err := http.NewRequestWithContext(ctx, http.MethodPost, p.baseURL+"/chat/completions", bytes.NewReader(body))
	if err != nil {
		return err
	}

	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+p.apiKey)

	resp, err := p.httpClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	if resp.StatusCode >= http.StatusBadRequest {
		// Include the status code: the body alone is often an opaque or empty
		// error payload, and the code is otherwise lost to the caller.
		data, _ := io.ReadAll(resp.Body)
		return fmt.Errorf("deepseek error (status %d): %s", resp.StatusCode, strings.TrimSpace(string(data)))
	}

	reader := bufio.NewScanner(resp.Body)
	// bufio.Scanner's default token limit is 64KiB; a single SSE "data:" line
	// carrying a large delta payload would abort the stream with
	// bufio.ErrTooLong. Allow lines up to 1MiB.
	reader.Buffer(make([]byte, 0, 64*1024), 1024*1024)
	for reader.Scan() {
		// Bail out promptly if the caller cancelled between lines.
		select {
		case <-ctx.Done():
			return ctx.Err()
		default:
		}

		// SSE framing: only "data:" lines carry payloads; blanks, comments
		// (": ..."), and other fields are skipped.
		line := strings.TrimSpace(reader.Text())
		if line == "" || !strings.HasPrefix(line, "data:") {
			continue
		}

		payload := strings.TrimSpace(strings.TrimPrefix(line, "data:"))
		if payload == "[DONE]" {
			return handler(models.PromptStreamChunk{Type: models.PromptStreamChunkTypeDone})
		}

		var chunk openAIStreamChunk
		if err := json.Unmarshal([]byte(payload), &chunk); err != nil {
			// Best-effort streaming: skip malformed chunks rather than
			// aborting an otherwise healthy stream.
			continue
		}

		for _, choice := range chunk.Choices {
			// The role typically arrives once in the first delta.
			if choice.Delta.Role != "" {
				if err := handler(models.PromptStreamChunk{
					Type: models.PromptStreamChunkTypeRole,
					Data: choice.Delta.Role,
				}); err != nil {
					return err
				}
			}

			if choice.Delta.Content == "" {
				continue
			}

			if err := handler(models.PromptStreamChunk{
				Type: models.PromptStreamChunkTypeToken,
				Data: choice.Delta.Content,
			}); err != nil {
				return err
			}
		}
	}

	// Scanner.Err never returns io.EOF, so no EOF filtering is needed here.
	if err := reader.Err(); err != nil {
		return err
	}

	// The connection ended without a [DONE] sentinel; still signal completion.
	return handler(models.PromptStreamChunk{Type: models.PromptStreamChunkTypeDone})
}

// pickModel returns the requested model when it is non-blank; otherwise it
// falls back to the service's configured default.
func (p *PromptService) pickModel(requested string) string {
	if strings.TrimSpace(requested) == "" {
		return p.defaultModel
	}
	return requested
}

// openAIChatRequest is the JSON body sent to the OpenAI-compatible
// /chat/completions endpoint. Optional tuning knobs are pointers so that
// unset values are omitted from the payload entirely.
type openAIChatRequest struct {
	Model       string              `json:"model"`
	Messages    []openAIChatMessage `json:"messages"`
	Stream      bool                `json:"stream"`
	Temperature *float32            `json:"temperature,omitempty"`
	MaxTokens   *int                `json:"max_tokens,omitempty"`
	TopP        *float32            `json:"top_p,omitempty"`
}

// openAIChatMessage is a single conversation turn in the wire format
// expected by the chat completions API.
type openAIChatMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

// openAIStreamChunk models the subset of a streamed SSE payload this service
// consumes: per-choice deltas carrying an optional role and content fragment.
type openAIStreamChunk struct {
	Choices []struct {
		Delta struct {
			Content string `json:"content"`
			Role    string `json:"role"`
		} `json:"delta"`
	} `json:"choices"`
}

// convertMessages maps prompt messages into the OpenAI-compatible wire
// format, dropping entries whose content is blank or whose role cannot be
// normalized.
func convertMessages(messages []models.PromptMessage) []openAIChatMessage {
	converted := make([]openAIChatMessage, 0, len(messages))
	for _, m := range messages {
		if strings.TrimSpace(m.Content) == "" {
			continue
		}
		normalized := sanitizeRole(m.Role)
		if normalized == "" {
			continue
		}
		converted = append(converted, openAIChatMessage{
			Role:    normalized,
			Content: m.Content,
		})
	}
	return converted
}

// sanitizeRole normalizes a role string (case- and whitespace-insensitively)
// to one of the API's accepted roles, defaulting to "user" for anything
// unrecognized.
func sanitizeRole(role string) string {
	normalized := strings.ToLower(strings.TrimSpace(role))
	for _, accepted := range []string{"system", "assistant", "tool"} {
		if normalized == accepted {
			return accepted
		}
	}
	return "user"
}

// Ready reports whether the prompt service can reach DeepSeek, i.e. whether
// an API key has been configured.
func (p *PromptService) Ready() bool {
	return len(strings.TrimSpace(p.apiKey)) > 0
}

// WithAPIKey replaces the configured DeepSeek API key in place, letting tests
// inject a credential without touching environment variables.
func (p *PromptService) WithAPIKey(key string) {
	p.apiKey = key
}

// WithHTTPClient overrides the DeepSeek http client (useful for tests/mocks).
// A nil client is ignored so the service always keeps a usable client.
func (p *PromptService) WithHTTPClient(client *http.Client) {
	if client == nil {
		return
	}
	p.httpClient = client
}
