package mold

import (
	"encoding/json"
	"net/http"
	"time"

	modelai "wechat/src/model/ai"
	"wechat/src/tools"
)

// Ollama is a minimal client for a local Ollama chat API. It keeps a
// per-user message history so each conversation retains its context.
//
// NOTE(review): Log is read and written without synchronization; if
// SendingQuestions can be called from concurrent handlers, this map
// needs a mutex — confirm against the callers.
type Ollama struct {
	Uri string                              `json:"uri"` // chat endpoint URL, e.g. http://localhost:11434/api/chat
	Log map[string][]*modelai.OllamaMessage `json:"log"` // conversation history keyed by username
}

// OllamaInit constructs an Ollama client pointed at the local Ollama
// chat endpoint, with an empty per-user conversation log.
func OllamaInit() *Ollama {
	// Keyed fields: robust against future reordering of the struct.
	return &Ollama{
		Uri: "http://localhost:11434/api/chat",
		Log: make(map[string][]*modelai.OllamaMessage),
	}
}

// SendingQuestions appends the user's message to that user's history,
// posts the full history to the Ollama chat API, records the model's
// reply, and returns the reply text. The literal message "重置" (reset)
// instead clears the user's history and returns a confirmation.
//
// On any transport or decode error the just-appended user message is
// rolled back, so a retried call does not duplicate history entries.
func (o *Ollama) SendingQuestions(content string, username string) (string, error) {
	// Reset command: wipe this user's stored conversation.
	if content == "重置" {
		delete(o.Log, username)
		return "重置完毕", nil
	}

	// First message from this user: seed the conversation with the
	// preset instruction ("please reply in Chinese").
	if _, ok := o.Log[username]; !ok {
		o.Log[username] = append(o.Log[username], &modelai.OllamaMessage{Content: "以后的对话请使用中文交流", Role: "user"})
	}

	// Remember the history length so we can roll back on failure.
	prevLen := len(o.Log[username])
	o.Log[username] = append(o.Log[username], &modelai.OllamaMessage{Content: content, Role: "user"})

	// Bounded timeout: a stalled Ollama server must not hang the caller
	// forever (the original zero-value client had no timeout at all).
	client := &http.Client{Timeout: 60 * time.Second}
	body, _, err := tools.Post(o.Uri, nil, modelai.OllamaNewConfig(o.Log[username]), nil, client)
	if err != nil {
		o.Log[username] = o.Log[username][:prevLen]
		return "", err
	}

	// Decode into a value, not a *pointer*: if the response body were
	// JSON null, a nil pointer would panic at msg.Message below.
	var msg modelai.OllamaBody
	if err := json.Unmarshal(body, &msg); err != nil {
		o.Log[username] = o.Log[username][:prevLen]
		return "", err
	}

	o.Log[username] = append(o.Log[username], &msg.Message)
	return msg.Message.Content, nil
}
