package util

import (
	"github.com/gin-gonic/gin"
	"github.com/tmc/langchaingo/llms/ollama"
	"log"
	"net/http"
	"os"
)

// Ollama creates an ollama-backed LLM client for the given model name.
//
// On failure it logs the error, writes a 500 JSON response on c, and
// returns nil — callers must check for a nil result before using it.
func Ollama(c *gin.Context, modelName string) *ollama.LLM {
	llm, err := ollama.New(ollama.WithModel(modelName))
	if err != nil {
		log.Println("模型创建失败", err)
		// Use err.Error(): marshaling the error value directly produces "{}"
		// for most error types, because they expose no exported fields.
		c.JSON(http.StatusInternalServerError, gin.H{"msg": err.Error()})
		return nil
	}

	return llm
}
func init() {
	//设置ollama的地址，默认本地可以不进行设置
	os.Setenv("OLLAMA_HOST", "http://127.0.0.1:11434")
}
