import {
  SystemMessagePromptTemplate,
  HumanMessagePromptTemplate,
  ChatPromptTemplate,
} from "@langchain/core/prompts";
import { ChatOllama } from "@langchain/ollama";

/**
 * Compose a chat prompt from a system template and a human (user) template.
 *
 * @param {string} sysTemplate - System-message template; may contain
 *   placeholders such as `{location}` / `{topic}`.
 * @param {string} userTemplate - Human-message template; may contain
 *   placeholders such as `{question}`.
 * @returns {ChatPromptTemplate} A combined chat prompt whose messages are
 *   [system, human], ready for `formatMessages(...)`.
 */
function buildPrompt(sysTemplate, userTemplate) {
  return ChatPromptTemplate.fromMessages([
    SystemMessagePromptTemplate.fromTemplate(sysTemplate),
    HumanMessagePromptTemplate.fromTemplate(userTemplate),
  ]);
}

// 1. System prompt template: a Chinese tour-guide persona.
//    (The template text itself is runtime data and intentionally left in Chinese.)
const sysTemplate =
  "你是一位中国的专业导游，请一定使用中文向游客介绍{location}的{topic}。记住：一定要使用中文来介绍";

// 2. User-input prompt template.
const userTemplate = "当前用户的问题：{question}";

// 3. Combine the two templates into a single chat prompt.
const pt = buildPrompt(sysTemplate, userTemplate);

// 4. Fill in the placeholders to produce the concrete message list.
const result = await pt.formatMessages({
  location: "北京",
  topic: "特产",
  question: "北京有哪些值得推荐的特产",
});

// 5. Create the model instance.
//    NOTE(fix): ChatOllama has no `stream` constructor option — streaming is
//    requested by calling `model.stream(...)` below — so the previous
//    `stream: true` field was a silent no-op and has been removed.
const model = new ChatOllama({
  model: "llama3",
  temperature: 0.7,
});

// 6. Stream the response and write each token chunk to stdout as it arrives.
const stream = await model.stream(result);

for await (const chunk of stream) {
  process.stdout.write(chunk.content);
}
