import "dotenv/config";
import { ChatOpenAI } from "@langchain/openai";
import { ChatMessageHistory } from "langchain/memory";
import { HumanMessage, AIMessage } from "@langchain/core/messages";
import {
  ChatPromptTemplate,
  MessagesPlaceholder,
} from "@langchain/core/prompts";
import { RunnableWithMessageHistory } from "@langchain/core/runnables";

// Chat model wired up from environment variables; OPENAI_API_BASE lets the
// script point at a proxy / alternative OpenAI-compatible endpoint.
const chatModel = new ChatOpenAI({
  apiKey: process.env.OPENAI_API_KEY,
  configuration: { baseURL: process.env.OPENAI_API_BASE },
});

// Prompt template: system instructions, then prior conversation turns
// (injected under the "history_message" key), then the current user input.
// Fix: the original system prompt had garbled pronoun agreement
// ("provides lots of ... from its context", "If the you does not know ...
// it truthfully says you do not know"), which reads as broken instructions
// to the model.
const prompt = ChatPromptTemplate.fromMessages([
  [
    "system",
    `You are a helpful assistant. Answer all questions to the best of your ability.
    You are talkative and provide lots of specific details from your context.
    If you do not know the answer to a question, truthfully say that you do not know.`,
  ],
  // Placeholder filled by RunnableWithMessageHistory with the stored turns.
  new MessagesPlaceholder("history_message"),
  ["human", "{input}"],
]);

// Compose prompt -> model, then wrap the pipeline so each invocation reads
// from and appends to a conversation history.
const chain = prompt.pipe(chatModel);

// One in-memory history shared by every session in this demo script.
const history = new ChatMessageHistory();

const chainWithHistory = new RunnableWithMessageHistory({
  runnable: chain,
  // Single-session demo: the session id is ignored and the shared history
  // is always returned.
  getMessageHistory: () => history,
  inputMessagesKey: "input",
  historyMessagesKey: "history_message",
});

// All turns run under the same session id so they share one history.
const sessionConfig = { configurable: { sessionId: "none" } };

// Send a single user turn through the history-aware chain.
const ask = (input: string) => chainWithHistory.invoke({ input }, sessionConfig);

// Three sequential turns: introduce a name, then ask the model to recall it,
// then add an unrelated fact. Each await must finish before the next turn so
// the history contains the earlier exchange.
const res1 = await ask("你好，我叫张扬");
// console.log(res1);

const res2 = await ask("我的名字叫什么？请说出我的姓氏");
// console.log(res2);

const res3 = await ask("我在陕西");
console.log(res3);

// Dump the accumulated message history (all human + AI turns).
const messages = await history.getMessages();
console.log(messages);
