from langchain_community.callbacks import get_openai_callback
from langchain_core.prompts import PromptTemplate
from langchain_openai import ChatOpenAI

from prompts.outline_agent import template


class OutlineAgent:
    def __init__(self, model: str = "gpt-4-1106-preview", temperature: float = 0.3) -> None:
        self._prompt = PromptTemplate(input_variables=["query"], template=template)
        self._llm = ChatOpenAI(model=model, temperature=temperature)
        self._chain = self._prompt | self._llm

    def run(self, query: str) -> tuple[list[dict], dict]:
        # Invoke the chain while tracking token usage and cost with the OpenAI callback.
        with get_openai_callback() as cb:
            outline = self._chain.invoke({"query": query}).content.strip()
            tokens = cb.total_tokens
            cost = cb.total_cost

        # Parse the outline: every blank-line-separated block after the first is a theme
        # whose lines are expected to be "title:", "query:", and "description:", in order.
        themes = []
        keys = ["title", "query", "description"]
        for theme in outline.split("\n\n")[1:]:
            items = theme.strip().split("\n")
            themes.append(
                {
                    keys[i]: item[item.index(":") + 1 :].strip().replace('"', "")
                    for i, item in enumerate(items)
                }
            )
        return themes, {"tokens": tokens, "cost": cost}
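

# Usage sketch (an assumption, not part of the original module): instantiate the agent
# and run it on a hypothetical query, printing the parsed themes and usage stats.
# Assumes OPENAI_API_KEY is set in the environment and that `prompts.outline_agent.template`
# produces an outline in the blank-line-separated format parsed by `run`.
if __name__ == "__main__":
    agent = OutlineAgent()
    themes, stats = agent.run("The impact of remote work on software teams")  # hypothetical query
    for theme in themes:
        print(f"{theme['title']}: {theme['query']}")
    print(f"tokens={stats['tokens']}, cost=${stats['cost']:.4f}")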