"""Build and print a few-shot prompt that summarizes a set of call logs.

Downloads three call-log documents, preprocesses them, assembles a
few-shot prompt from the templates defined in ``config``, and prints the
formatted prompt. The actual LLM call is left commented out at the end.
"""
import os

from config import *  # NOTE(review): star import — presumably supplies MODEL, example_template, examples, template; verify against config.py
from pydantic import BaseModel
from langchain_core.prompts import FewShotPromptTemplate, PromptTemplate
from langchain_openai import ChatOpenAI

from utils import load_logs, preprocess_logs, read_documents

# NOTE(review): constructed at import time, so merely importing this module
# builds an OpenAI client. Move inside main() if that side effect is unwanted.
llm = ChatOpenAI(model=MODEL)


class SubmitQuestionAndDocumentsResponse(BaseModel):
    """Empty response model."""

    pass  # The response body doesn't matter for this endpoint


def main() -> None:
    """Fetch the call logs, build the few-shot prompt, and print it."""
    documents = [
        "https://storage.googleapis.com/cleric-assignment-call-logs/call_log_20240314_104111.txt",
        "https://storage.googleapis.com/cleric-assignment-call-logs/call_log_20240315_104111.txt",
        "https://storage.googleapis.com/cleric-assignment-call-logs/call_log_20240316_104111.txt",
    ]
    logs = read_documents(documents)
    processed_logs = preprocess_logs(logs)

    # example_template / examples / template come from the config star import.
    example_prompt = PromptTemplate(
        template=example_template,
        input_variables=["question", "logs", "answer"],
    )
    prompt = FewShotPromptTemplate(
        examples=examples,
        example_prompt=example_prompt,
        suffix=template,
        input_variables=["question", "logs"],
    )

    prompt_formatted = prompt.format(
        question='What product design decisions did the team make?',
        logs=processed_logs,
    )
    print(prompt_formatted)
    # NOTE(review): llm.predict is deprecated in recent LangChain; prefer
    # llm.invoke(prompt_formatted) when enabling this call.
    # bullets = llm.predict(prompt_formatted)
    # print(bullets)


if __name__ == '__main__':
    main()