# NOTE(review): the three lines below were Hugging Face Spaces page residue
# ("Spaces: / Sleeping / Sleeping") captured by a page scrape — not code.
# Kept as a comment so the file remains valid Python.
# Third-party imports (cleaned of the "| |" table-extraction artifacts).
import chainlit as cl
from langchain.prompts import ChatPromptTemplate
from langchain.schema import StrOutputParser
from langchain.schema.runnable import Runnable
from langchain.schema.runnable.config import RunnableConfig
from langchain_community.chat_models import ChatOpenAI  # NOTE: shadowed by the langchain_openai import below
from langchain_community.embeddings import OpenAIEmbeddings
from langchain_community.vectorstores import FAISS
from langchain_core.runnables.passthrough import RunnablePassthrough
from langchain_openai import ChatOpenAI  # preferred, non-deprecated ChatOpenAI (last import wins)
def read_text_file(file_path: str) -> str:
    """Read and return the entire contents of a text file.

    Args:
        file_path: Path of the file to read.

    Returns:
        The file's contents decoded as UTF-8. On failure the error is
        reported *in-band* as a string: ``"File not found."`` when the file
        does not exist, or ``"An error occurred: ..."`` for any other error.
        (This sentinel-string contract is kept for existing callers; note it
        means a missing file is indistinguishable from a file containing
        that exact text.)
    """
    try:
        # Explicit encoding: the platform default is locale-dependent.
        with open(file_path, "r", encoding="utf-8") as file:
            return file.read()
    except FileNotFoundError:
        return "File not found."
    except Exception as e:
        return f"An error occurred: {e}"
@cl.on_chat_start
async def on_chat_start():
    """Build the note-generation chain and store it in the user session.

    Fix: the original function was never registered with chainlit — the
    ``@cl.on_chat_start`` decorator is the documented way to have chainlit
    invoke this handler when a new chat session begins; without it the
    chain was never constructed.
    """
    # System prompt text lives in a local file literally named "prompt".
    prompt = read_text_file("prompt")

    # Optional RAG retrieval setup, currently disabled:
    # embeddings = OpenAIEmbeddings(model="text-embedding-3-large")
    # vectorstore = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)
    # retriever = vectorstore.as_retriever(search_kwargs={"k": 1})

    chat_model = ChatOpenAI(streaming=True, model="gpt-4")
    prompt_template = ChatPromptTemplate.from_messages(
        [
            ("system", prompt),
            ("human", "You are looking to create notes for the chapter, {topic} to teach effectively in the class"),
        ]
    )
    parser = StrOutputParser()
    # The raw user message is passed straight through as the {topic}
    # template variable (LCEL dict-of-runnables composition).
    runnable_chain = (
        {"topic": RunnablePassthrough()}
        | prompt_template
        | chat_model
        | parser
    )
    cl.user_session.set("runnable", runnable_chain)
@cl.on_message
async def on_message(message: cl.Message):
    """Stream the chain's answer for an incoming chat message.

    Fix: the original function was never registered with chainlit — the
    ``@cl.on_message`` decorator is what routes user messages to this
    handler; without it the handler was dead code.
    """
    runnable = cl.user_session.get("runnable")  # type: Runnable
    msg = cl.Message(content="")
    # Stream tokens to the UI as they arrive rather than waiting for the
    # complete reply; the callback handler surfaces intermediate LangChain
    # steps in the chainlit UI.
    async for chunk in runnable.astream(
        message.content,
        config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
    ):
        await msg.stream_token(chunk)
    await msg.send()