from langchain_community.llms import HuggingFaceEndpoint
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
import chainlit as cl
import os

repo_id = "tiiuae/falcon-7b-instruct"

# Hugging Face API token (placeholder; replace with your own token or load it securely)
HUGGINGFACEHUB_API_TOKEN = "<YOUR_HUGGINGFACEHUB_API_TOKEN>"

# Set the token as an environment variable
os.environ["HUGGINGFACEHUB_API_TOKEN"] = HUGGINGFACEHUB_API_TOKEN

# Instantiate the Falcon-7B-Instruct model hosted on the Hugging Face Inference API
llm = HuggingFaceEndpoint(
    repo_id=repo_id,
    max_new_tokens=128,
    temperature=0.5,
    huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN,
)

template = """You're a general chatbot that can answer anything related to enterprises.

{question}
"""


@cl.on_chat_start
def main():
    # Instantiate the chain for that user session
    prompt = PromptTemplate(template=template, input_variables=["question"])
    llm_chain = LLMChain(prompt=prompt, llm=llm, verbose=True)

    # Store the chain in the user session
    cl.user_session.set("llm_chain", llm_chain)


@cl.on_message
async def main(message: cl.Message):
    # Retrieve the chain from the user session
    llm_chain = cl.user_session.get("llm_chain")  # type: LLMChain

    # Call the chain asynchronously
    res = await llm_chain.acall(
        message.content, callbacks=[cl.AsyncLangchainCallbackHandler()]
    )

    # Send the model's reply back to the Chainlit UI
    await cl.Message(content=res["text"]).send()
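With both handlers registered, the app can be launched from the terminal. Assuming the script is saved as `app.py` (the filename here is just an example), run `chainlit run app.py -w`; the `-w` flag watches the file and reloads the app automatically as you edit it.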