# NOTE(review): removed non-code scrape residue that preceded this file
# (a "File size" header, git commit hashes `110fe4c` / `346dfa2`, and a
# line-number gutter 1-52 pasted from a repository web viewer). The bare
# hash tokens were not valid Python, so the file could not be imported
# until this residue was stripped.
from langchain_community.chat_models import ChatOpenAI
from typing import *
from langchain.tools import BaseTool
import chainlit as cl
from chainlit.sync import run_sync
from datasets import load_dataset
from langchain.document_loaders import CSVLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
import os
import pandas as pd # the Colab runtime will already have this library installed - no need to `pip install`
from langchain_openai import OpenAIEmbeddings
from langchain.embeddings import CacheBackedEmbeddings
from langchain.storage import LocalFileStore
from langchain_community.vectorstores import FAISS
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI

@cl.on_chat_start
def start():
    """Initialize the per-session LLM chain when a new chat begins.

    Builds a cache-backed OpenAI embedder, loads the FAISS vector store
    from disk, assembles a prompt -> chat-model -> string-parser chain,
    and stores the chain in the Chainlit user session under "chain"
    for `on_message` to retrieve.
    """
    # Local byte store that persists embedding results between runs.
    os.makedirs("embedding_cache", exist_ok=True)
    store = LocalFileStore("embedding_cache")

    openai_api_key = os.getenv('OPENAI_API_KEY')
    primary_embedder = OpenAIEmbeddings(api_key=openai_api_key)
    # Wrap the embedder so repeated texts hit the on-disk cache instead
    # of the OpenAI API.
    embedder = CacheBackedEmbeddings(primary_embedder, store)

    # BUG FIX: previously `primary_embedder` was passed here, so the
    # cache-backed wrapper was never used and every embedding call went
    # straight to the API. Use `embedder` so lookups are cached.
    # `allow_dangerous_deserialization=True` is required because FAISS
    # persists via pickle — only load stores you created yourself.
    vector_store = FAISS.load_local("vector_store", embedder, allow_dangerous_deserialization=True)

    # Create the components (chefs)
    prompt_template = ChatPromptTemplate.from_messages(
	[
	    ("system", "You are a helpful AI bot."),
	    ("human", "{user_input}"),
	]
    )

    # NOTE(review): this retriever is never wired into the chain below —
    # the chain answers from the prompt alone, so the vector store is
    # effectively unused at query time. TODO: confirm whether a RAG step
    # (context injection into the prompt) was intended here.
    retriever = vector_store.as_retriever()

    chat_model = ChatOpenAI(api_key=openai_api_key)
    parser = StrOutputParser()
    # LCEL pipeline: template -> LLM -> plain-string output.
    chain = prompt_template | chat_model | parser
    cl.user_session.set("chain", chain)

@cl.on_message
async def on_message(message: cl.Message):
    """Handle each incoming chat message: run the session chain and reply.

    Retrieves the chain stored by `start` from the user session, invokes
    it with the user's text, and sends the model's answer back.
    """
    user_message = message.content
    print(f"User message: {user_message}")
    chain = cl.user_session.get("chain")
    # BUG FIX: the synchronous `chain.invoke(...)` blocked the asyncio
    # event loop for the entire LLM round-trip, freezing all other
    # sessions. `ainvoke` awaits the call instead.
    res = await chain.ainvoke({"user_input": user_message})
    await cl.Message(content=res).send()