joyson072 committed
Commit 8a78aa6 • 1 Parent(s): efcc2eb

Update app.py

Files changed (1)
  1. app.py +39 -55
app.py CHANGED
@@ -1,66 +1,50 @@
- import langchain
- #from pydantic import BaseModel, model_validator
- from langchain.embeddings.openai import OpenAIEmbeddings
- # from langchain.vectorstores import Chroma
- from langchain.vectorstores import FAISS
- from langchain.text_splitter import CharacterTextSplitter
- from langchain.llms import OpenAI
- from langchain.chains import VectorDBQA
- from langchain.chains import RetrievalQA
- from langchain.document_loaders import DirectoryLoader
- from langchain.chains import ConversationalRetrievalChain
- from langchain.memory import ConversationBufferMemory
- from langchain.evaluation.qa import QAGenerateChain
- import magic
- import os
  import streamlit as st
- from streamlit_chat import message

- st.title("Welcome to ExamPrepBot")

- if 'responses' not in st.session_state:
-     st.session_state['responses'] = ["How can I assist you?"]

- if 'requests' not in st.session_state:
-     st.session_state['requests'] = []

- openai_api_key = os.getenv("OPENAI_API_KEY", "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")
- embeddings = OpenAIEmbeddings(openai_api_key=openai_api_key)
- new_db = FAISS.load_local("faiss_govt_RCV", embeddings)
- llm = OpenAI(openai_api_key=openai_api_key, temperature=0.0)

- # if 'buffer_memory' not in st.session_state:
- memory= ConversationBufferMemory(memory_key="chat_history", return_messages=True)
- retriever = new_db.as_retriever()
- chain = ConversationalRetrievalChain.from_llm(llm=llm, chain_type="stuff", memory= memory,retriever=retriever, verbose=False)

- # container for chat history
- response_container = st.container()
- # container for text box
- textcontainer = st.container()


- with textcontainer:
-     query = st.text_input(label="Please Enter Your Prompt Here: ", placeholder="Ask me")
-     if query:
-         with st.spinner("Cooking..."):
-             # conversation_string = get_conversation_string()
-             # st.code(conversation_string)
-             # refined_query = query_refiner(conversation_string, query)
-             # st.subheader("Refined Query:")
-             # st.write(refined_query)
-             # context = find_match(refined_query)
-             # print(context)
-             response = chain.run(query)
-         st.session_state.requests.append(query)
-         st.session_state.responses.append(response)
- with response_container:
-     if st.session_state['responses']:

-         for i in range(len(st.session_state['responses'])):
-             message(st.session_state['responses'][i],key=str(i))
-             if i < len(st.session_state['requests']):
-                 message(st.session_state["requests"][i], is_user=True,key=str(i)+ '_user')

- # with st.expander('Message history'):
- #     st.info(memory.buffer)

+ from typing import List, Optional
+
+ from pydantic import BaseModel, BaseSettings, SecretStr
  import streamlit as st
 
+ # Slide 3: Basic Model
+
+ class User(BaseModel):
+     id: int
+     name: str = "Jane Doe"
+
+ data = {"id": 19, "name": "Fanilo", "age": 179}
+ user = User(**data)
+ st.write(user)
+
+ data = {"id": "Fanilo", "name": 42}
+ user = User(**data)
+ #st.write(user)
+
+ # Slide 4: Hierarchical Model

+ class Address(BaseModel):
+     city: str
+     street: Optional[str]


+ class User(BaseModel):
+     id: int
+     name: str
+     addresses: List[Address]


+ data = {
+     "id": 42,
+     "name": "Fanilo",
+     "addresses": [{"city": "Paris"}, {"city": "Tokyo", "street": "こんにちは"}],
+ }
+ user = User(**data)
+ st.success(user.addresses[1].street)

+ # Slide 7: Secrets

+ class Settings(BaseSettings):
+     auth_key: SecretStr
+     api_key: str

+     class Config:
+         env_file = "settings.env"

+ st.write(Settings().dict())
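Note on the new Slide 7 block: Settings() only constructs if values for auth_key and api_key are available, either as environment variables or in the settings.env file named in Config (pydantic's env_file loading also requires the python-dotenv package). Below is a minimal sketch, with hypothetical key names and values, of what that file could contain and how SecretStr keeps auth_key masked; this sketch is not part of the committed app.py.

# settings.env (assumed contents; BaseSettings matches field names case-insensitively)
#   AUTH_KEY=super-secret-token
#   API_KEY=demo-api-key

from pydantic import BaseSettings, SecretStr

class Settings(BaseSettings):
    auth_key: SecretStr
    api_key: str

    class Config:
        env_file = "settings.env"

settings = Settings()
print(settings.dict())                       # {'auth_key': SecretStr('**********'), 'api_key': 'demo-api-key'}
print(settings.auth_key.get_secret_value())  # raw secret string, returned only when explicitly requested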