JKilpatrick committed · Commit 9c01f8c · 1 Parent(s): 249b547
update
Browse files:
- .chainlit/config.toml +84 -0
- app.py +23 -12
- chainlit.md +1 -1
.chainlit/config.toml
ADDED
@@ -0,0 +1,84 @@
+[project]
+# Whether to enable telemetry (default: true). No personal data is collected.
+enable_telemetry = true
+
+# List of environment variables to be provided by each user to use the app.
+user_env = []
+
+# Duration (in seconds) during which the session is saved when the connection is lost
+session_timeout = 3600
+
+# Enable third parties caching (e.g LangChain cache)
+cache = false
+
+# Follow symlink for asset mount (see https://github.com/Chainlit/chainlit/issues/317)
+# follow_symlink = false
+
+[features]
+# Show the prompt playground
+prompt_playground = true
+
+# Process and display HTML in messages. This can be a security risk (see https://stackoverflow.com/questions/19603097/why-is-it-dangerous-to-render-user-generated-html-or-javascript)
+unsafe_allow_html = false
+
+# Process and display mathematical expressions. This can clash with "$" characters in messages.
+latex = false
+
+# Authorize users to upload files with messages
+multi_modal = true
+
+# Allows user to use speech to text
+[features.speech_to_text]
+enabled = false
+# See all languages here https://github.com/JamesBrill/react-speech-recognition/blob/HEAD/docs/API.md#language-string
+# language = "en-US"
+
+[UI]
+# Name of the app and chatbot.
+name = "Chatbot"
+
+# Show the readme while the conversation is empty.
+show_readme_as_default = true
+
+# Description of the app and chatbot. This is used for HTML tags.
+# description = ""
+
+# Large size content are by default collapsed for a cleaner ui
+default_collapse_content = true
+
+# The default value for the expand messages settings.
+default_expand_messages = false
+
+# Hide the chain of thought details from the user in the UI.
+hide_cot = false
+
+# Link to your github repo. This will add a github button in the UI's header.
+# github = ""
+
+# Specify a CSS file that can be used to customize the user interface.
+# The CSS file can be served from the public directory or via an external link.
+# custom_css = "/public/test.css"
+
+# Override default MUI light theme. (Check theme.ts)
+[UI.theme.light]
+#background = "#FAFAFA"
+#paper = "#FFFFFF"
+
+[UI.theme.light.primary]
+#main = "#F80061"
+#dark = "#980039"
+#light = "#FFE7EB"
+
+# Override default MUI dark theme. (Check theme.ts)
+[UI.theme.dark]
+#background = "#FAFAFA"
+#paper = "#FFFFFF"
+
+[UI.theme.dark.primary]
+#main = "#F80061"
+#dark = "#980039"
+#light = "#FFE7EB"
+
+
+[meta]
+generated_by = "0.7.700"
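As a quick local sanity check of the new file (not part of the Space's code), it parses cleanly with Python's built-in tomllib on Python 3.11+; a minimal sketch:

# Parse and spot-check the new Chainlit config (requires Python 3.11+).
import tomllib

with open(".chainlit/config.toml", "rb") as f:
    config = tomllib.load(f)

print(config["project"]["session_timeout"])  # 3600
print(config["features"]["multi_modal"])     # True
print(config["UI"]["name"])                  # "Chatbot"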
app.py
CHANGED
@@ -51,7 +51,6 @@ def load_vectorstore():
 
     text_field = "text"
 
-
     vectorstore = Pinecone(
         index,
         embedder,
@@ -68,8 +67,9 @@ def qa_chain():
     llm = load_llm()
     retriever = vectorstore.as_retriever()
 
-    template = """You are a helpful assistant that answers questions on the provided context, if its not answered within the context respond with
-Additionally, the context includes a specific integer formatted as <int>, representing a timestamp.
+    template = """You are a helpful assistant that answers questions on the provided context, if its not answered within the context respond with "This query is not directly mentioned by AI Makerspace" then respond the best to your ability.
+Additionally, the context includes a specific integer formatted as <int>, representing a timestamp.
+In your response, include this integer as a citation, formatted as a YouTube video link: "https://www.youtube.com/watch?v=[video_id]&t=<int>s" and text of link be the title of video.
 
 
 ### CONTEXT
@@ -101,16 +101,27 @@
 # =============================================================================
 @cl.on_chat_start
 async def on_chat_start():
-
-
-
-
+    chain = qa_chain()
+    cl.user_session.set("chain", chain)
+    msg = cl.Message(content="What is your question about AI Makerspace?")
+    await msg.send()
 
 @cl.on_message
 async def on_message(message: cl.Message):
-
-
+    chain = cl.user_session.get("chain")
+    res = chain.invoke({"question": message.content})
+
+    answer = res['response'].content
+
+    source_documents = set()
+
+    for document in res['context']:
+        source_url = document.metadata['source_document']
+        source_documents.add(source_url)
+
+    combined_message = answer + "\n\nSource Documents:\n" + "\n".join(source_documents)
+
+    await cl.Message(content=combined_message).send()
+
 
-
-
-    await cl.Message(content=answer).send()
+    #await cl.Message(content=answer).send()
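The diff only shows how on_message consumes the chain: chain.invoke({"question": ...}) is expected to return a dict with a "response" chat message and the retrieved "context" documents. The body of qa_chain() itself sits outside the visible hunks; a minimal LCEL sketch that would satisfy that contract (a hypothetical build_chain helper, assuming the prompt template exposes {context} and {question} placeholders, not the committed code) looks like:

# Hypothetical sketch only -- not the committed qa_chain() body.
from operator import itemgetter

from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough


def build_chain(retriever, llm, template: str):
    # Assumes the template contains {context} and {question} placeholders;
    # real code would typically also format the documents into plain text.
    prompt = ChatPromptTemplate.from_template(template)
    return (
        # Retrieve documents for the incoming question and keep the question.
        {
            "context": itemgetter("question") | retriever,
            "question": itemgetter("question"),
        }
        # Add the model answer under "response" while passing "context"
        # through untouched, so on_message can read res["response"].content
        # and iterate over res["context"] for document.metadata.
        | RunnablePassthrough.assign(response=prompt | llm)
    )

With that shape, res['context'] remains the list of retrieved documents, which is what the source_document metadata loop above relies on.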
chainlit.md
CHANGED
@@ -12,5 +12,5 @@ Your interactive guide to the AI Makerspace YouTube channel as they guide us thr
 ## Example Prompts
 - What is AI Makerspace?
 - What is RAG?
-- Who is the llm wizard?
+- Who is the llm wizard?
 - Summarize the livestream "How to Become an AI Engineer in 2024"