Pranjal Gupta committed · Commit 9d39028 · 1 Parent(s): ed9c897
gradio text box xhange
app.py CHANGED
@@ -178,26 +178,27 @@ def gradio_rag_wrapper(message, history, token):
 
     return response
 
-def save_token(token):
-    return token
-
 # Create the Gradio interface with multimodal input
 with gr.Blocks(title="Contextual RAG Chatbot on Hugging Face Spaces") as demo:
     gr.Markdown("## Contextual RAG Chatbot")
     gr.Markdown("Please enter your Hugging Face Access Token to access gated models like Llama 3.2. You can generate a token from your [Hugging Face settings](https://huggingface.co/settings/tokens).")
 
-
-
-
-
+    newToken = ""
+
+    def tokenFunc (value) :
+        newToken = value
+
+    name = gr.Textbox(
+        label="Hugging Face Access Token",
+    )
+
+    print("toeken =---------->[0]", name.value)
 
-    # 2. Chatbot
     chatbot = gr.ChatInterface(
-        fn=lambda message, history
-        state=token_state, # pass token state to the function
+        fn=lambda message, history: gradio_rag_wrapper(message, history, name.value),
         multimodal=True,
         description="Upload a PDF file to start chatting!",
-        textbox=gr.MultimodalTextbox(file_types=[".pdf"],
+        textbox=gr.MultimodalTextbox(file_types=[".pdf"]),
     )
 
 if __name__ == "__main__":
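Two details of the new wiring are worth noting: `tokenFunc` assigns to a local `newToken` and is never attached to any event in this diff, and `name.value` inside the `fn` lambda reads the Textbox's construction-time default (an empty string), not whatever the user later types, so `gradio_rag_wrapper` would receive an empty token on every message. Below is a minimal, self-contained sketch of one way to pass the live textbox contents through instead, using `gr.ChatInterface`'s `additional_inputs`. It assumes Gradio 4.x, keeps the `(message, history, token)` signature from the hunk header, and stubs the RAG pipeline body, which is not part of this commit.

```python
import gradio as gr


def gradio_rag_wrapper(message, history, token):
    # Stub standing in for the Space's actual RAG pipeline; only the
    # (message, history, token) signature is taken from the hunk header.
    files = message.get("files", []) if isinstance(message, dict) else []
    return f"Received {len(files)} file(s); token provided: {bool(token)}"


with gr.Blocks(title="Contextual RAG Chatbot on Hugging Face Spaces") as demo:
    gr.Markdown("## Contextual RAG Chatbot")
    # type="password" is an assumed nicety for token entry, not in the diff.
    token_box = gr.Textbox(label="Hugging Face Access Token", type="password")

    # additional_inputs hands the textbox's *current* value to fn on every
    # send, instead of the construction-time default that name.value holds.
    chatbot = gr.ChatInterface(
        fn=gradio_rag_wrapper,
        additional_inputs=[token_box],
        multimodal=True,
        description="Upload a PDF file to start chatting!",
        textbox=gr.MultimodalTextbox(file_types=[".pdf"]),
    )

if __name__ == "__main__":
    demo.launch()
```

Routing the token through `additional_inputs` avoids both the unused `tokenFunc` callback and the stale `name.value` read, and it needs no module-level `newToken` variable at all.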