Spaces:
Sleeping
Sleeping
Sandhya
committed on
Commit
·
f5dcbeb
1
Parent(s):
c0705f0
First Commit
Browse files- app.py +15 -24
- mcp_server.py +1 -1
app.py
CHANGED
|
@@ -56,35 +56,26 @@ async def startup_event():
|
|
| 56 |
agent_instance = await get_agent()
|
| 57 |
|
| 58 |
|
| 59 |
-
def chat_function(user_message,history,model_id):
|
| 60 |
-
|
| 61 |
-
"""Handles a user question by prompting the agent to read a model card and answer.
|
| 62 |
-
Args:
|
| 63 |
-
user_message (str): The user's question.
|
| 64 |
-
history (list): Chat history for display.
|
| 65 |
-
model_id (str): Hugging Face repo ID
|
| 66 |
-
|
| 67 |
-
Returns:
|
| 68 |
-
Tuple[list,str]: Updated Chat history and empty string for clearing input."""
|
| 69 |
-
|
| 70 |
prompt=f"""You're an assistant helping with hugging face model cards.
|
| 71 |
-
|
| 72 |
-
|
| 73 |
-
|
| 74 |
-
history=history+[(user_message,None)]
|
| 75 |
try:
|
| 76 |
-
response=""
|
| 77 |
-
for output in agent_instance.run(prompt):
|
| 78 |
-
if hasattr(output,"content") and output.content:
|
| 79 |
-
response=output.content
|
| 80 |
-
final_response=response or "⚠️ Sorry, I couldn't generate a response."
|
| 81 |
-
history[-1]=(user_message,final_response)
|
| 82 |
except Exception as e:
|
| 83 |
-
history[-1]=(user_message,f"⚠️ Error: {str(e)}")
|
| 84 |
return history, ""
|
| 85 |
|
| 86 |
|
| 87 |
|
|
|
|
| 88 |
def create_gradio_app():
|
| 89 |
with gr.Blocks(title="Model Card Chatbot") as demo:
|
| 90 |
gr.Markdown("## 🤖 Model Card Chatbot\nAsk questions about Hugging Face model card")
|
|
@@ -92,8 +83,8 @@ def create_gradio_app():
|
|
| 92 |
model_id=gr.Textbox(label="MODEL ID", value="google/gemma-2-2b")
|
| 93 |
user_input=gr.Textbox(label="Your Question",value="Ask something about the model card .....")
|
| 94 |
send=gr.Button("Ask")
|
| 95 |
-
|
| 96 |
-
send.click(fn=chat_function,inputs=[user_input,
|
| 97 |
return demo
|
| 98 |
gradio_app=create_gradio_app()
|
| 99 |
app=gr.mount_gradio_app(app,gradio_app,path="/")
|
|
|
|
| 56 |
agent_instance = await get_agent()
|
| 57 |
|
| 58 |
|
| 59 |
+
async def chat_function(user_message, history, model_id):
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 60 |
prompt=f"""You're an assistant helping with hugging face model cards.
|
| 61 |
+
First, run the tool `read_model_card` on repo_id `{model_id}` to get the model card.
|
| 62 |
+
Then answer this user question based on the model card:
|
| 63 |
+
User question: {user_message}"""
|
| 64 |
+
history = history + [(user_message, None)]
|
| 65 |
try:
|
| 66 |
+
response = ""
|
| 67 |
+
async for output in agent_instance.run(prompt):
|
| 68 |
+
if hasattr(output, "content") and output.content:
|
| 69 |
+
response = output.content
|
| 70 |
+
final_response = response or "⚠️ Sorry, I couldn't generate a response."
|
| 71 |
+
history[-1] = (user_message, final_response)
|
| 72 |
except Exception as e:
|
| 73 |
+
history[-1] = (user_message, f"⚠️ Error: {str(e)}")
|
| 74 |
return history, ""
|
| 75 |
|
| 76 |
|
| 77 |
|
| 78 |
+
|
| 79 |
def create_gradio_app():
|
| 80 |
with gr.Blocks(title="Model Card Chatbot") as demo:
|
| 81 |
gr.Markdown("## 🤖 Model Card Chatbot\nAsk questions about Hugging Face model card")
|
|
|
|
| 83 |
model_id=gr.Textbox(label="MODEL ID", value="google/gemma-2-2b")
|
| 84 |
user_input=gr.Textbox(label="Your Question",value="Ask something about the model card .....")
|
| 85 |
send=gr.Button("Ask")
|
| 86 |
+
chatbot=gr.Chatbot(label="chat")
|
| 87 |
+
send.click(fn=chat_function,inputs=[user_input,chatbot,model_id],outputs=[chatbot,user_input])
|
| 88 |
return demo
|
| 89 |
gradio_app=create_gradio_app()
|
| 90 |
app=gr.mount_gradio_app(app,gradio_app,path="/")
|
mcp_server.py
CHANGED
|
@@ -12,7 +12,7 @@ def read_model_card(repo_id:str)->str:
|
|
| 12 |
"""Tool to read and return the full model card from hugging face."""
|
| 13 |
try:
|
| 14 |
card=ModelCard.load(repo_id)
|
| 15 |
-
return json.dumps({"status":"success","readme":
|
| 16 |
|
| 17 |
except FileNotFoundError:
|
| 18 |
return json.dumps({"status":"error","message":"This model does not have a model card"})
|
|
|
|
| 12 |
"""Tool to read and return the full model card from hugging face."""
|
| 13 |
try:
|
| 14 |
card=ModelCard.load(repo_id)
|
| 15 |
+
return json.dumps({"status":"success","readme": card.text})
|
| 16 |
|
| 17 |
except FileNotFoundError:
|
| 18 |
return json.dumps({"status":"error","message":"This model does not have a model card"})
|