Spaces:
Sleeping
Sleeping
last commits
Browse files
app.py
CHANGED
|
@@ -1,32 +1,22 @@
|
|
| 1 |
-
import
|
| 2 |
from transformers import pipeline
|
| 3 |
|
| 4 |
-
#
|
| 5 |
-
|
|
|
|
| 6 |
|
| 7 |
-
#
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
model_name = "deepset/bert-large-uncased-whole-word-masking-squad2"
|
| 11 |
-
return pipeline("question-answering", model=model_name, tokenizer=model_name)
|
| 12 |
|
| 13 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 14 |
|
| 15 |
-
#
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
context = st.text_area("📄 Enter Context", height=200, value="""
|
| 19 |
-
Trainer: Jai Ganesh S;
|
| 20 |
-
Session 1: Intro about Arenas & LLMs;
|
| 21 |
-
Session 2: Hands-on on LLMs
|
| 22 |
-
Session 3: LangChain - Intro
|
| 23 |
-
""")
|
| 24 |
-
|
| 25 |
-
question = st.text_input("❓ Ask a Question", value="What's the session 1?")
|
| 26 |
-
|
| 27 |
-
if st.button("Get Answer"):
|
| 28 |
-
with st.spinner("Thinking..."):
|
| 29 |
-
QA_input = {"question": question, "context": context}
|
| 30 |
-
result = nlp(QA_input)
|
| 31 |
-
st.success(f"🧠 **Answer**: {result['answer']}")
|
| 32 |
-
st.caption(f"Score: {result['score']:.2f} | Start: {result['start']}, End: {result['end']}")
|
|
|
|
| 1 |
+
import gradio as gr
|
| 2 |
from transformers import pipeline
|
| 3 |
|
| 4 |
+
# Load the Hugging Face pipeline used for classification.
# Default task-only call picks the stock sentiment model.
pipe = pipeline(task="sentiment-analysis")

# Alternative: a finance-tuned classifier (swap in by uncommenting).
# pipe = pipeline("text-classification", model="ProsusAI/finbert")
|
| 7 |
|
| 8 |
+
# Prediction callback wired into the Gradio interface below.
def analyze(text):
    """Run the module-level pipeline on *text* and return its raw prediction."""
    prediction = pipe(text)
    return prediction
|
|
|
|
|
|
|
| 11 |
|
| 12 |
+
# Build the Gradio interface: one free-text box in, raw pipeline JSON out.
text_box = gr.Textbox(label="Enter your input:", lines=3, placeholder="Type text here...")
result_view = gr.JSON(label="Result")

demo = gr.Interface(
    fn=analyze,
    inputs=text_box,
    outputs=result_view,
    title="Sentiment Analysis",
    description="Enter text and get sentiment analysis using Hugging Face Transformers.",
)

# Start the app (Spaces-style: launched unconditionally at import).
demo.launch()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|