# Streamlit app: sentiment analysis, summarization, and keyword extraction
# backed by the Hugging Face Inference API (Meta-Llama-3-8B-Instruct).
# Standard-library imports.
import os
import time

# Third-party imports.
import requests
import streamlit as st

# Hugging Face Inference API endpoint for the Llama-3 8B Instruct model.
api_url = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B-Instruct"
# The API token is read from the environment ('HFSecret') — never hard-coded.
# NOTE(review): if the variable is unset this is None and requests will send
# "Bearer None"; the API will then reject calls with a 401.
api_key = os.getenv('HFSecret')
headers = {
    "Authorization": f"Bearer {api_key}"
}
# API call function
def call_huggingface_api(prompt):
    """POST *prompt* to the HF Inference API and return the parsed JSON.

    Shows a Streamlit error and returns None on any non-200 response or
    network failure, so callers can simply truth-test the result.
    """
    data = {"inputs": prompt, "parameters": {"max_length": 500, "temperature": 0.5}}
    try:
        # Timeout prevents the app from hanging forever on a stalled request.
        response = requests.post(api_url, headers=headers, json=data, timeout=60)
    except requests.RequestException as exc:
        st.error(f"Request failed: {exc}")
        return None
    if response.status_code != 200:
        st.error(f"Error: {response.status_code} - {response.text}")
        return None
    return response.json()
# Function to load text from a URL
def load_text_from_url(url):
    """Fetch *url* and return its body text, or "" on any failure.

    Mirrors the best-effort contract of the original: a non-200 status —
    and now also a network error — yields an empty string instead of raising.
    """
    try:
        response = requests.get(url, timeout=30)
    except requests.RequestException:
        return ""
    return response.text if response.status_code == 200 else ""
# Preset sample texts: display name -> raw-text URL.
url_dict = {
    'Appreciation Letter': "https://raw.githubusercontent.com/peteciank/public_files/main/Transformers/Appreciation_Letter.txt",
    'Regret Letter': "https://raw.githubusercontent.com/peteciank/public_files/main/Transformers/Regret_Letter.txt",
    'Kindness Tale': "https://raw.githubusercontent.com/peteciank/public_files/main/Transformers/Kindness_Tale.txt",
    'Lost Melody Tale': "https://raw.githubusercontent.com/peteciank/public_files/main/Transformers/Lost_Melody_Tale.txt",
    'Twitter Example 1': "https://raw.githubusercontent.com/peteciank/public_files/main/Transformers/Twitter_Example_1.txt",
    'Twitter Example 2': "https://raw.githubusercontent.com/peteciank/public_files/main/Transformers/Twitter_Example_2.txt"
}
# Dropdown options are derived from the dict so the two can never drift apart
# (dicts preserve insertion order, so the visible order is unchanged).
options = ['None', *url_dict]
# --- Streamlit layout ---
st.title("Sentiment Analysis, Summarization, and Keyword Extraction")

# Dropdown to select a preset text ('None' enables free-form input below).
selected_option = st.selectbox("Select a preset option", options)

# Text to analyze: either a preset fetched from GitHub or user-entered text.
text_input = ""
if selected_option != 'None':
    with st.spinner("Loading text..."):
        text_input = load_text_from_url(url_dict[selected_option])
        time.sleep(1)  # Simulate loading time
        st.success("Text loaded!")
else:
    text_input = st.text_area("Or enter your own text for analysis")
if st.button("Analyze"):
    if text_input:
        with st.spinner('Processing...'):
            # All three tasks reuse the same LLM endpoint with different prompts.
            sentiment_prompt = f"Perform sentiment analysis on the following text: {text_input}"
            sentiment_result = call_huggingface_api(sentiment_prompt)

            summarization_prompt = f"Summarize the following text: {text_input}"
            summarization_result = call_huggingface_api(summarization_prompt)

            keyword_prompt = f"Extract important keywords from the following text: {text_input}"
            keyword_result = call_huggingface_api(keyword_prompt)

            time.sleep(1)  # Simulate a small delay
        st.success('Analysis completed!')

        # Display results in collapsible expanders; each API call may have
        # returned None on failure, so every section is guarded.
        if sentiment_result:
            with st.expander("Sentiment Analysis (Conclusion)"):
                st.write("Conclusion: Positive :) or Negative :( ")
                st.write(sentiment_result[0]['generated_text'])
        if summarization_result:
            with st.expander("Summarization"):
                st.write(summarization_result[0]['generated_text'])
        if keyword_result:
            with st.expander("Keyword Extraction"):
                st.write(keyword_result[0]['generated_text'].split(','))  # Display keywords as list
    else:
        st.warning("Please enter some text for analysis.")