# Import the required libraries
import streamlit as st
from transformers import pipeline

# Create a calculator function backed by a Llama model
def calculator(expression):
    # Build a text-generation pipeline (Llama models are causal LMs, so the
    # task is "text-generation", not "text2text-generation").
    # NOTE: "EleutherAI/llama" is not a published checkpoint on the Hugging
    # Face Hub; point this at an available Llama-family repo id before running.
    model = pipeline("text-generation", model="EleutherAI/llama")
    # Generate the answer using the model
    answer = model(expression)[0]["generated_text"]
    # Return the answer
    return answer
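
# Optional sketch (not in the original file): cache the pipeline with
# st.cache_resource (available in Streamlit >= 1.18) so the model loads once
# per session instead of on every Streamlit rerun. The load_model name is a
# hypothetical helper; calculator() above could call load_model() instead of
# building the pipeline itself.
@st.cache_resource
def load_model():
    # Same assumption as above: swap in a real Llama-family repo id.
    return pipeline("text-generation", model="EleutherAI/llama")
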
# Create a Streamlit app to display the calculator
st.title("Calculator using Llama model")
# Get the user input
expression = st.text_input("Enter an expression to calculate:")
# Check if the input is not empty
if expression:
    # Call the calculator function and display the answer
    answer = calculator(expression)
    st.write(f"The answer is: {answer}")