import requests as rq
import streamlit as st
import transformers
import torch

# Hugging Face hub id of the 8B-parameter Llama 3 base (non-instruct) model.
model_id = "meta-llama/Meta-Llama-3-8B"


@st.cache_resource
def _load_pipeline():
    """Build the text-generation pipeline once per Streamlit server process.

    Streamlit re-executes this script top-to-bottom on every user
    interaction; without caching, the multi-GB model weights would be
    reloaded on each rerun. ``st.cache_resource`` memoizes the loaded
    pipeline for the lifetime of the server process.
    """
    return transformers.pipeline(
        "text-generation",
        model=model_id,
        model_kwargs={"torch_dtype": torch.bfloat16},  # half-precision weights to cut memory
        device_map="auto",  # let accelerate place layers on available devices
    )


# Module-level name kept so downstream code (query) is unchanged.
pipeline = _load_pipeline()
def query(question):
    """Run the text-generation pipeline on *question*.

    Returns the raw pipeline output — per the transformers text-generation
    pipeline API, a list of dicts of the form
    ``[{"generated_text": ...}]``.
    """
    return pipeline(question)
# --- Streamlit UI -----------------------------------------------------------
st.title("LLama3 8B T")
st.subheader("This is a demo of the LLama3 8B T model.")

# Free-form prompt box; generation only runs on an explicit button press.
user_input = st.text_area("Enter your text here:")
if st.button("Submit"):
    # Guard against empty/whitespace prompts so we don't run generation
    # (and tie up the model) on no input at all.
    if user_input.strip():
        answer = query(user_input)
        st.write(answer)
    else:
        st.warning("Please enter some text before submitting.")