import requests as rq
import streamlit as st
import transformers
import torch

# Hugging Face model identifier for the base (non-instruct) Llama 3 8B model.
model_id = "meta-llama/Meta-Llama-3-8B"


@st.cache_resource
def _load_pipeline():
    """Build the text-generation pipeline once and cache it across reruns.

    Streamlit re-executes this script on every widget interaction; without
    caching, the 8B model would be re-loaded on each button click.
    bfloat16 halves memory vs fp32, and device_map="auto" lets the loader
    place the model on whatever accelerator(s) are available.
    """
    return transformers.pipeline(
        "text-generation",
        model=model_id,
        model_kwargs={"torch_dtype": torch.bfloat16},
        device_map="auto",
    )


# Module-level name kept so existing callers (e.g. query()) are unchanged.
pipeline = _load_pipeline()


def query(question):
    """Run the text-generation pipeline on *question* and return its raw output."""
    result = pipeline(question)
    return result


# --- Streamlit UI -----------------------------------------------------------
st.title("LLama3 8B T")
st.subheader("This is a demo of the LLama3 8B T model.")

user_input = st.text_area("Enter your text here:")
if st.button("Submit"):
    # Robustness: don't send blank/whitespace-only input to the model —
    # that would burn an expensive generation call for no useful output.
    if not user_input.strip():
        st.warning("Please enter some text before submitting.")
    else:
        answer = query(user_input)
        # st.write renders the pipeline's raw return value (a list of dicts
        # with a "generated_text" key, per the transformers pipeline API).
        st.write(answer)