Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -2,13 +2,13 @@ import torch
|
|
2 |
import streamlit as st
|
3 |
from transformers import AutoTokenizer, T5Tokenizer, T5ForConditionalGeneration, GenerationConfig, AutoModelForCausalLM
|
4 |
|
5 |
-
st.title('How
|
6 |
|
7 |
model_checkpoint = st.selectbox(
|
8 |
"Model:",
|
9 |
("google/flan-t5-base", "google/flan-t5-xl")
|
10 |
)
|
11 |
-
temperature = st.number_input('Temperature: ', min_value=0, max_value=1, value=0.5, format='%f')
|
12 |
|
13 |
tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)
|
14 |
|
|
|
2 |
import streamlit as st
|
3 |
from transformers import AutoTokenizer, T5Tokenizer, T5ForConditionalGeneration, GenerationConfig, AutoModelForCausalLM
|
4 |
|
5 |
+
st.title('How do LLMs choose their words?')
|
6 |
|
7 |
model_checkpoint = st.selectbox(
|
8 |
"Model:",
|
9 |
("google/flan-t5-base", "google/flan-t5-xl")
|
10 |
)
|
11 |
+
temperature = st.number_input('Temperature: ', min_value=0.0, max_value=1.0, value=0.5, format='%f')
|
12 |
|
13 |
tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)
|
14 |
|