app.py CHANGED

@@ -13,7 +13,7 @@ def main():
     st.subheader("UCLA DSU Project, Fall 2023")
     st.markdown("Daniel Mendelevitch \n Terry Ming \n Casey Tattersall \n Sean Tjoa")
 
-    st.header("What Are
+    st.header("What Are Transformers? πππ€")
 
     header_text = """A transformer is a specific type of neural network that uses a mechanism called self-attention to learn the context (and
     thus meaning) of sequential data. Transformer-based models can be used in many different domains, such as processing language, predicting
@@ -70,7 +70,7 @@ def main():
     # small_model = load_casey_model(tokenizer, device)
     model = load_big_model(tokenizer, device)
 
-    st.markdown(
+    st.markdown(print(model))
 
     if st.button('Write my story!'):
         placeholder = st.empty()
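The header_text added in the first hunk describes self-attention only in prose. As a rough illustration of the mechanism it names, here is a minimal scaled dot-product self-attention sketch in PyTorch; this is not code from app.py, and the random projection matrices are illustrative assumptions:

import torch
import torch.nn.functional as F

def self_attention(x, w_q, w_k, w_v):
    # x: (seq_len, d_model). Project each token into query/key/value vectors.
    q, k, v = x @ w_q, x @ w_k, x @ w_v
    # Score every token pair; scaling by sqrt(d_k) keeps the softmax stable.
    scores = (q @ k.transpose(-2, -1)) / (k.shape[-1] ** 0.5)
    weights = F.softmax(scores, dim=-1)  # one attention distribution per token
    return weights @ v                   # context-weighted mix of value vectors

d_model = 16
tokens = torch.randn(5, d_model)  # 5 tokens with 16-dim embeddings
out = self_attention(tokens,
                     torch.randn(d_model, d_model),
                     torch.randn(d_model, d_model),
                     torch.randn(d_model, d_model))
print(out.shape)  # torch.Size([5, 16]): same shape, now context-aware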
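A note on the second hunk: print() writes to stdout and returns None, so st.markdown(print(model)) sends the model's description to the server console while st.markdown itself receives None. A hedged sketch of what the line presumably intends (rendering the loaded model's architecture in the app; model is whatever load_big_model returns):

# Inside main(), after model = load_big_model(tokenizer, device):
st.text(str(model))  # render the model's repr as plain text in the app
# or, keeping st.markdown, wrap the repr in a preformatted block:
st.markdown(f"```\n{model}\n```")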