Updating app.py
app.py CHANGED
@@ -1,6 +1,7 @@
 from transformers import AutoTokenizer, AutoModelForCausalLM
 import transformers
 import torch
+import streamlit as st
 
 model = "tiiuae/falcon-7b-instruct"
 
@@ -21,5 +22,6 @@ sequences = pipeline(
     num_return_sequences=1,
     eos_token_id=tokenizer.eos_token_id,
 )
+st.title("Beyond the Anti-Jam: Integration of DRL with LLM")
 for seq in sequences:
-
+    st.write(f"Result: {seq['generated_text']}")
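For context, a minimal sketch of what the full app.py could look like after this commit. Only the lines visible in the diff above are confirmed; the tokenizer/pipeline setup and the prompt (lines 7-21, not shown here) are assumptions that follow the common tiiuae/falcon-7b-instruct text-generation pipeline pattern, and the prompt string and generation settings other than num_return_sequences and eos_token_id are placeholders.

from transformers import AutoTokenizer, AutoModelForCausalLM
import transformers
import torch
import streamlit as st

model = "tiiuae/falcon-7b-instruct"

# Assumed setup (these lines are not shown in the diff); sketched after the
# standard falcon-7b-instruct text-generation pipeline example.
tokenizer = AutoTokenizer.from_pretrained(model)
pipeline = transformers.pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    torch_dtype=torch.bfloat16,
    device_map="auto",
)
sequences = pipeline(
    "How could deep reinforcement learning be combined with an LLM for anti-jamming?",  # placeholder prompt
    max_length=200,        # assumed generation settings
    do_sample=True,        # assumed
    top_k=10,              # assumed
    num_return_sequences=1,
    eos_token_id=tokenizer.eos_token_id,
)

# Streamlit UI added in this commit: render the page title and the generated text.
st.title("Beyond the Anti-Jam: Integration of DRL with LLM")
for seq in sequences:
    st.write(f"Result: {seq['generated_text']}")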