ChihChiu29 committed
Commit 3d24e16 · 1 Parent(s): cd7f436

try again with xl
main.py CHANGED
@@ -18,8 +18,10 @@ from transformers import T5Tokenizer, T5ForConditionalGeneration
 # model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-small")
 # tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-base")
 # model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-base")
-tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-large")
-model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-large")
+# tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-large")
+# model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-large")
+tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-xl")
+model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-xl")
 
 token_size_limit = 128
 
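
For context, a minimal usage sketch of the state after this change, assuming (not confirmed by the commit) that main.py feeds prompts through the model with inputs truncated to token_size_limit tokens. The generate helper, the example prompt, and the max_new_tokens value are illustrative assumptions, not taken from the repository.

# Minimal sketch, assuming the model is used for text generation.
from transformers import T5Tokenizer, T5ForConditionalGeneration

tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-xl")
model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-xl")

token_size_limit = 128

def generate(prompt: str) -> str:
    # Truncate the prompt to token_size_limit tokens before generation.
    inputs = tokenizer(
        prompt,
        return_tensors="pt",
        truncation=True,
        max_length=token_size_limit,
    )
    # max_new_tokens is an illustrative choice, not from the commit.
    output_ids = model.generate(**inputs, max_new_tokens=token_size_limit)
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)

print(generate("Translate English to German: How old are you?"))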