Spaces: Running on A10G
Commit c6f1661 · Parent: 297ba74
Update app.py
app.py CHANGED
@@ -7,7 +7,7 @@ import torch
 print(f"Is CUDA available: {torch.cuda.is_available()}")
 print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
 
-pipe_flan = pipeline("text2text-generation", model="google/flan-t5-
+pipe_flan = pipeline("text2text-generation", model="google/flan-t5-xl", device="cuda:0")
 pipe_vanilla = pipeline("text2text-generation", model="t5-large", device="cuda:0")
 
 examples = [
@@ -21,7 +21,7 @@ examples = [
 ]
 
 title = "Flan T5 and Vanilla T5"
-description = "This demo compares [T5-large](https://huggingface.co/t5-large) and [Flan-T5-large](https://huggingface.co/
+description = "This demo compares [T5-large](https://huggingface.co/t5-large) and [Flan-T5-X-large](https://huggingface.co/google/flan-t5-xl). Note that T5 expects a very specific format of the prompts, so the examples below are not necessarily the best prompts to compare."
 
 def inference(text):
     output_flan = pipe_flan(text, max_length=100)[0]["generated_text"]
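For context, a minimal sketch of how the two pipelines touched by this commit are typically wired into a Gradio comparison Space. Only the pipeline constructors, the title, and the first line of inference() are confirmed by the diff above; the second generation call, the gr.Interface layout, and the output labels are assumptions for illustration, not the Space's actual code.

# Illustrative sketch only; the Gradio wiring below is assumed, not taken from the diff.
import gradio as gr
from transformers import pipeline

# Constructors as they appear after this commit.
pipe_flan = pipeline("text2text-generation", model="google/flan-t5-xl", device="cuda:0")
pipe_vanilla = pipeline("text2text-generation", model="t5-large", device="cuda:0")

title = "Flan T5 and Vanilla T5"

def inference(text):
    # Run the same prompt through both models so their generations can be compared side by side.
    output_flan = pipe_flan(text, max_length=100)[0]["generated_text"]
    output_vanilla = pipe_vanilla(text, max_length=100)[0]["generated_text"]
    return output_flan, output_vanilla

demo = gr.Interface(
    fn=inference,
    inputs=gr.Textbox(label="Prompt"),
    outputs=[gr.Textbox(label="Flan-T5-XL"), gr.Textbox(label="T5-large")],
    title=title,
)
demo.launch()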