Commit e17ea47 · Parent(s): ededc7b
gpu
app.py CHANGED
@@ -13,12 +13,13 @@ goldfish_model = 'goldfish-models/' + model_name
 config = AutoConfig.from_pretrained(goldfish_model, cache_dir=HF_CACHE)
 tokenizer = AutoTokenizer.from_pretrained(goldfish_model, cache_dir=HF_CACHE)
 model = AutoModelForCausalLM.from_pretrained(goldfish_model, config=config, cache_dir=HF_CACHE)
+
 if torch.cuda.is_available():
     model = model.cuda()  # Load onto GPU


 # Create text generation pipeline
-text_generator = pipeline('text-generation', model=model, tokenizer=tokenizer)
+text_generator = pipeline('text-generation', model=model, tokenizer=tokenizer, device=0 if torch.cuda.is_available() else -1)

 # Function to generate text
 @spaces.GPU
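
For context, a minimal, self-contained sketch of the setup this commit produces: the pipeline now gets an explicit device argument (0 for the first CUDA GPU, -1 for CPU) instead of relying on the default CPU placement. The model_name and HF_CACHE values below are assumptions for illustration only; they are defined earlier in app.py and are not part of this hunk, and the @spaces.GPU decorator is omitted so the snippet runs outside a Hugging Face Space.

import torch
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer, pipeline

# Assumed for illustration; both are defined earlier in app.py, outside this hunk.
model_name = 'eng_latn_10mb'   # hypothetical Goldfish model id
HF_CACHE = './hf_cache'        # hypothetical cache directory
goldfish_model = 'goldfish-models/' + model_name

config = AutoConfig.from_pretrained(goldfish_model, cache_dir=HF_CACHE)
tokenizer = AutoTokenizer.from_pretrained(goldfish_model, cache_dir=HF_CACHE)
model = AutoModelForCausalLM.from_pretrained(goldfish_model, config=config, cache_dir=HF_CACHE)

if torch.cuda.is_available():
    model = model.cuda()  # move the weights onto the GPU

# device=0 pins the pipeline to the first CUDA device; -1 keeps it on CPU.
text_generator = pipeline(
    'text-generation',
    model=model,
    tokenizer=tokenizer,
    device=0 if torch.cuda.is_available() else -1,
)

print(text_generator('The goldfish', max_new_tokens=20)[0]['generated_text'])

Calling model.cuda() and passing device to pipeline() are largely redundant, since the pipeline moves the model to the given device itself; keeping the explicit device argument simply makes the placement unambiguous when the pipeline wraps an already-loaded model.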