0x7o committed on
Commit 93d9701 (1 parent: fae0084)

Update app.py

Files changed (1): app.py (+1, -1)
app.py CHANGED
@@ -8,7 +8,7 @@ if torch.cuda.is_available():
     model = AutoModelForCausalLM.from_pretrained("ai-forever/ruGPT-3.5-13B", load_in_8bit=True, device_map="auto")
     pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
 
-    @spaces.GPU
+    @spaces.GPU(duration=600)
     def predict(prompt, temperature, max_length):
         return pipe(prompt, temperature=temperature, max_length=max_length, top_p=0.95, top_k=50, do_sample=True)[0]["generated_text"]
 
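For context, below is a minimal sketch of how the decorated function sits in app.py. Only the decorator change is confirmed by this commit; the imports and the tokenizer line are assumptions inferred from the identifiers used in the diff. In the `spaces` package for ZeroGPU Spaces, `@spaces.GPU(duration=600)` requests a longer GPU allocation (600 seconds) per call than the bare `@spaces.GPU` default.

```python
# Minimal sketch of the surrounding app.py; only the decorator change is
# confirmed by this commit. Imports and the tokenizer line are assumptions
# based on the identifiers used in the diff.
import spaces
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

tokenizer = AutoTokenizer.from_pretrained("ai-forever/ruGPT-3.5-13B")

if torch.cuda.is_available():
    model = AutoModelForCausalLM.from_pretrained(
        "ai-forever/ruGPT-3.5-13B", load_in_8bit=True, device_map="auto"
    )
    pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)

    # ZeroGPU: request up to 600 seconds of GPU time per call
    # (the previous bare @spaces.GPU used the default allocation).
    @spaces.GPU(duration=600)
    def predict(prompt, temperature, max_length):
        return pipe(
            prompt,
            temperature=temperature,
            max_length=max_length,
            top_p=0.95,
            top_k=50,
            do_sample=True,
        )[0]["generated_text"]
```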