Nick088 committed on
Commit
bcaeb63
1 Parent(s): eb7f4aa
Files changed (1) hide show
  1. app.py +2 -5
app.py CHANGED
@@ -3,9 +3,6 @@ import torch
3
  import random
4
  from transformers import T5Tokenizer, T5ForConditionalGeneration
5
 
6
- tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-small")
7
-
8
-
9
  if torch.cuda.is_available():
10
  device = "cuda"
11
  print("Using GPU")
@@ -13,7 +10,7 @@ else:
13
  device = "cpu"
14
  print("Using CPU")
15
 
16
-
17
  model = T5ForConditionalGeneration.from_pretrained("roborovski/superprompt-v1", torch_dtype=torch.float16)
18
 
19
  model.to(device)
@@ -85,7 +82,7 @@ examples = [
85
 
86
  gr.Interface(
87
  fn=generate,
88
- inputs=[precision_model, prompt, system_prompt, max_new_tokens, repetition_penalty, temperature, top_p, top_k, seed],
89
  outputs=gr.Textbox(label="Better Prompt"),
90
  title="SuperPrompt-v1",
91
  description="Make your prompts more detailed!<br>Model used: https://huggingface.co/roborovski/superprompt-v1<br>Hugging Face Space made by [Nick088](https://linktr.ee/Nick088)",
 
3
  import random
4
  from transformers import T5Tokenizer, T5ForConditionalGeneration
5
 
 
 
 
6
  if torch.cuda.is_available():
7
  device = "cuda"
8
  print("Using GPU")
 
10
  device = "cpu"
11
  print("Using CPU")
12
 
13
+ tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-small")
14
  model = T5ForConditionalGeneration.from_pretrained("roborovski/superprompt-v1", torch_dtype=torch.float16)
15
 
16
  model.to(device)
 
82
 
83
  gr.Interface(
84
  fn=generate,
85
+ inputs=[prompt, system_prompt, max_new_tokens, repetition_penalty, temperature, top_p, top_k, seed],
86
  outputs=gr.Textbox(label="Better Prompt"),
87
  title="SuperPrompt-v1",
88
  description="Make your prompts more detailed!<br>Model used: https://huggingface.co/roborovski/superprompt-v1<br>Hugging Face Space made by [Nick088](https://linktr.ee/Nick088)",