v-yihangzhai committed on
Commit 3b0b6c9
1 Parent(s): 7ac198e
Files changed (1)
  1. app.py +4 -18
app.py CHANGED
@@ -1,25 +1,11 @@
 import gradio as gr
 from transformers import AutoTokenizer, AutoModelForCausalLM
-
+from transformers import pipeline, set_seed
 def vicuna(input):
-    tokenizer = AutoTokenizer.from_pretrained("gpt2")
-    model = AutoModelForCausalLM.from_pretrained("gpt2")
-    model.half().cuda()
 
-    # prompt = """\
-    # ### Human: Write a Python script for text classification using Transformers and PyTorch
-    # ### Assistant:\
-    # """
-    prompt=input
-    inputs = tokenizer(prompt, return_tensors='pt').to('cuda')
-    tokens = model.generate(
-        **inputs,
-        max_new_tokens=256,
-        do_sample=True,
-        temperature=1.0,
-        top_p=1.0,
-    )
-    print(tokenizer.decode(tokens[0], skip_special_tokens=True))
+    generator = pipeline('text-generation', model='gpt2')
+    set_seed(42)
+    generator(input, max_length=30, num_return_sequences=5)
 
 
 
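Note: as committed, vicuna() calls the pipeline but discards its result, so a Gradio interface bound to it would display nothing. A minimal sketch of how the new version could return the generated text, assuming the app wraps vicuna in a gr.Interface (that wiring and the return/join logic are assumptions, not part of this commit):

import gradio as gr
from transformers import pipeline, set_seed

# Build the pipeline once at import time rather than on every request (assumption).
generator = pipeline('text-generation', model='gpt2')
set_seed(42)

def vicuna(prompt):
    # The text-generation pipeline returns a list of dicts, each with a 'generated_text' key.
    outputs = generator(prompt, max_length=30, num_return_sequences=5)
    return "\n\n".join(o['generated_text'] for o in outputs)

# Hypothetical Gradio wiring; not shown in this hunk.
demo = gr.Interface(fn=vicuna, inputs="text", outputs="text")
demo.launch()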