mcgame committed on
Commit
407caf6
1 Parent(s): 47735dd

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -27
app.py CHANGED
@@ -1,29 +1,12 @@
# Load the ChatYuan-large-v1 model and its tokenizer.
from transformers import T5Tokenizer, T5ForConditionalGeneration

import torch

tokenizer = T5Tokenizer.from_pretrained("ClueAI/ChatYuan-large-v1")
model = T5ForConditionalGeneration.from_pretrained("ClueAI/ChatYuan-large-v1")

# Bug fix: the original hard-coded torch.device('cuda'), which raises on
# CPU-only machines. Fall back to CPU when no GPU is available
# (inference is merely slower, not broken).
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)
11
def preprocess(text):
    """Escape literal newlines and tabs to two-character markers.

    Replaces "\n" with "\\n" and "\t" with "\\t" so the control
    characters survive as visible tokens; inverse of postprocess().
    """
    for raw, escaped in (("\n", "\\n"), ("\t", "\\t")):
        text = text.replace(raw, escaped)
    return text
14
 
15
def postprocess(text):
    """Undo preprocess(): turn "\\n"/"\\t" markers back into real characters."""
    for escaped, raw in (("\\n", "\n"), ("\\t", "\t")):
        text = text.replace(escaped, raw)
    return text
 
17
 
18
def answer(text, sample=True, top_p=1, temperature=0.7):
    """Generate a reply for *text* with the module-level ChatYuan model.

    sample: if True use nucleus sampling (good for generation tasks);
        otherwise use single-beam search.
    top_p: nucleus-sampling cutoff in (0, 1]; larger values give more
        diverse output.
    temperature: sampling temperature.
    Returns the decoded, unescaped generated string.
    """
    encoding = tokenizer(
        text=[preprocess(text)],
        truncation=True,
        padding=True,
        max_length=768,
        return_tensors="pt",
    ).to(device)
    common = dict(return_dict_in_generate=True, output_scores=False, max_new_tokens=512)
    if sample:
        out = model.generate(
            **encoding,
            **common,
            do_sample=True,
            top_p=top_p,
            temperature=temperature,
            no_repeat_ngram_size=3,
        )
    else:
        out = model.generate(**encoding, **common, num_beams=1, length_penalty=0.6)
    decoded = tokenizer.batch_decode(out["sequences"], skip_special_tokens=True)
    return postprocess(decoded[0])

print("end...")
 
1
import gradio as gr

description = "Story generation with GPT-2"
title = "Generate your own story"
examples = [["Adventurer is approached by a mysterious stranger in the tavern for a new quest."]]

# Bug fix: `title` was defined but never passed to the interface, so the
# page rendered without a title. Forward it alongside the description.
# NOTE(review): gr.Interface.load is deprecated in newer gradio releases
# (gr.load is the replacement) — confirm against the pinned gradio version.
interface = gr.Interface.load(
    "huggingface/pranavpsv/gpt2-genre-story-generator",
    title=title,
    description=description,
    examples=examples,
)

interface.launch()