Unggi committed on
Commit 05b48f9 • 1 Parent(s): 60452c9
Files changed (2):
  1. app.py +24 -3
  2. requirements.txt +7 -0
app.py CHANGED
@@ -48,16 +48,37 @@ def inference(prompt):
         pad_token_id=tokenizer.pad_token_id,
         eos_token_id=tokenizer.eos_token_id,
         bos_token_id=tokenizer.bos_token_id,
-        use_cache=True)
-    outputs = tokenizer.decode(gen_ids[0])
+        use_cache=True,
+        do_sample=True,
+        top_k=50,
+        top_p=0.92,
+        num_return_sequences=3
+    )
+
+    outputs = []
+
+    for gen_id in gen_ids:
+        output = tokenizer.decode(gen_id.tolist(), skip_special_tokens=True)
+
+        if prompt in output:
+            output = output.replace(prompt, '')
+
+        output = output.split('.')[0]
+        outputs.append(output)
 
     return outputs
 
 
+# demo = gr.Interface(
+#     fn=inference,
+#     inputs="text",
+#     outputs="text"  # return value
+# ).launch()  # setting launch(share=True) creates a link that can be accessed from outside
+
 demo = gr.Interface(
     fn=inference,
     inputs="text",
     outputs="text"  # return value
-).launch()  # setting launch(share=True) creates a link that can be accessed from outside
+).launch(share=True)  # setting launch(share=True) creates a link that can be accessed from outside
 
 demo.launch()
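For reference, here is a minimal, self-contained sketch of how the inference path reads after this commit. The model loading lines (the AutoTokenizer/AutoModelForCausalLM calls and the placeholder "gpt2" checkpoint name), the max_length value, and the final join of the three candidates into one string are assumptions for illustration; they sit outside the hunk shown above.

# Sketch only: checkpoint name, loading code, and max_length are assumed, not taken from the diff.
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_NAME = "gpt2"  # placeholder; the Space's actual checkpoint is not visible in this diff
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)

def inference(prompt):
    input_ids = tokenizer.encode(prompt, return_tensors="pt")
    gen_ids = model.generate(
        input_ids,
        max_length=64,                       # assumed; not shown in the hunk
        pad_token_id=tokenizer.pad_token_id,
        eos_token_id=tokenizer.eos_token_id,
        bos_token_id=tokenizer.bos_token_id,
        use_cache=True,
        do_sample=True,           # sample instead of greedy decoding
        top_k=50,                 # restrict sampling to the 50 most likely next tokens
        top_p=0.92,               # nucleus sampling over the top 92% of probability mass
        num_return_sequences=3,   # produce three candidate continuations
    )

    outputs = []
    for gen_id in gen_ids:
        output = tokenizer.decode(gen_id.tolist(), skip_special_tokens=True)
        # drop the echoed prompt and keep only the first sentence of the continuation
        if prompt in output:
            output = output.replace(prompt, "")
        output = output.split(".")[0]
        outputs.append(output)

    # joined here so a single Gradio text box can display all three candidates;
    # the committed code returns the raw list instead
    return "\n".join(outputs)

demo = gr.Interface(fn=inference, inputs="text", outputs="text")
demo.launch(share=True)  # share=True exposes a temporary public URL

One difference worth noting: the committed hunk chains .launch(share=True) onto the gr.Interface(...) constructor and then calls demo.launch() again on its return value, whereas the sketch assigns the interface first and launches it once, since launch() does not return the Interface object.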
requirements.txt ADDED
@@ -0,0 +1,7 @@
+konlpy
+scikit-learn
+numpy
+gradio
+torch
+transformers
+pandas
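The added requirements carry no version pins; a Hugging Face Space installs this file automatically when the app is built, and for local runs the standard pip install -r requirements.txt reproduces the same package set.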