Danil committed on
Commit abe5a52 • 1 Parent(s): 5f59db3
Files changed (2)
  1. app.py +1 -1
  2. server.py +1 -1
app.py CHANGED
@@ -35,7 +35,7 @@ repetition_penalty = st.slider('repetition_penalty', 1.0, 10.0, 1.0)
 
 if gen:
     c.code('Generating...')
-    req = f"http://0.0.0.0:8080?input_text={txt}&top_p={int(top_p*100)}&top_k={int(top_k*100)}&temperature={int(temperature*100)}&num_beams={int(num_beams*100)}&repetition_penalty={int(top_p*100)}&max_length={max_length}"
+    req = f"http://0.0.0.0:8080?input_text={txt}&top_p={top_p}&top_k={top_k}&temperature={temperature}&num_beams={num_beams}&repetition_penalty={repetition_penalty}&max_length={max_length}"
     res = requests.get(req)
     print('ok')
     c.code(json.loads(res.text))
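The new client code sends the raw slider values instead of scaling them by 100. As a rough sketch only (not part of this commit), the same request could be built with requests' params argument, which also URL-encodes input_text; it assumes the txt, c, and slider variables defined earlier in app.py:

import json      # already imported in app.py
import requests  # already imported in app.py

# Sketch only: same request as the new req line, but with an explicit
# params dict so requests builds and URL-encodes the query string.
params = {
    "input_text": txt,                         # text from the Streamlit input
    "top_p": top_p,                            # raw float slider values,
    "top_k": top_k,                            # no *100 scaling
    "temperature": temperature,
    "num_beams": num_beams,
    "repetition_penalty": repetition_penalty,
    "max_length": max_length,
}
res = requests.get("http://0.0.0.0:8080", params=params)
c.code(json.loads(res.text))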
server.py CHANGED
@@ -12,6 +12,6 @@ print('load ok')
 @app.get("/")
 def read_root(input_text, max_length, top_p, top_k, num_beams, temperature, repetition_penalty):
     inpt = tokenizer.encode(input_text, return_tensors="pt")
-    out = model.generate(inpt, max_length=max_length, top_p=float(top_p)/100, top_k=float(top_k)/100, temperature=float(temperature)/100, num_beams=float(num_beams)/100, repetition_penalty=float(repetition_penalty)/100)
+    out = model.generate(inpt, max_length=int(max_length), top_p=float(top_p), top_k=float(top_k), temperature=float(temperature), num_beams=int(num_beams), repetition_penalty=float(repetition_penalty))
     res = tokenizer.decode(out[0])
     return {res}
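FastAPI passes untyped query parameters to the handler as strings, which is why the new line casts each one explicitly. As an alternative sketch (not part of this commit), type annotations on the handler let FastAPI do the conversion and validation itself; note that transformers' generate() documents top_k as an integer, so it is annotated as int here rather than cast to float as in the commit. It assumes the app, tokenizer, and model objects defined earlier in server.py:

# Sketch, assuming the app, tokenizer, and model objects from server.py.
# FastAPI coerces each query parameter to the annotated type and responds
# with a 422 validation error if a value cannot be parsed.
@app.get("/")
def read_root(input_text: str, max_length: int, top_p: float, top_k: int,
              num_beams: int, temperature: float, repetition_penalty: float):
    inpt = tokenizer.encode(input_text, return_tensors="pt")
    out = model.generate(inpt, max_length=max_length, top_p=top_p, top_k=top_k,
                         temperature=temperature, num_beams=num_beams,
                         repetition_penalty=repetition_penalty)
    res = tokenizer.decode(out[0])
    return {res}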