imperialwool committed on
Commit
b995a3b
1 Parent(s): c312f48

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -2
app.py CHANGED
@@ -11,7 +11,7 @@ model.eval()
11
  with open('../system.prompt', 'r', encoding='utf-8') as f:
12
  prompt = f.read()
13
 
14
- @app.post("/echo")
15
  async def echo():
16
  data = await request.get_json()
17
  if data.get("max_tokens") != None and data.get("max_tokens") > 500: data['max_tokens'] = 500
@@ -23,4 +23,8 @@ async def echo():
23
  do_sample=random.choice([True, False]), temperature=float(random.randint(7,20)) / 10.0,
24
  max_new_tokens=data.get("max_tokens") or random.randomint(200,500),
25
  eos_token_id=tokenizer.eos_token_id, return_full_text = False)
26
- return {"output": tokenizer.decode(output_ids[0], skip_special_tokens=True)}
 
 
 
 
 
11
  with open('../system.prompt', 'r', encoding='utf-8') as f:
12
  prompt = f.read()
13
 
14
+ @app.post("/request")
15
  async def echo():
16
  data = await request.get_json()
17
  if data.get("max_tokens") != None and data.get("max_tokens") > 500: data['max_tokens'] = 500
 
23
  do_sample=random.choice([True, False]), temperature=float(random.randint(7,20)) / 10.0,
24
  max_new_tokens=data.get("max_tokens") or random.randomint(200,500),
25
  eos_token_id=tokenizer.eos_token_id, return_full_text = False)
26
+ return {"output": tokenizer.decode(output_ids[0], skip_special_tokens=True)}
27
+
28
+ @app.get("/")
29
+ async def get():
30
+ return "better to run it on own container"