ldhldh committed
Commit 367ba66 • Parent(s): 39770fa

Update app.py

Files changed (1)
  1. app.py +6 -19
app.py CHANGED
@@ -181,8 +181,6 @@ Slender는 말을 하지 못합니다.
 }
 
 
-from openai import OpenAI
-client = OpenAI()
 def cleanText(readData):
 
     #텍스트에 포함되어 있는 특수 문자 제거
@@ -264,25 +262,14 @@ def chat(id, npc, text):
 
     """
 
-        inputs = tokenizer("아", return_tensors="pt")["input_ids"]
-        outputs = model.generate(inputs, do_sample=True, temperature=0.6, top_p=0.75, max_new_tokens=2)
-        #output = tokenizer.decode(outputs[0])[len(prom)+3:-1].split("<")[0].split("###")[0].replace(". ", ".\n")
-        #output = cleanText(output)
-        #print(tokenizer.decode(outputs[0]))
+        inputs = tokenizer(prom+f"{npc}의 대답을 다음 문장에 맞춰 자연스럽게 작성해주세요. 한 문장만 작성하세요.\n\n{id}:" + text+f"\n\n{npc}:", return_tensors="pt")["input_ids"]
+        outputs = model.generate(inputs, do_sample=True, temperature=0.8, top_p=0.75, max_new_tokens=200)
+        output = tokenizer.decode(outputs[0])[len(prom)+3:-1].split("<")[0].split("###")[0].replace(". ", ".\n")
+        output = cleanText(output)
+        print(tokenizer.decode(outputs[0]))
         #output = f"{npc}의 응답입니다."
 
-
-        response = client.chat.completions.create(
-            model=os.environ['MODEL'],
-            messages=[
-                {"role": "system", "content": prom},
-                {"role": "user", "content": f"{npc}의 대답을 다음 문장에 맞춰 자연스럽게 작성해주세요. 한 문장만 작성하세요.\n\n{id}:" + text+f"\n\n{npc}:"},
-            ]
-        )
-        output = response.choices[0].message.content
-        output = output.replace(".",".\n")
-        time.sleep(10)
-        print(output)
+        #print(output)
         history[npc][id] += f"{id}:{text}"
     else:
        output = "no model, GPU를 더 공유해주세요."