# app.py
import gradio as gr
from openai import OpenAI
import os
client = OpenAI(api_key=os.environ.get('OPENAI_API_KEY'))
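
# The three bots below (counseling, translation, novel) share this client.
# Each handler sends a single, stateless chat-completion request per call;
# the counseling bot's on-screen history is not re-sent to the API.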
#์ƒ๋‹ด๋ด‡
def counseling_bot_chat(message, chat_history):
if message == '':
return "", chat_history
else:
completion=client.chat.completions.create(
model='gpt-3.5-turbo',
messages=[
{"role":'system','content':'๋‹น์‹ ์€ ๊ฒฝ์ œ ์ „๋ฌธ ๊ธฐ์ž ์ž…๋‹ˆ๋‹ค. ๊ฒฝ์ œ๋ถ„์•ผ์™€ ๊ด€๋ จ๋˜์ง€ ์•Š๋Š” ์งˆ๋ฌธ์—๋Š” ์ •์ค‘ํžˆ ๊ฑฐ์ ˆํ•˜์„ธ์š”.'},
{'role':'user','content':message}
]
)
chat_history.append([message, completion.choices[0].message.content])
return '', chat_history
def counseling_bot_undo(chat_history):
    if len(chat_history) > 1:
        chat_history.pop()
    return chat_history
def counseling_bot_reset(chat_history):
    chat_history = [[None, '์•ˆ๋…•ํ•˜์„ธ์š”. ๊ฒฝ์ œ ์ „๋ฌธ๊ธฐ์ž์ž…๋‹ˆ๋‹ค. ๊ถ๊ธˆํ•œ ์  ๋ฌผ์–ด๋ณด์„ธ์š”.']]
    return chat_history
# Translation bot
def translate_bot(output_conditions, output_language, input_text):
    if input_text == '':
        return ''
    else:
        if output_conditions != '':
            output_conditions = '๋ฒˆ์—ญํ•  ๋•Œ์˜ ์กฐ๊ฑด์€ ๋‹ค์Œ๊ณผ ๊ฐ™์Šต๋‹ˆ๋‹ค. ' + output_conditions
        completion = client.chat.completions.create(
            model='gpt-3.5-turbo',
            messages=[
                {'role': 'system', 'content': '๋‹น์‹ ์€ ๋ฒˆ์—ญ๊ฐ€์ž…๋‹ˆ๋‹ค. ์ž…๋ ฅํ•œ ์–ธ์–ด๋ฅผ ๋‹ค๋ฅธ ์„ค๋ช… ์—†์ด ๊ณง๋ฐ”๋กœ {0}๋กœ ๋ฒˆ์—ญํ•ด์„œ ์•Œ๋ ค์ฃผ์„ธ์š”. ๋ฒˆ์—ญ์ด ๋ถˆ๊ฐ€๋Šฅํ•œ ์–ธ์–ด๋ผ๋ฉด ๋ฒˆ์—ญ์ด ๋ถˆ๊ฐ€๋Šฅํ•˜๋‹ค๊ณ  ๋งํ•œ ํ›„ ๊ทธ ์ด์œ ๋ฅผ ์„ค๋ช…ํ•ด ์ฃผ์„ธ์š”. {1}'.format(output_language, output_conditions)},
                {'role': 'user', 'content': input_text}
            ]
        )
        return completion.choices[0].message.content
# Novel bot
def novel_bot(model, temperature, detail):
    completion = client.chat.completions.create(
        model=model,
        temperature=temperature,
        messages=[
            {'role': 'system', 'content': '๋‹น์‹ ์€ ์†Œ์„ค๊ฐ€์ž…๋‹ˆ๋‹ค. ์š”์ฒญํ•˜๋Š” ์กฐ๊ฑด์— ๋งž์ถฐ ์†Œ์„ค์„ ์ž‘์„ฑํ•ด ์ฃผ์„ธ์š”.'},
            {'role': 'user', 'content': detail}
        ]
    )
    return completion.choices[0].message.content
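
# Gradio UI: one tab per bot.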
with gr.Blocks(theme=gr.themes.Default()) as app:
    with gr.Tab('์ƒ๋‹ด๋ด‡'):
        gr.Markdown(
            value="""
# <center> ์ƒ๋‹ด๋ด‡ </center>
<center>ํ—ค์ด๋งˆํŠธ ์ƒ๋‹ด๋ด‡์ž…๋‹ˆ๋‹ค. ๋งˆํŠธ์—์„œ ํŒ๋งคํ•˜๋Š” ์ƒํ’ˆ๊ณผ ๊ด€๋ จ๋œ ์งˆ๋ฌธ์— ๋‹ต๋ณ€๋“œ๋ฆฝ๋‹ˆ๋‹ค.</center>
"""
        )
        cb_chatbot = gr.Chatbot(
            value=[[None, '์•ˆ๋…•ํ•˜์„ธ์š”. ๊ฒฝ์ œ ์ „๋ฌธ๊ธฐ์ž์ž…๋‹ˆ๋‹ค. ๊ถ๊ธˆํ•œ ์  ๋ฌผ์–ด๋ณด์„ธ์š”.']],
            show_label=False
        )
        with gr.Row():
            cb_user_input = gr.Text(
                lines=1,
                placeholder='์ž…๋ ฅ ์ฐฝ',
                container=False,
                scale=9
            )
            cb_send_btn = gr.Button(
                value='๋ณด๋‚ด๊ธฐ',
                scale=1,
                variant='primary'
            )
        with gr.Row():
            gr.Button(value='๋˜๋Œ๋ฆฌ๊ธฐ').click(fn=counseling_bot_undo, inputs=cb_chatbot, outputs=cb_chatbot)
            gr.Button(value='์ดˆ๊ธฐํ™”').click(fn=counseling_bot_reset, inputs=cb_chatbot, outputs=cb_chatbot)
        cb_send_btn.click(fn=counseling_bot_chat, inputs=[cb_user_input, cb_chatbot], outputs=[cb_user_input, cb_chatbot])
        cb_user_input.submit(fn=counseling_bot_chat, inputs=[cb_user_input, cb_chatbot], outputs=[cb_user_input, cb_chatbot])
    with gr.Tab('๋ฒˆ์—ญ๋ด‡'):
        gr.Markdown(
            value="""
# <center>๋ฒˆ์—ญ๋ด‡</center>
<center>๋‹ค๊ตญ์–ด ๋ฒˆ์—ญ๋ด‡์ž…๋‹ˆ๋‹ค.</center>
"""
        )
        with gr.Row():
            tb_output_conditions = gr.Text(
                label='๋ฒˆ์—ญ ์กฐ๊ฑด',
                placeholder='์˜ˆ์‹œ: ์ž์—ฐ์Šค๋Ÿฝ๊ฒŒ',
                lines=1,
                max_lines=3
            )
            tb_output_language = gr.Dropdown(
                label='์ถœ๋ ฅ ์–ธ์–ด',
                choices=['ํ•œ๊ตญ์–ด', '์˜์–ด', '์ผ๋ณธ์–ด', '์ค‘๊ตญ์–ด'],
                value='ํ•œ๊ตญ์–ด',
                allow_custom_value=True,
                interactive=True
            )
            tb_submit = gr.Button(
                value='๋ฒˆ์—ญํ•˜๊ธฐ',
                variant='primary'
            )
        with gr.Row():
            tb_input_text = gr.Text(
                placeholder='๋ฒˆ์—ญํ•  ๋‚ด์šฉ์„ ์ ์–ด ์ฃผ์„ธ์š”.',
                lines=10,
                max_lines=20,
                show_copy_button=True,
                label=''
            )
            tb_output_text = gr.Text(
                lines=10,
                max_lines=20,
                show_copy_button=True,
                label='',
                interactive=False
            )
        tb_submit.click(
            fn=translate_bot,
            inputs=[tb_output_conditions, tb_output_language, tb_input_text],
            outputs=tb_output_text
        )
    with gr.Tab('์†Œ์„ค๋ด‡'):
        gr.Markdown(
            value="""
# <center>์†Œ์„ค๋ด‡</center>
<center>์†Œ์„ค์„ ์ƒ์„ฑํ•ด ์ฃผ๋Š” ๋ด‡์ž…๋‹ˆ๋‹ค.</center>
"""
        )
        with gr.Accordion(label='์‚ฌ์šฉ์ž ์„ค์ •'):
            with gr.Row():
                with gr.Column(scale=1):
                    nb_model = gr.Dropdown(
                        label='๋ชจ๋ธ ์„ ํƒ',
                        choices=['gpt-3.5-turbo', 'gpt-3.5-turbo-16k'],
                        value='gpt-3.5-turbo',
                        interactive=True
                    )
                    nb_temperature = gr.Slider(
                        label='์ฐฝ์˜์„ฑ',
                        info='์ˆซ์ž๊ฐ€ ๋†’์„์ˆ˜๋ก ์ฐฝ์˜์ ',
                        minimum=0,
                        maximum=2,
                        step=0.1,
                        value=1,
                        interactive=True
                    )
                nb_detail = gr.Text(
                    container=False,
                    placeholder='์†Œ์„ค์˜ ์„ธ๋ถ€์ ์ธ ์„ค์ •์„ ์ž‘์„ฑํ•ฉ๋‹ˆ๋‹ค.',
                    lines=8,
                    scale=4
                )
        nb_submit = gr.Button(
            value='์ƒ์„ฑํ•˜๊ธฐ',
            variant='primary'
        )
        nb_output = gr.Text(
            label='',
            placeholder='์ด๊ณณ์— ์†Œ์„ค์˜ ๋‚ด์šฉ์ด ์ถœ๋ ฅ๋ฉ๋‹ˆ๋‹ค.',
            lines=10,
            max_lines=200,
            show_copy_button=True
        )
        nb_submit.click(
            fn=novel_bot,
            inputs=[nb_model, nb_temperature, nb_detail],
            outputs=nb_output
        )

app.launch()
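
# To run locally (assuming the gradio and openai packages are installed and the
# OPENAI_API_KEY environment variable is set):
#   python app.py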