|
import gradio as gr |
|
from openai import OpenAI |
|
import os |
|
import openai |
|
|
|
|
|
|
|
|
|
# SECURITY: never hard-code an API key in source — a key committed to a
# repository must be treated as leaked and revoked. Read it from the
# environment instead (export OPENAI_API_KEY=... before launching).
openai.api_key = os.environ.get("OPENAI_API_KEY", "")
|
|
|
import gradio as gr |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from langchain_openai import ChatOpenAI |
|
|
|
|
|
from langchain.memory import ConversationBufferMemory |
|
|
|
|
|
|
|
from langchain.chains import ConversationChain |
|
from langchain.schema import AIMessage, HumanMessage, SystemMessage |
|
|
|
from langchain_community.document_loaders import TextLoader |
|
from langchain_community.document_loaders import PyPDFLoader |
|
from langchain_core.runnables import RunnableWithMessageHistory |
|
|
|
|
|
|
|
# SECURITY: read the key from the environment instead of hard-coding it;
# a key committed to source control must be treated as leaked and revoked.
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "")

# Plain OpenAI client used by translate_bot() and novel_bot().
# BUG FIX: the original file never instantiated this client, so both of
# those functions raised NameError the first time they were called.
client = OpenAI(api_key=OPENAI_API_KEY)

# Deterministic (temperature=0.0) chat model for the counseling tab.
llm = ChatOpenAI(temperature=0.0, model='gpt-3.5-turbo',
                 openai_api_key=OPENAI_API_KEY)

# Buffer memory keeps the whole transcript so the counseling bot sees
# earlier turns on every call to conversation.predict().
memory = ConversationBufferMemory()

conversation = ConversationChain(
    llm=llm,
    memory=memory)
|
|
|
|
|
|
|
|
|
|
|
def get_session_history():
    """Return a fresh, empty message history.

    Presumably intended as the history factory for
    RunnableWithMessageHistory (imported above) — unused in the code shown.
    """
    history = []
    return history
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def counseling_bot_chat(message, chat_history):
    """Run one turn of the counseling chatbot.

    Parameters:
        message: user text from the input box.
        chat_history: Gradio Chatbot history, a list of [user, bot] pairs.

    Returns:
        A ("", chat_history) tuple; the empty string clears the input box.
    """
    # Empty submission: do nothing, keep history unchanged.
    if message == "":
        return "", chat_history
    else:
        result_message = ""
        # History still only holds the canned greeting, so this is the
        # first real user turn: prime the model with the system prompt and
        # the greeting before the user's question.
        if len(chat_history) <= 1:
            messages = [
                # System prompt (text is mojibake of the original Korean).
                SystemMessage(content="λΉμ μ ν€μ΄λ§νΈμ μλ΄μμ
λλ€. λ§νΈ μνκ³Ό κ΄λ ¨λμ§ μμ μ§λ¬Έμλ μ μ€ν κ±°μ νμΈμ."),
                # Canned assistant greeting, mirrored from the UI.
                AIMessage(content="μλ
νμΈμ, ν€μ΄λ§νΈμ
λλ€. μλ΄μ λμλλ¦¬κ² μ΅λλ€."),
                HumanMessage(content=message)
            ]
            # NOTE(review): ConversationChain.predict expects a string
            # `input`; passing a list of Message objects here may not behave
            # as intended — confirm against the LangChain version in use.
            result_message = conversation.predict(input=messages)
        else:
            # Later turns: conversation memory already holds the context,
            # so only the raw user text is sent.
            result_message = conversation.predict(input=message)

        # Append this exchange so the Chatbot widget displays it.
        chat_history.append([message, result_message])
        return "", chat_history
|
|
|
|
|
def counseling_bot_undo(chat_history):
    """Drop the most recent exchange from the chat history.

    The first entry (the fixed greeting) is never removed; the possibly
    shortened history is returned for Gradio to display.
    """
    if len(chat_history) > 1:
        del chat_history[-1]
    return chat_history
|
|
|
|
|
def counseling_bot_reset(chat_history):
    """Reset the chat history back to the single greeting message.

    The greeting text (mojibake of the original Korean) matches the
    Chatbot's initial value in the UI below.
    """
    # Rebinding the parameter does not mutate the caller's list; Gradio
    # takes the returned value and writes it back into the Chatbot.
    chat_history = [[None, "μλ
νμΈμ, ν€μ΄λ§νΈμ
λλ€. μλ΄μ λμλλ¦¬κ² μ΅λλ€."]]
    return chat_history
|
|
|
|
|
|
|
def translate_bot(output_conditions, output_language, input_text):
    """Translate input_text into output_language with gpt-3.5-turbo.

    Parameters:
        output_conditions: optional free-text constraints on the translation.
        output_language: target language name chosen in the dropdown.
        input_text: the text to translate.

    Returns:
        The model's translated text, or "" when input_text is empty.
    """
    if input_text == "":
        return ""
    else:
        # Prefix the conditions with an instruction sentence when present.
        if output_conditions == "":
            # No-op branch: kept byte-identical to the original.
            output_conditions = ""
        else:
            output_conditions = "λ²μν λμ 쑰건μ λ€μκ³Ό κ°μ΅λλ€. " + output_conditions

        # NOTE(review): relies on a module-level OpenAI `client`; it is not
        # created anywhere in the code shown here — confirm it exists at
        # import time, otherwise this line raises NameError.
        completion = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[
                # System prompt (text is mojibake of the original Korean);
                # {0}/{1} are filled with the language and the conditions.
                {"role": "system", "content": "λΉμ μ λ²μκ°μ
λλ€. μ
λ ₯ν μΈμ΄λ₯Ό λ€λ₯Έ μ€λͺ
μμ΄ κ³§λ°λ‘ {0}λ‘ λ²μν΄μ μλ €μ£ΌμΈμ. λ²μμ΄ λΆκ°λ₯ν μΈμ΄λΌλ©΄ λ²μμ΄ λΆκ°λ₯νλ€κ³ λ§ν ν κ·Έ μ΄μ λ₯Ό μ€λͺ
ν΄μ£ΌμΈμ. {1}".format(output_language, output_conditions)},
                {"role": "user", "content": input_text}
            ])

        # First (and only) choice holds the translation.
        return completion.choices[0].message.content
|
|
|
|
|
def translate_bot_Text_upload(files):
    """Read an uploaded plain-text file and return its full contents.

    `files` is the path Gradio's UploadButton hands over; the loaded
    document's text fills the translator's input box.
    """
    documents = TextLoader(files).load()
    return documents[0].page_content
|
|
|
|
|
def translate_bot_PDF_upload(files):
    """Read an uploaded PDF and return the text of its first page/split.

    `files` is the path Gradio's UploadButton hands over; only the first
    chunk produced by load_and_split() is shown in the input box.
    """
    chunks = PyPDFLoader(files).load_and_split()
    first_chunk = chunks[0]
    return first_chunk.page_content
|
|
|
|
|
|
|
def novel_bot(model, temperature, detail):
    """Generate a short story from the user's settings.

    Parameters:
        model: OpenAI chat model name chosen in the dropdown.
        temperature: sampling temperature (slider range 0-2 in the UI);
            higher values produce more varied output.
        detail: free-text description of the requested story.

    Returns:
        The generated story text.
    """
    # NOTE(review): relies on a module-level OpenAI `client`; it is not
    # created anywhere in the code shown here — confirm it exists at
    # import time, otherwise this line raises NameError.
    completion = client.chat.completions.create(
        model=model,
        temperature=temperature,
        messages=[
            # System prompt (text is mojibake of the original Korean).
            {"role": "system", "content": "λΉμ μ μμ€κ°μ
λλ€. μμ²νλ 쑰건μ λ§μΆ° μμ€μ μμ±ν΄μ£ΌμΈμ."},
            {"role": "user", "content": detail}
        ])
    return completion.choices[0].message.content
|
|
|
|
|
|
|
# ---------------------------------------------------------------------------
# Gradio UI: three tabs — counseling chat, translator, novel generator.
# All user-facing strings are mojibake of the original Korean and are kept
# byte-for-byte; the indentation below reconstructs the Blocks layout, which
# was lost in extraction — TODO confirm nesting against the running app.
# ---------------------------------------------------------------------------
with gr.Blocks(theme=gr.themes.Default()) as app:
    # ----- Tab 1: counseling chatbot ---------------------------------------
    with gr.Tab("μλ΄λ΄"):

        gr.Markdown(
            value="""
# <center>μλ΄λ΄</center>
<center>ν€μ΄λ§νΈ μλ΄ λ΄μ
λλ€. λ§νΈμμ ν맀νλ μνκ³Ό κ΄λ ¨λ μ§λ¬Έμ λ΅λ³λ립λλ€.</center>
""")

        # Chat transcript, pre-seeded with the bot's greeting (same text
        # that counseling_bot_reset() restores).
        cb_chatbot = gr.Chatbot(
            value=[[None, "μλ
νμΈμ, ν€μ΄λ§νΈμ
λλ€. μλ΄μ λμλλ¦¬κ² μ΅λλ€."]],
            show_label=False
        )
        with gr.Row():
            # User input textbox (takes 9/10 of the row).
            cb_user_input = gr.Text(
                lines=1,
                placeholder="μ
λ ₯ μ°½",
                container=False,
                scale=9
            )
            # Send button (remaining 1/10 of the row).
            cb_send_btn = gr.Button(
                value="보λ΄κΈ°",
                scale=1,
                variant="primary",
                icon="https://cdn-icons-png.flaticon.com/128/12439/12439334.png"
            )
        with gr.Row():
            # Undo the most recent exchange.
            gr.Button(value="β©οΈ λλ리기").click(fn=counseling_bot_undo, inputs=cb_chatbot, outputs=cb_chatbot)
            # Reset the chat back to the greeting only.
            gr.Button(value="ποΈ μ΄κΈ°ν").click(fn=counseling_bot_reset, inputs=cb_chatbot, outputs=cb_chatbot)

        # Both the button click and pressing Enter submit the message.
        cb_send_btn.click(fn=counseling_bot_chat, inputs=[cb_user_input, cb_chatbot], outputs=[cb_user_input, cb_chatbot])
        cb_user_input.submit(fn=counseling_bot_chat, inputs=[cb_user_input, cb_chatbot], outputs=[cb_user_input, cb_chatbot])

    # ----- Tab 2: translator -----------------------------------------------
    with gr.Tab("λ²μλ΄"):

        gr.Markdown(
            value="""
# <center>λ²μλ΄</center>
<center>λ€κ΅μ΄ λ²μ λ΄μ
λλ€.</center>
""")
        with gr.Row():
            # Optional free-text constraints passed to translate_bot().
            tb_output_conditions = gr.Text(
                label="λ²μ 쑰건",
                placeholder="μμ: μμ°μ€λ½κ²",
                lines=1,
                max_lines=3
            )
            # Target language; custom values are allowed.
            tb_output_language = gr.Dropdown(
                label="μΆλ ₯ μΈμ΄",
                choices=["νκ΅μ΄", "μμ΄", "μΌλ³Έμ΄", "μ€κ΅μ΄"],
                value="νκ΅μ΄",
                allow_custom_value=True,
                interactive=True
            )
        with gr.Row():
            # Upload buttons fill the source textbox from a file.
            tb_TXTupload = gr.UploadButton(label="π Txt μ
λ‘λ")

            tb_PDFupload = gr.UploadButton(label="π€ PDF μ
λ‘λ")

            tb_submit = gr.Button(
                value="λ²μνκΈ°",
                variant="primary"
            )
        with gr.Row():
            # Source text (editable) on the left ...
            tb_input_text = gr.Text(
                placeholder="λ²μν λ΄μ©μ μ μ΄μ£ΌμΈμ.",
                lines=10,
                max_lines=20,
                show_copy_button=True,
                label=""
            )
            # ... translated text (read-only) on the right.
            tb_output_text = gr.Text(
                lines=10,
                max_lines=20,
                show_copy_button=True,
                label="",
                interactive=False
            )

        tb_submit.click(
            fn=translate_bot,
            inputs=[tb_output_conditions,
                    tb_output_language,
                    tb_input_text],
            outputs=tb_output_text
        )

        # .upload fires once the file transfer completes; the loaded text
        # replaces whatever is in the source textbox.
        tb_TXTupload.upload(
            fn=translate_bot_Text_upload,
            inputs=tb_TXTupload,
            outputs=tb_input_text
        )

        tb_PDFupload.upload(
            fn=translate_bot_PDF_upload,
            inputs=tb_PDFupload,
            outputs=tb_input_text
        )

    # ----- Tab 3: novel generator ------------------------------------------
    with gr.Tab("μμ€λ΄"):

        gr.Markdown(
            value="""
# <center>μμ€λ΄</center>
<center>μμ€μ μμ±ν΄μ£Όλ λ΄μ
λλ€.</center>
""")
        with gr.Accordion(label="μ¬μ©μ μ€μ "):
            with gr.Row():
                with gr.Column(scale=1):
                    # Model picker forwarded to novel_bot().
                    nb_model = gr.Dropdown(
                        label="λͺ¨λΈ μ ν",
                        choices=["gpt-3.5-turbo", "gpt-3.5-turbo-16k", "gpt-4", "gpt-4-32k", "gpt-4-1106-preview"],
                        value="gpt-4-1106-preview",
                        interactive=True
                    )

                    # Sampling temperature (0-2 in 0.1 steps).
                    nb_temperature = gr.Slider(
                        label="μ°½μμ±",
                        info="μ«μκ° λμ μλ‘ μ°½μμ ",
                        minimum=0,
                        maximum=2,
                        step=0.1,
                        value=1,
                        interactive=True
                    )

                # Free-text description of the requested story.
                nb_detail = gr.Text(
                    container=False,
                    placeholder="μμ€μ μΈλΆμ μΈ μ€μ μ μμ±ν©λλ€.",
                    lines=8,
                    scale=4
                )

        nb_submit = gr.Button(
            value="μμ±νκΈ°",
            variant="primary"
        )

        # Generated story output (read-only by default; copy button shown).
        nb_output = gr.Text(
            label="",
            placeholder="μ΄κ³³μ μμ€μ λ΄μ©μ΄ μΆλ ₯λ©λλ€.",
            lines=10,
            max_lines=200,
            show_copy_button=True
        )

        nb_submit.click(
            fn=novel_bot,
            inputs=[nb_model, nb_temperature, nb_detail],
            outputs=nb_output
        )

# Start the web server (blocking call).
app.launch()