# dcddemo/app_bak.py
import openai
import gradio as gr
import os
import logging
import json
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
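# Configuration comes from environment variables (their exact contents are not
# part of this file; the roles below are inferred from how they are used):
#   key        - OpenAI API key
#   init       - system prompt that opens the game session
#   roleAns    - placeholder text shown in the reply textbox
#   classAsk   - prompt appended after the race is chosen, asking for a class
#   classAns   - prompt appended after the class is chosen
#   defaultMsg - read here but not referenced elsewhere in this file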
openai.api_key = os.environ['key']
initMsg = os.environ['init']
roleAns = os.environ['roleAns']
classAsk = os.environ['classAsk']
classAns = os.environ['classAns']
defaultMsg = os.environ['defaultMsg']
mod = 3  # unused in this file
def localRace(race):
    # Pass-through helper for the chosen race (kept for symmetry with localClass).
    return race

def localClass(cls):
    # Pass-through helper for the chosen class.
    return cls
def trimMessages(messages):
    # messages is a list of dicts with "role" and "content" keys.
    # Strip the "Current Status: ... Wielding:" block out of earlier assistant
    # replies, then re-append the last full status message so the latest state
    # sits at the end of the history.
    content = None
    for message in messages:
        if message["role"] == "assistant" and "Current Status:" in message["content"]:
            content = message["content"]
            start_index = message["content"].index("Current Status:") + len("Current Status:")
            end_index = message["content"].index("Wielding:")
            message["content"] = message["content"][:start_index] + message["content"][end_index:]
    if content is not None:
        messages.append({"role": "assistant", "content": content})
    return messages
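# Example with hypothetical content: an assistant reply of
#   "...story... Current Status: HP 10/10, AC 15 Wielding: dagger ..."
# is rewritten in place to "...story... Current Status: Wielding: dagger ...",
# and the untrimmed copy is re-appended so the latest full status stays at the
# end of the history.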
def getChooseRole(msg):
    # Expects a reply of the form "As a <race> <class>, ..." and returns the
    # phrase between "As a" and the first comma after it.
    start_index = msg.find("As a")
    if start_index >= 0:
        start_index += len("As a")
        end_index = msg.find(",", start_index)
        if end_index < 0:
            end_index = len(msg)
        choice = msg[start_index:end_index].strip()
        if choice.lower() == "dm":
            return ""
        return choice
    else:
        return ""
def getChooseClass(msg):
    # Takes the "<race> <class>" phrase from getChooseRole and returns the part
    # after the first space (the class).
    full = getChooseRole(msg)
    if full != "":
        space_index = full.find(" ")
        if space_index < 0:
            return ""
        return full[space_index + 1:].strip()
    else:
        return ""
def chatbot(input, localvar):
    # Per-session state: the message history plus the chosen race and class.
    #messages = messages or [{"role": "system", "content": initMsg}]
    localvar = localvar or {"messages": [{"role": "system", "content": initMsg}], "race": "", "class": ""}
    messages = localvar['messages']
    #if len(messages) == 1:
    #    memory = trimMessages(messages)
    #    logging.info("put memory:" + printMessages(memory))
    #    chat = openai.ChatCompletion.create(
    #        model="gpt-3.5-turbo", messages=memory,
    #        max_tokens=1048, n=1, temperature=0.5,
    #    )
    #    logging.info("put memory return:" + chat.choices[0].message.content)
    reply = ""
    if input:
        messages.append({"role": "user", "content": input})
        # Only the last two messages are sent to the model, to keep the request small.
        chat = openai.ChatCompletion.create(
            model="gpt-3.5-turbo", messages=messages[-2:],
            max_tokens=2048, n=1, temperature=0.5,
        )
        reply = chat.choices[0].message.content
        logging.info("AI Reply:" + reply)
        ans = ""
        if len(messages) == 2:
            # First exchange: the reply is expected to announce the race ("As a <race>, ...").
            localvar['race'] = localRace(getChooseRole(reply))
            ans = "I choose " + localvar['race'] + " as my race." + classAsk
            logging.info("ChooseRole:" + ans)
        if len(messages) == 4:
            # Second exchange: the reply is expected to announce the class as well.
            localvar['class'] = getChooseClass(reply)
            ans = "My Race is " + localvar['race'] + ", I choose " + localvar['class'] + " as my class." + classAns
            logging.info("ChooseClass:" + ans)
        messages.append({"role": "assistant", "content": ans})
        localvar['messages'] = messages
    return reply, printMessages(messages), localvar
def printMessages(messages):
    # Render the history as one "role:content" line per message for the History textbox.
    delimiter = '\n'
    msg_string = delimiter.join([f"{obj['role']}:{obj['content']}" for obj in messages])
    logging.info("messages:" + msg_string)
    return msg_string
app = gr.Interface(
    fn=chatbot,
    inputs=[gr.Textbox(lines=7, label="You ask and answer questions below"), "state"],
    outputs=[gr.Textbox(label="DND Game Reply", placeholder=roleAns), gr.Textbox(label="History"), "state"],
    title="DND Game",
    description="DND Game",
    theme="compact",
)
app.launch(share=False)