import os
from pathlib import Path
from time import sleep

import gradio as gr
from dotenv import load_dotenv
from openai import OpenAI

# Load credentials and site configuration from the environment (.env file).
load_dotenv()
key = os.getenv('OPENAI_API_KEY')
users = os.getenv('LOGNAME')
unames = users.split(',')
pwds = os.getenv('PASSWORD')
pwdList = pwds.split(',')

# Per-user token logs go under ./data when running locally, /data otherwise.
site = os.getenv('SITE')
if site == 'local':
    dp = Path('./data')
    dp.mkdir(exist_ok=True)
    dataDir = './data/'
else:
    dp = Path('/data')
    dp.mkdir(exist_ok=True)
    dataDir = '/data/'

client = OpenAI(api_key=key)


def genUsageStats(do_reset=False):
    """Return [user, total_tokens] pairs read from the per-user log files.

    With do_reset=True, each log file is deleted after it has been read.
    """
    result = []
    for user in unames:
        tokens = 0
        fp = dataDir + user + '_log.txt'
        if os.path.exists(fp):
            accessOk = False
            for i in range(3):
                try:
                    with open(fp) as f:
                        dataList = f.readlines()
                    if do_reset:
                        os.remove(fp)
                    for line in dataList:
                        (u, t) = line.split(':')
                        tokens += int(t)
                    accessOk = True
                    break
                except Exception:
                    sleep(3)
            if not accessOk:
                return f'File access failed reading stats for user: {user}'
        result.append([user, str(tokens)])
    return result


def clear():
    return [None, [], None]


def updatePassword(txt):
    # Store the lower-cased password in state and mask the visible textbox.
    return [txt.lower(), "*********"]


def setModel(val):
    return val


def chat(prompt, user_window, pwd_window, past, response, gptModel):
    user_window = user_window.lower()
    isBoss = False
    if user_window == unames[0] and pwd_window == pwdList[0]:
        # The first user/password pair is the administrator; it can run the
        # 'stats' and 'reset' commands instead of a normal prompt.
        isBoss = True
        if prompt == 'stats':
            response = genUsageStats()
            return [past, response, None]
        if prompt == 'reset':
            response = genUsageStats(True)
            return [past, response, None]
    if user_window in unames and pwd_window in pwdList:
        past.append({"role": "user", "content": prompt})
        completion = client.chat.completions.create(model=gptModel, messages=past)
        reply = completion.choices[0].message.content
        tokens = completion.usage.total_tokens
        response += "\n\nYOU: " + prompt + "\nGPT: " + reply
        if isBoss:
            response += f"\n{gptModel}: {tokens} tokens"
        if tokens > 40000:
            response += "\n\nTHIS DIALOG IS GETTING TOO LONG. PLEASE RESTART CONVERSATION SOON."
        past.append({"role": "assistant", "content": reply})
        # Append this exchange's token count to the user's log, retrying a few
        # times on transient file errors.
        accessOk = False
        for i in range(3):
            try:
                dataFile = new_func(user_window)
                with open(dataFile, 'a') as f:
                    f.write(f'{user_window}: {tokens}\n')
                accessOk = True
                break
            except Exception:
                sleep(3)
        if not accessOk:
            response += f"\nDATA LOG FAILED, path = {dataFile}"
        return [past, response, None]
    else:
        return [[], "User name and/or password are incorrect", prompt]


def new_func(user_window):
    # Build the path of the per-user token log file.
    dataFile = dataDir + user_window + '_log.txt'
    return dataFile


with gr.Blocks() as demo:
    history = gr.State([])
    password = gr.State("")
    model = gr.State("gpt-3.5-turbo")
    gr.Markdown('# GPT Chat')
    gr.Markdown('Enter your user name and password, then enter a prompt and click the submit button. '
                'GPT-3.5 is cheaper, but GPT-4o may perform better. '
                'Restart the conversation if the topic changes.')
    # heading = gr.Label(value="GPT Chat", scale=2, color="Crimson")
    with gr.Row():
        user_window = gr.Textbox(label="User Name")
        pwd_window = gr.Textbox(label="Password")
        pwd_window.blur(updatePassword, pwd_window, [password, pwd_window])
    with gr.Row():
        clear_button = gr.Button(value="Restart Conversation")
        gpt_chooser = gr.Radio(choices=[("GPT-3.5", "gpt-3.5-turbo"),
                                        ("GPT-4o", "gpt-4o-mini")],
                               value="gpt-3.5-turbo", label="GPT Model", interactive=True)
        submit_window = gr.Button(value="Submit Prompt/Question")
    prompt_window = gr.Textbox(label="Prompt or Question")
    output_window = gr.Textbox(label="Dialog")
    submit_window.click(chat,
                        inputs=[prompt_window, user_window, password, history, output_window, model],
                        outputs=[history, output_window, prompt_window])
    clear_button.click(clear, inputs=[], outputs=[prompt_window, history, output_window])
    gpt_chooser.input(fn=setModel, inputs=gpt_chooser, outputs=model)

demo.launch()
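
# A minimal sketch of the .env file this script expects, inferred from the
# os.getenv() calls above; the names and values shown are hypothetical
# placeholders, not real credentials. LOGNAME and PASSWORD are comma-separated
# lists, and the first entry of each is treated as the administrator account:
#
#   OPENAI_API_KEY=sk-...
#   LOGNAME=alice,bob
#   PASSWORD=pw-for-alice,pw-for-bob
#   SITE=local            # any other value makes the app log under /data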