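"""Gradio demo for the Consciousness Turing Machine (CTM)."""
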
import os
import gradio as gr
from ctm.ctms.ctm_base import BaseConsciousnessTuringMachine
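
# Instantiate the CTM, register two GPT-4-backed text processors in a shared
# competition group, and attach a supervisor that judges the winning gist.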
ctm = BaseConsciousnessTuringMachine()
ctm.add_processor("gpt4_text_emotion_processor", group_name="group_1")
ctm.add_processor("gpt4_text_summary_processor", group_name="group_1")
ctm.add_supervisor("gpt4_supervisor")
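
# DEPLOYED controls how the demo is launched (see start_demo()): hosted
# deployments hide the API, local runs bind to 0.0.0.0.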
DEPLOYED = os.getenv("DEPLOYED", "true").lower() == "true"
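

# Intro row: banner image on the left, demo title on the right.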
def introduction():
    with gr.Column(scale=2):
        gr.Image(
            "images/sotopia.jpg", elem_id="banner-image", show_label=False
        )
    with gr.Column(scale=5):
        gr.Markdown(
            """Consciousness Turing Machine Demo
            """
        )
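

# Register an extra processor on the global CTM when a processor button is clicked.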
def add_processor(processor_name):
    print('add processor ', processor_name)
    ctm.add_processor(processor_name)
    print(len(ctm.processor_list))
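

# Tab with one button per available processor; each button feeds a hidden
# textbox holding the processor name into add_processor().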
def processor_tab():
    with gr.Row() as row:
        button1 = gr.Button("Text Emotion Analyzer")
        button2 = gr.Button("Text Summary Generator")
        invisible_input1 = gr.Textbox(
            value="gpt4_text_emotion_processor",
            visible=False
        )
        invisible_input2 = gr.Textbox(
            value="gpt4_text_summary_processor",
            visible=False
        )
        button1.click(
            fn=add_processor,
            inputs=[invisible_input1],
        )
        button2.click(
            fn=add_processor,
            inputs=[invisible_input2],
        )
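

# One full CTM step: fan out to the processors, run the up-tree competition,
# ask the supervisor, then broadcast the winner back down and update links.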
def forward(query, content, image, state):
    state['question'] = query
    ask_processors_output_info, state = ask_processors(
        query, content, image, state
    )
    uptree_competition_output_info, state = uptree_competition(state)
    ask_supervisor_output_info, state = ask_supervisor(state)
    ctm.downtree_broadcast(state['winning_output'])
    ctm.link_form(state['processor_output'])
    return (
        ask_processors_output_info,
        uptree_competition_output_info,
        ask_supervisor_output_info,
        state,
    )
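

# Query every registered processor. Only the text inputs are forwarded for
# now; image/audio/video paths are passed as None even when an image is given.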
def ask_processors(query, content, image, state):
    # Simulate processing here
    processor_output = ctm.ask_processors(
        question=query,
        context=content,
        image_path=None,
        audio_path=None,
        video_path=None
    )
    output_info = ''
    for name, info in processor_output.items():
        output_info += f"{name}: {info['gist']}\n"
    state['processor_output'] = processor_output
    return output_info, state
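

# Run the up-tree competition over the processor outputs and report the winner.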
def uptree_competition(state):
    winning_output = ctm.uptree_competition(
        state['processor_output']
    )
    state['winning_output'] = winning_output
    output_info = 'The winning processor is: {}\nThe winning gist is: {}\n'.format(
        winning_output['name'], winning_output['gist']
    )
    return output_info, state
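

# Ask the supervisor to answer the original question from the winning gist
# and report its confidence score.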
def ask_supervisor(state):
    question = state['question']
    winning_output = state['winning_output']
    answer, score = ctm.ask_supervisor(question, winning_output)
    output_info = (
        f"The answer to the query \"{question}\" is: {answer}\n"
        f"The confidence for answering is: {score}\n"
    )
    state['answer'] = answer
    state['score'] = score
    return output_info, state
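

# Main interaction tab: text/query/media inputs, a button that triggers one
# forward() pass, and textboxes showing each stage's output.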
def interface_tab():
    with gr.Blocks() as tab:
        state = gr.State({})  # State to hold and pass values
        with gr.Column():
            # Inputs
            content = gr.Textbox(label="Enter your text here")
            query = gr.Textbox(label="Enter your query here")
            image = gr.Image(label="Upload your image")
            audio = gr.Audio(label="Upload or Record Audio")
            video = gr.Video(label="Upload or Record Video")

            # Processing buttons
            forward_button = gr.Button("Start CTM forward process")

            # Outputs
            processors_output = gr.Textbox(
                label="Processors Output",
                visible=True
            )
            competition_output = gr.Textbox(
                label="Up-tree Competition Output",
                visible=True
            )
            supervisor_output = gr.Textbox(
                label="Supervisor Output",
                visible=True
            )

            # Set up button to start or continue processing
            forward_button.click(
                fn=forward,
                inputs=[query, content, image, state],
                outputs=[
                    processors_output,
                    competition_output,
                    supervisor_output,
                    state,
                ],
            )
    return tab
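

# Assemble the full demo: intro row, processor tab, and the interaction tab.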
def main():
    with gr.Blocks(
        css="""#chat_container {height: 820px; width: 1000px; margin-left: auto; margin-right: auto;}
        #chatbot {height: 600px; overflow: auto;}
        #create_container {height: 750px; margin-left: 0px; margin-right: 0px;}
        #tokenizer_renderer span {white-space: pre-wrap}
        """
    ) as demo:
        with gr.Row():
            introduction()
        with gr.Row():
            processor_tab()
        with gr.Row():
            interface_tab()
    return demo
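

# Build and launch the demo, with launch options depending on DEPLOYED.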
def start_demo():
    demo = main()
    if DEPLOYED:
        demo.queue(api_open=False).launch(show_api=False)
    else:
        demo.queue()
        demo.launch(share=False, server_name="0.0.0.0")


if __name__ == "__main__":
    start_demo()