import gradio as gr

from src.config import readme, justice_names
from src.model import Model
from src.case_summaries import CaseSummaries


def run():
    # Dropdown options: the individual justices plus the Court as a whole.
    choices = list(justice_names) + ['Court']
    summaries = CaseSummaries()
    case_placeholder = summaries.random_summary()
    model = Model()

    with gr.Blocks(theme=gr.themes.Soft(text_size=gr.themes.sizes.text_lg)) as demo:
        # Hidden textbox used to stage the case text before inference.
        cache = gr.Textbox(visible=False)
        description = gr.Markdown(value=readme)
        dropdown = gr.Dropdown(
            label="Justice Name",
            choices=choices,
            value='Court',
            interactive=True,
        )
        with gr.Row():
            btn = gr.Button(value="Get Random Case")
            btn2 = gr.Button(value="Run")
        with gr.Row():
            txt = gr.Textbox(label="Case Description", lines=15, value=case_placeholder)
            txt2 = gr.Chatbot(label='Predicted Court Opinion')

        # "Get Random Case" replaces the case description with a fresh random summary.
        btn.click(summaries.random_summary, outputs=[txt], queue=False)
        # "Run" copies the case text into the hidden cache, then runs inference
        # with the selected justice and shows the result in the chatbot.
        btn2.click(lambda x: x, inputs=[txt], outputs=cache, queue=False).then(
            model.inference, inputs=[cache, dropdown], outputs=txt2)

    demo.queue().launch(share=False, server_name="0.0.0.0")


if __name__ == '__main__':
    run()
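
The `src.config`, `src.model`, and `src.case_summaries` modules are local to the Space and are not shown above. A rough, hypothetical sketch of what they would need to expose follows; the names, signatures, and return values are assumptions inferred from how the objects are used in the script, not the Space's actual code.

# Hypothetical stubs for the Space-local `src` modules, inferred from their
# usage in the app script above; the real implementations are not shown here.
import random

readme = "# Court Opinion Predictor"            # markdown rendered at the top of the page
justice_names = ["Justice One", "Justice Two"]  # placeholder justice names

class CaseSummaries:
    """random_summary() is assumed to return a plain string for the Textbox."""
    def __init__(self):
        self._summaries = ["Example case description ..."]

    def random_summary(self):
        return random.choice(self._summaries)

class Model:
    """inference() is assumed to return chat history in the list-of-pairs
    format that gr.Chatbot accepts: [(user_message, bot_message), ...]."""
    def inference(self, case_text, justice_name):
        opinion = f"Predicted opinion for {justice_name}: ..."
        return [(case_text, opinion)]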