# api_for_chat/app.py
import gradio as gr
from gradio_client import Client as GrClient
import inspect
from gradio import routes
from typing import List, Type
import requests, os, re, asyncio
loop = asyncio.get_event_loop()  # event loop handle (not used below)
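# Client for the upstream model server. gradio.live share links are temporary,
# so this URL must be refreshed whenever the backend is restarted.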
gradio_client = GrClient('https://4602593e320dced0af.gradio.live/')
# Monkey patch: replace gradio.routes.get_types, which parses component docstrings
# when Gradio builds its API type info; this is a commonly used workaround for
# docstring-parsing errors in this version of Gradio.
def get_types(cls_set: List[Type], component: str):
    docset = []
    types = []
    if component == "input":
        # Input components: take the description from the second line of each docstring.
        for cls in cls_set:
            doc = inspect.getdoc(cls)
            doc_lines = doc.split("\n")
            # Text after the last ":" is the description; the token inside "(...)" is the type.
            docset.append(doc_lines[1].split(":")[-1])
            types.append(doc_lines[1].split(")")[0].split("(")[-1])
    else:
        # Output components: same parsing, but from the last line of the docstring.
        for cls in cls_set:
            doc = inspect.getdoc(cls)
            doc_lines = doc.split("\n")
            docset.append(doc_lines[-1].split(":")[-1])
            types.append(doc_lines[-1].split(")")[0].split("(")[-1])
    return docset, types
routes.get_types = get_types
# App code
def chat(x, id):
    # Forward the request to the upstream model server and return its reply as text.
    result = gradio_client.predict(
        x,   # str: the user message ('User input' Textbox on the upstream app)
        50,  # int: second positional input expected by the upstream fn (kept at 50)
        id,  # str: id string passed through from the second Textbox
        fn_index=0,
    )
    result = str(result)
    return result
with gr.Blocks() as demo:
    count = 0  # not used below
    aa = gr.Interface(
        fn=chat,
        inputs=["text", "text"],
        outputs="text",
        description="chat",
        # Example prompts (Korean): "What is your dream?" / "What color do you like the most?"
        examples=[["λ„ˆλŠ” 꿈이 뭐야?", "771171"], ["λ„ˆλŠ” 무슨 색을 κ°€μž₯ μ’‹μ•„ν•΄?", "772122"]],
    )
demo.queue(max_size=32).launch(enable_queue=True)  # .queue() already enables queuing; enable_queue=True is the older flag
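# Usage sketch (not executed): once this app is deployed, another script can call the
# chat endpoint through gradio_client. The Space URL below is a placeholder, not the
# real deployment address.
#
#   from gradio_client import Client
#   client = Client("https://<username>-api-for-chat.hf.space/")  # hypothetical URL
#   reply = client.predict("λ„ˆλŠ” 꿈이 뭐야?", "771171", fn_index=0)
#   print(reply)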