# Gradio app: ask natural-language questions about BFSI customer data
# via a LangChain pandas-dataframe agent backed by Azure OpenAI.
import os
import gradio as gr
from langchain.agents.agent_types import AgentType
from langchain_experimental.agents.agent_toolkits import create_pandas_dataframe_agent
from langchain_openai import AzureChatOpenAI
from sklearn.datasets import fetch_openml
# Azure OpenAI chat model that powers the dataframe agent.
# Credentials come from the environment; a missing variable raises
# KeyError at import time, which is intentional fail-fast behavior.
gpt35 = AzureChatOpenAI(
api_key=os.environ["AZURE_OPENAI_KEY"],
azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
api_version="2023-05-15",
# NOTE(review): `deployment_name` is the legacy alias for
# `azure_deployment` in langchain-openai — confirm the installed version.
deployment_name="gpt-35-turbo"
)
# Download the bank-marketing dataset (OpenML id 43718) as a DataFrame;
# the target column is discarded since the agent queries features only.
bank_data, _ = fetch_openml(data_id=43718, return_X_y=True)
# Agent that translates natural-language questions into pandas
# operations over `bank_data` using OpenAI function-calling.
pandas_agent = create_pandas_dataframe_agent(
llm=gpt35,
df=bank_data,
verbose=False,
agent_type=AgentType.OPENAI_FUNCTIONS,
)
def predict(user_input):
    """Answer a natural-language query about the customer dataframe.

    Parameters
    ----------
    user_input : str
        The question typed into the Gradio textbox.

    Returns
    -------
    str
        The agent's answer, or the error message if the call failed.
    """
    try:
        response = pandas_agent.invoke(user_input)
        prediction = response["output"]
    except Exception as e:
        # Return the error as text, not the exception object, so the
        # Gradio "text" output component always receives a valid string.
        prediction = str(e)
    return prediction
# Single free-text input for the user's query.
textbox = gr.Textbox(placeholder="Enter your query here", lines=6)

# Gradio expects one value per input component in each example row; the
# original rows carried a spurious second "" column for this
# single-textbox interface, so they are trimmed to one element each.
interface = gr.Interface(
    fn=predict,
    inputs=textbox,
    outputs="text",
    title="Query BFSI customer information",
    description="This web API presents an interface to ask questions on customer information stored in a database.",
    examples=[
        ["What is the average balance maintained by our customers?"],
        ["How many customers have subscribed to a term deposit?"],
        ["How many customers have defaulted on loans?"],
        ["Do customers who default maintain a low balance?"],
    ],
)
# Embed the interface in a Blocks app. The original called
# interface.launch() inside the context, which starts a separate
# blocking server instead of composing the interface into `demo`;
# render() is the correct way to embed it. (A stray trailing "|"
# scrape artifact was also removed from the final line.)
with gr.Blocks() as demo:
    interface.render()

# Queue incoming requests so concurrent users are handled in order.
# NOTE(review): `concurrency_count` was removed in Gradio 4.x — if this
# runs on Gradio 4+, switch to demo.queue(default_concurrency_limit=16).
demo.queue(concurrency_count=16)
demo.launch()