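"""Legal Prodigy: a Gradio chat app that answers legal queries by querying a
local gpt_index GPTSimpleVectorIndex ('IPCIndex.json') and streaming each
answer to the chatbot word by word."""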
import os

from gpt_index import GPTSimpleVectorIndex
import gradio as gr

API_URL = 'https://api.openai.com/v1/chat/completions'

# Read the API key from the environment; fail fast with a clear message if it is missing.
openai_api_key = os.environ.get('OPENAI_API_KEY')
if openai_api_key is None:
    raise ValueError(
        'Did not find openai_api_key, please add an environment variable `OPENAI_API_KEY` '
        'which contains it, or pass `openai_api_key` as a named parameter.'
    )

# ChatGPT sampling settings (declared alongside API_URL, but the query path
# below answers from the local index rather than the chat completions API).
top_p_chatgpt = 1.0
temperature_chatgpt = 1.0

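# The app expects a prebuilt vector index at 'IPCIndex.json'. A minimal sketch of
# how such an index could be created with the same gpt_index API is shown below;
# the 'data/' folder of source documents is an assumption, not part of this app:
#
#     from gpt_index import GPTSimpleVectorIndex, SimpleDirectoryReader
#     documents = SimpleDirectoryReader('data').load_data()
#     index = GPTSimpleVectorIndex(documents)
#     index.save_to_disk('IPCIndex.json')
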
def predict_chatgpt(inputs, chat_counter_chatgpt, chatbot_chatgpt=None, history=None):
    """Answer a legal query from the local index, streaming the reply word by word."""
    if chatbot_chatgpt is None:
        chatbot_chatgpt = []
    if history is None:
        history = []

    # Rebuild a ChatGPT-style message list from the prior turns. Note that this
    # payload is assembled but never sent to API_URL; the answer below comes
    # from the local vector index.
    if chat_counter_chatgpt != 0:
        messages = []
        for user_turn, bot_turn in chatbot_chatgpt:
            messages.append({'role': 'user', 'content': user_turn})
            messages.append({'role': 'assistant', 'content': bot_turn})
        messages.append({'role': 'user', 'content': inputs})

    chat_counter_chatgpt += 1
    history.append('You asked: ' + inputs)

    # Query the prebuilt index of legal documents.
    index = GPTSimpleVectorIndex.load_from_disk('IPCIndex.json')
    result = index.query(inputs)

    # Stream the answer one word at a time so the chatbot fills in incrementally.
    partial_answer = ''
    for i, word in enumerate(result.response.split()):
        partial_answer = partial_answer + ' ' + word
        if i == 0:
            history.append(' ' + partial_answer)
        else:
            history[-1] = partial_answer
        # Pair up (user, assistant) turns for the gr.Chatbot component.
        chat_pairs = [(history[j], history[j + 1]) for j in range(0, len(history) - 1, 2)]
        yield chat_pairs, history, chat_counter_chatgpt

def reset_textbox():
    return gr.update(value='')


def reset_chat(chatbot, state):
    return None, []

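# NOTE: the layout below uses the Gradio 3.x API (gr.Box, Button.style,
# queue(concurrency_count=...)); these interfaces changed in later Gradio releases.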
with gr.Blocks(
    css='#col_container {width: 1000px; margin-left: auto; margin-right: auto;}\n'
        '#chatgpt {height: 500px; overflow: auto;}',
    theme=gr.themes.Default(primary_hue='slate'),
) as LegalProdigy:
    with gr.Row():
        with gr.Column(scale=14):
            with gr.Box():
                with gr.Row():
                    with gr.Column(scale=13):
                        inputs = gr.Textbox(label='Ask any legal query ⤵️ Try : Explain arbitration process')
                    with gr.Column(scale=1):
                        b1 = gr.Button('Submit', elem_id='submit').style(full_width=True)
                        b2 = gr.Button('Clear', elem_id='clear').style(full_width=True)

            state_chatgpt = gr.State([])

            with gr.Box():
                with gr.Row():
                    chatbot_chatgpt = gr.Chatbot(elem_id='chatgpt', label='Legal Prodigy')

    # Hidden turn counter passed to (and updated by) predict_chatgpt.
    chat_counter_chatgpt = gr.Number(value=0, visible=False, precision=0)

    # Wire up events: pressing Enter clears the textbox; the buttons drive the chat.
    inputs.submit(reset_textbox, [], [inputs])
    b1.click(
        predict_chatgpt,
        [inputs, chat_counter_chatgpt, chatbot_chatgpt, state_chatgpt],
        # The counter is included in the outputs so that the three values yielded
        # by predict_chatgpt (chat pairs, history, counter) all have a target.
        [chatbot_chatgpt, state_chatgpt, chat_counter_chatgpt],
    )
    b2.click(reset_chat, [chatbot_chatgpt, state_chatgpt], [chatbot_chatgpt, state_chatgpt])

LegalProdigy.queue(concurrency_count=16).launch(height=2500, debug=True)