import os
import sys
import logging

import gradio as gr
import openai
from llama_index import StorageContext, load_indices_from_storage
from llama_index.query_engine import CitationQueryEngine

# Send logs to stdout at INFO level; reset the root handlers (basicConfig already
# installed one) so each message is emitted only once.
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
logging.getLogger().handlers = []
logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))
openai.api_key = os.environ['OPENAI_API_KEY']
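# The ./storage directory is assumed to have been built ahead of time. A minimal
# sketch of how it could be created with this legacy llama_index API (the ./data
# folder and the choice of index types are assumptions, not taken from this repo):
#
#   from llama_index import SimpleDirectoryReader, ListIndex, VectorStoreIndex
#   documents = SimpleDirectoryReader('./data').load_data()
#   build_context = StorageContext.from_defaults()
#   ListIndex.from_documents(documents, storage_context=build_context)
#   VectorStoreIndex.from_documents(documents, storage_context=build_context)
#   build_context.persist(persist_dir='./storage')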
# Load the indices previously persisted to ./storage. Position 0 is assumed to hold
# the list index and position 1 the vector index.
storage_context = StorageContext.from_defaults(persist_dir='./storage')
indices = load_indices_from_storage(storage_context)

# Citation engine: answers are built from the retrieved chunks and annotated with
# citations pointing back to them.
citation_query_engine = CitationQueryEngine.from_args(index=indices[1], similarity_top_k=2)
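# The cited source chunks can also be inspected directly, e.g. (a sketch, assuming
# the standard Response object returned by this llama_index version):
#
#   response = citation_query_engine.query("...")
#   for source in response.source_nodes:
#       print(source.node.get_text()[:200])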
# Tree-summarize engine over the list index (defined here but not wired into the UI below).
list_query_engine = indices[0].as_query_engine(
    response_mode='tree_summarize',
    use_async=True,
)

# Plain retrieval engine over the vector index, used when citations are not requested.
vector_query_engine = indices[1].as_query_engine(similarity_top_k=2)
def chatbot(input_text, use_citations):
    """Route the query to the citation engine or the plain vector engine."""
    engine = citation_query_engine if use_citations else vector_query_engine
    response = engine.query(input_text)
    return str(response)
iface = gr.Interface(
    fn=chatbot,
    inputs=[
        gr.Textbox(lines=7, value="What are examples for short, intermediate and long acting glucocorticoids?", label="Enter your text here"),
        gr.Checkbox(label="Use Citation Engine instead of Query Engine."),
    ],
    outputs="text",
    title="Project Rang and Dale",
)
iface.launch()