"""Gradio Q&A app over a persisted llama_index vector index.

Loads a previously built index from ``index_dir_full`` and serves a
single-turn (no chat history) question-answering interface.
"""

import os

import gradio as gr
import openai  # imported so the OpenAI client is available to llama_index
from llama_index import (
    GPTVectorStoreIndex,
    SimpleDirectoryReader,
    StorageContext,
    load_index_from_storage,
)
from llama_index.node_parser import SimpleNodeParser

# llama_index/openai read the key from OPENAI_API_KEY; mirror it from the
# deployment's "my_key" environment variable (raises KeyError if unset).
os.environ["OPENAI_API_KEY"] = os.environ["my_key"]

# Rebuild the storage context from the persisted index directory,
# load the index, and start a query engine over it.
storage_context = StorageContext.from_defaults(persist_dir="index_dir_full")
index = load_index_from_storage(storage_context)
query_engine = index.as_query_engine()


def get_model_reply_no_prev_context(question):
    """Answer *question* using the index's query engine, with no chat history.

    Parameters
    ----------
    question : str
        The user's question as typed into the Gradio textbox.

    Returns
    -------
    str
        The engine's answer text. Whitespace is stripped from both ends;
        the original code sliced off exactly the first character
        (``response.response[1:]``) to drop a leading newline, which is
        brittle — ``strip()`` handles the same case robustly.
    """
    response = query_engine.query(question)
    return response.response.strip()


title = "Knowledge Center at Penta Building Group"
description = """
The program is trained to answer questions based on the documentation of
'Lessons Learned' from previous projects!
"""
article = "Your feedback matters! If you like it, contact us at mgupta70@asu.edu"

if __name__ == "__main__":
    gr.Interface(
        fn=get_model_reply_no_prev_context,
        inputs="textbox",
        outputs="text",
        title=title,
        description=description,
        article=article,
        examples=[
            ["Which code is to be used while planning a pedestrian walkway?"],
            ["How to determine the exact location of existing underground lines?"],
            ["What one should do to avoid struck-by hazard incidents?"],
        ],
    ).launch()