from llama_index import GPTVectorStoreIndex, SimpleDirectoryReader
import os
from llama_index.node_parser import SimpleNodeParser
from llama_index import StorageContext, load_index_from_storage
import gradio as gr
import openai

os.environ['OPENAI_API_KEY'] = os.environ["my_key"]

# rebuild the storage context from the persisted index directory
storage_context = StorageContext.from_defaults(persist_dir="index_dir_full")

# load the index
index = load_index_from_storage(storage_context)

# start a query engine over the loaded index
query_engine = index.as_query_engine()

# APP
def get_model_reply_no_prev_context(question):
    response = query_engine.query(question)
    # drop the leading newline character from the response text
    final_response = response.response[1:]
    return final_response

# echo stub kept for testing the UI without querying the index
# def get_model_reply_no_prev_context(question):
#     final_response = question
#     return final_response

title = "Knowledge Center at Penta Building Group"
description = """