|
from llama_index import GPTVectorStoreIndex, SimpleDirectoryReader |
|
import os |
|
import time |
|
from llama_index.node_parser import SimpleNodeParser |
|
from llama_index import StorageContext, load_index_from_storage |
|
import gradio as gr |
|
import openai |
|
|
|
os.environ['OPENAI_API_KEY'] = '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' |
|
|
|
|
|
# Rehydrate a previously persisted vector index from disk.
# Assumes an index was already built and saved under "index_dir"
# (e.g. by a separate ingestion script) — this will raise if the
# directory is missing or empty.
storage_context = StorageContext.from_defaults(persist_dir="index_dir")




# Reconstruct the index object from the persisted storage context.
index = load_index_from_storage(storage_context)




# Query engine used by the Gradio handler below; created once at module
# load so each request reuses the same engine (no per-call setup cost).
query_engine = index.as_query_engine()
|
|
|
|
|
|
|
|
|
|
|
def get_model_reply_no_prev_context(question):
    """Answer a single question against the persisted index (no chat history).

    Parameters
    ----------
    question : str
        The user's natural-language question.

    Returns
    -------
    str
        The query engine's answer with leading whitespace removed; an empty
        string if the engine produced no text.
    """
    response = query_engine.query(question)

    # The original code used ``response.response[1:]`` to drop a leading
    # newline, but that unconditionally discards the first character even
    # when the answer does not start with whitespace, and raises TypeError
    # when ``response.response`` is None. lstrip() with a None guard fixes
    # both.
    answer_text = response.response or ""
    return answer_text.lstrip()
|
|
|
# Text shown in the Gradio UI header, subtitle, and footer.
title = "Knowledge Center at Penta Building Group"

description = """

The program is trained to answer questions based on the documentation of 'Lessons Learned' from previous projects!



"""

# Fixed: the original read "matters!If" — missing space after the
# exclamation mark in this user-facing footer text.
article = "Your feedback matters! If you like it, contact me at mgupta70@asu.edu"
|
|
|
# Build the web UI: a single text box wired to the query handler.
# `live=True` re-runs the handler as the user types; `share=True`
# publishes a temporary public URL in addition to the local server.
demo = gr.Interface(
    fn=get_model_reply_no_prev_context,
    inputs="textbox",
    outputs="text",
    title=title,
    description=description,
    article=article,
    examples=[
        ["Which code is to be used while planning a pedestrian walkway?"],
        ["What is AHJ?"],
    ],
    live=True,
)
demo.launch(share=True)