File size: 1,288 Bytes
74b36f6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
from llama_index import GPTVectorStoreIndex, SimpleDirectoryReader
import os
import time
from llama_index.node_parser import SimpleNodeParser
from llama_index import StorageContext, load_index_from_storage
import gradio as gr
import openai

# SECURITY: the original file hard-coded a live OpenAI API key here. Never
# commit secrets to source; that key must be considered compromised and
# revoked. Supply the key via the environment instead:
#   export OPENAI_API_KEY=sk-...
if not os.environ.get("OPENAI_API_KEY"):
    raise RuntimeError(
        "OPENAI_API_KEY is not set; export it before running this app."
    )

# Rebuild the storage context pointing at the persisted index directory.
storage_context = StorageContext.from_defaults(persist_dir="index_dir")

# Load the previously built vector index from disk.
index = load_index_from_storage(storage_context)

# Start a query (search) engine over the loaded index.
query_engine = index.as_query_engine()



# APP

def get_model_reply_no_prev_context(question):
    """Answer *question* from the vector index, without any chat history.

    Parameters
    ----------
    question : str
        The user's question, as typed into the Gradio textbox.

    Returns
    -------
    str
        The engine's answer with leading whitespace removed.
    """
    response = query_engine.query(question)
    # The original sliced off the first character (`[1:]`) to drop a leading
    # space, which mangles answers that don't start with one and raises if
    # the response text is None. lstrip() on a None-safe value fixes both.
    text = response.response or ""
    return text.lstrip()

# Static copy shown in the Gradio interface.
title = "Knowledge Center at Penta Building Group"
description = """
The program is trained to answer questions based on the documentation of 'Lessons Learned' from previous projects!

"""

# Fixed a user-facing typo: missing space after "!" ("matters!If").
article = "Your feedback matters! If you like it, contact me at mgupta70@asu.edu"

# Build the Q&A web UI and serve it. `live=True` re-runs the query on every
# input change; `share=True` exposes a temporary public Gradio link.
demo = gr.Interface(
    fn=get_model_reply_no_prev_context,
    inputs="textbox",
    outputs="text",
    title=title,
    description=description,
    article=article,
    examples=[
        ["Which code is to be used while planning a pedestrian walkway?"],
        ["What is AHJ?"],
    ],
    live=True,
)
demo.launch(share=True)