Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -23,60 +23,49 @@ import pandas as pd
|
|
23 |
from llama_index import SimpleDirectoryReader, GPTListIndex, readers, GPTSimpleVectorIndex, LLMPredictor, PromptHelper
|
24 |
from langchain import OpenAI
|
25 |
from IPython.display import Markdown, display
|
|
|
26 |
import gradio as gr
|
27 |
-
import gradio
|
28 |
-
import pandas as pd
|
29 |
-
from llama_index import SimpleDirectoryReader, GPTListIndex, readers, GPTSimpleVectorIndex, LLMPredictor, PromptHelper
|
30 |
-
from langchain import OpenAI
|
31 |
-
import sys
|
32 |
-
import os
|
33 |
-
from IPython.display import Markdown, display
|
34 |
-
import pandas as pd
|
35 |
-
from llama_index import SimpleDirectoryReader, GPTListIndex, readers, GPTSimpleVectorIndex, LLMPredictor, PromptHelper
|
36 |
-
from langchain import OpenAI
|
37 |
-
from IPython.display import Markdown, display
|
38 |
-
#import streamlit as st
|
39 |
-
import pickle
|
40 |
# Load the Q&A dataset that Shegardi answers from (sheet 'dataset').
df = pd.read_excel('Shegardi_dataset.xlsx', sheet_name='dataset')

# SECURITY: a live OpenAI API key was hard-coded here and committed to the
# repository — it must be revoked/rotated immediately. To stay
# backward-compatible we keep the literal as a fallback, but prefer a key
# already present in the environment (e.g. set as a Space secret).
os.environ['OPENAI_API_KEY'] = os.environ.get(
    'OPENAI_API_KEY', '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'
)
|
42 |
-
|
43 |
def construct_index(directory_path):
    """Build and persist a GPT vector index over the files in *directory_path*.

    Loads every document under the directory, embeds it with an OpenAI
    text-davinci-003 predictor, writes the index to 'index.json' in the
    working directory, and returns the in-memory index object.
    """
    # Prompt sizing for the davinci model.
    max_input_size = 4096      # model context window
    num_outputs = 2000         # max completion tokens
    max_chunk_overlap = 20     # token overlap between adjacent chunks
    chunk_size_limit = 600     # max tokens per chunk

    predictor = LLMPredictor(
        llm=OpenAI(temperature=0.5, model_name="text-davinci-003", max_tokens=num_outputs)
    )
    helper = PromptHelper(
        max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit
    )

    docs = SimpleDirectoryReader(directory_path).load_data()
    index = GPTSimpleVectorIndex(docs, llm_predictor=predictor, prompt_helper=helper)

    # Persist so later queries can reload without re-embedding.
    index.save_to_disk('index.json')

    return index
|
64 |
|
65 |
-
|
66 |
-
|
67 |
-
|
|
|
68 |
|
69 |
-
|
|
|
70 |
response = index.query(query, response_mode="compact")
|
71 |
return response.response
|
72 |
-
|
73 |
# Gradio front-end: one text box in, a plain-text answer out.
iface = gr.Interface(
    fn=ask_ai,
    inputs="text",
    outputs="text",
    title="The following is a conversation with a human called Shegardi. Shegardi is helpful, precise, truthful, and very friendly. Also, Shegardi is an employee of Warba Bank, located in Kuwait. Shegardi will only use the information provided to him. ",
    description="Enter a question and get an answer from Shegardi.",
)
iface.launch()
|
76 |
-
|
77 |
-
|
78 |
-
|
79 |
-
|
80 |
|
81 |
|
82 |
|
|
|
23 |
from llama_index import SimpleDirectoryReader, GPTListIndex, readers, GPTSimpleVectorIndex, LLMPredictor, PromptHelper
|
24 |
from langchain import OpenAI
|
25 |
from IPython.display import Markdown, display
|
26 |
+
import streamlit as st
|
27 |
import gradio as gr
|
28 |
+
#import gradio
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
29 |
# Load the Q&A dataset that Shegardi answers from (sheet 'dataset').
df = pd.read_excel('Shegardi_dataset.xlsx', sheet_name='dataset')

# SECURITY: two live OpenAI API keys were committed here (one active, one in
# a comment) — both must be revoked/rotated immediately. To stay
# backward-compatible we keep the literal as a fallback, but prefer a key
# already present in the environment (e.g. set as a Space secret).
os.environ['OPENAI_API_KEY'] = os.environ.get(
    'OPENAI_API_KEY', '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'
)
|
|
|
32 |
def construct_index(directory_path):
    """Build and persist a GPT vector index over the files in *directory_path*.

    Loads every document under the directory, embeds it with an OpenAI
    text-davinci-003 predictor, writes the index to 'index.json' in the
    working directory, and returns the in-memory index object.
    """
    # Prompt sizing for the davinci model.
    max_input_size = 4096      # model context window
    num_outputs = 2000         # max completion tokens
    max_chunk_overlap = 20     # token overlap between adjacent chunks
    chunk_size_limit = 600     # max tokens per chunk

    predictor = LLMPredictor(
        llm=OpenAI(temperature=0.5, model_name="text-davinci-003", max_tokens=num_outputs)
    )
    helper = PromptHelper(
        max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit
    )

    docs = SimpleDirectoryReader(directory_path).load_data()
    index = GPTSimpleVectorIndex(docs, llm_predictor=predictor, prompt_helper=helper)

    # Persist so ask_ai() can reload the index without re-embedding.
    index.save_to_disk('index.json')

    return index
|
55 |
|
56 |
+
#construct_index("context_data/data")
|
57 |
+
|
58 |
+
#import streamlit as st
|
59 |
+
# Include other necessary imports here
|
60 |
|
61 |
+
def ask_ai(query):
    """Answer *query* against the vector index saved by construct_index().

    The index is reloaded from 'index.json' on every call; the compact
    response mode keeps the LLM prompt small.
    """
    loaded_index = GPTSimpleVectorIndex.load_from_disk('index.json')
    answer = loaded_index.query(query, response_mode="compact")
    return answer.response
|
65 |
+
|
66 |
# Gradio front-end: one text box in, a plain-text answer out.
# share=True asks Gradio for a public tunnel URL in addition to the local one.
iface = gr.Interface(
    fn=ask_ai,
    inputs="text",
    outputs="text",
    title="The following is a conversation with a human called Shegardi. Shegardi is helpful, precise, truthful, and very friendly. Also, Shegardi is an employee of Warba Bank, located in Kuwait. Shegardi will only use the information provided to him. ",
    description="Enter a question and get an answer from Shegardi.",
)
iface.launch(share=True)
|
|
|
|
|
|
|
|
|
69 |
|
70 |
|
71 |
|