sudip1310 committed on
Commit
ba73332
1 Parent(s): 48dd34a

Create app.py

Files changed (1)
  1. app.py +78 -0
app.py ADDED
@@ -0,0 +1,78 @@
+ !git clone https://github.com/sudipmondal1310/Internship.git
+ !pip install llama-index==0.5.6
+ !pip install langchain==0.0.148
+ from llama_index import SimpleDirectoryReader, GPTSimpleVectorIndex, LLMPredictor, PromptHelper, ServiceContext
+ from langchain import OpenAI
+ import os
+ from IPython.display import Markdown
+
+ def construct_index(directory_path):
+     # set maximum input size
+     max_input_size = 4096
+     # set number of output tokens
+     num_outputs = 2000
+     # set maximum chunk overlap
+     max_chunk_overlap = 20
+     # set chunk size limit
+     chunk_size_limit = 600
+
+     # define prompt helper
+     prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)
+
+     # define LLM
+     llm_predictor = LLMPredictor(llm=OpenAI(temperature=0.5, model_name="text-davinci-003", max_tokens=num_outputs))
+
+     # load the documents from the cloned repository
+     documents = SimpleDirectoryReader(directory_path).load_data()
+
+     # build the vector index and persist it to disk
+     service_context = ServiceContext.from_defaults(llm_predictor=llm_predictor, prompt_helper=prompt_helper)
+     index = GPTSimpleVectorIndex.from_documents(documents, service_context=service_context)
+     index.save_to_disk('index.json')
+
+     return index
+
+ # NOTE: the original commit hard-coded an OpenAI API key here; replace with your own key
+ os.environ["OPENAI_API_KEY"] = "sk-..."
+
+ construct_index("/content/Internship/Data")
+
+ !pip install gradio
+
+ def ask_ai_new(query):
+     # load the persisted index and answer a single query
+     index = GPTSimpleVectorIndex.load_from_disk('index.json')
+     response = index.query(query)
+     # wrap the answer in bold markup for display in the chat window
+     answer = Markdown(f"<b>{response.response}</b>")
+     return answer.data
+
+ import time
+ import gradio as gr
+
+ with gr.Blocks() as demo:
+     chatbot = gr.Chatbot()
+     query = gr.Textbox(label="Enter your message here")
+     clear = gr.Button("Clear")
+
+     def user(user_message, history):
+         # append the user's message to the chat history and clear the textbox
+         return "", history + [[user_message, None]]
+
+     def bot(history):
+         # answer the latest user message and stream it back character by character
+         bot_message = ask_ai_new(history[-1][0])
+         print(bot_message)
+         history[-1][1] = ""
+         for character in bot_message:
+             history[-1][1] += character
+             time.sleep(0.05)
+             yield history
+
+     query.submit(user, [query, chatbot], [query, chatbot], queue=False).then(
+         bot, chatbot, chatbot
+     )
+     clear.click(lambda: None, None, chatbot, queue=False)
+
+ demo.queue()
+ demo.launch()
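
As a quick sanity check of what this commit builds, the persisted index.json can also be queried outside the Gradio UI. The snippet below is a minimal, hypothetical sketch (not part of this commit): it assumes the same llama-index 0.5.6 API used in app.py, that construct_index() has already written index.json, and that OPENAI_API_KEY is set in the environment.

from llama_index import GPTSimpleVectorIndex

# load the index that construct_index() saved to disk
index = GPTSimpleVectorIndex.load_from_disk('index.json')

# run a single query against the indexed documents and print the answer text
response = index.query("What does the Internship repository contain?")  # example question, purely illustrative
print(response.response)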