Files changed (1)
  1. app.py +77 -23
app.py CHANGED
@@ -1,30 +1,84 @@
  import gradio as gr
  import openai

- def Question(OpenAI_Key, Ask_Question):
-     # Send the question to the OpenAI completion endpoint
-     openai.api_key = OpenAI_Key
-     # Set up the model and prompt
-     model_engine = "text-davinci-003"
-     #prompt = "who is Elon Musk?"
-     # Generate a response
-     completion = openai.Completion.create(
-         engine=model_engine,
-         prompt=f"{Ask_Question}",
-         max_tokens=1024,
-         n=1,
-         stop=None,
-         temperature=0.5,
-     )
-     response = completion.choices[0].text
-     return response

- demo = gr.Interface(
-     title='OpenAI ChatGPT Application',
-     fn=Question,
-     inputs=["text", "text"],
-     outputs="text",
- )
-
- demo.launch()

+ # My account
+
  import gradio as gr
+ import pinecone
  import openai

+ index_name = 'gpt-4-langchain-docs'
+
+ # initialize connection to pinecone
+ pinecone.init(
+     api_key="08dc4515-1799-4a23-81a0-b9f86975f84f",  # app.pinecone.io (console)
+     environment="us-west4-gcp"  # next to API key in console
+ )
+
+ # check if index already exists (it shouldn't if this is the first time)
+ if index_name not in pinecone.list_indexes():
+     # if it does not exist, create the index
+     pinecone.create_index(
+         index_name,
+         dimension=1536,  # embedding size of text-embedding-ada-002
+         metric='dotproduct'
+     )
+ # connect to index
+ index = pinecone.GRPCIndex(index_name)
+
+
+ def ask(OpenAI_key, query):
+     openai.api_key = OpenAI_key  # platform.openai.com
+
+     embed_model = "text-embedding-ada-002"
+
+     # embed the incoming question
+     res = openai.Embedding.create(
+         input=[query],
+         engine=embed_model
+     )
+     xq = res['data'][0]['embedding']
+
+     # retrieve the most relevant contexts (including the questions) from Pinecone
+     res = index.query(xq, top_k=5, include_metadata=True)
+     contexts = [item['metadata']['text'] for item in res['matches']]
+
+     # prepend the retrieved contexts to the user's question
+     augmented_query = "\n\n---\n\n".join(contexts) + "\n\n-----\n\n" + query
+
+     primer = """You are a Q&A bot. A highly intelligent system that answers
+     user questions based on the information provided by the user above
+     each question. If the information cannot be found in the information
+     provided by the user you truthfully say "I don't know".
+     """
+
+     res = openai.ChatCompletion.create(
+         model="gpt-3.5-turbo",
+         messages=[
+             {"role": "system", "content": primer},
+             {"role": "user", "content": augmented_query}
+         ]
+     )
+     response = res['choices'][0]['message']['content']
+
+     return response
+
+ demo = gr.Interface(title='ShipsGo AI Assistant', fn=ask, inputs=["text", "text"], outputs="text")
+ demo.launch()
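
Note: the new ask() function reads item['metadata']['text'] from every Pinecone match, so the 'gpt-4-langchain-docs' index must already hold vectors whose metadata carries the source text; that ingestion step is not part of this change. A minimal sketch of what it could look like, reusing the index handle and embedding model from the code above (the upsert_docs helper name and the doc-{i} ids are illustrative, not part of the app):

import openai
import pinecone

def upsert_docs(index, docs):
    # embed each document with the same model the app queries with (1536-dim ada-002)
    res = openai.Embedding.create(input=docs, engine="text-embedding-ada-002")
    vectors = [
        (f"doc-{i}", record["embedding"], {"text": docs[i]})  # 'text' metadata is what ask() reads back
        for i, record in enumerate(res["data"])
    ]
    index.upsert(vectors=vectors)

upsert_docs(index, [
    "Sample document text goes here",
    "there will be several phrases in each batch",
])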