# NOTE(review): removed scraped Hugging Face page chrome (Space status banner,
# "Runtime error" labels, file-size line, git blame hashes, column ruler) —
# that text was page markup, not program source, and is not valid Python.
# Benim hesap
import os

import gradio as gr
import openai
import pinecone
index_name = 'gpt-4-langchain-docs'

# Initialize connection to Pinecone.
# NOTE(security): the API key and environment were previously hard-coded here
# (a leaked credential). They are now read from the environment, keeping the
# old literals only as backward-compatible fallbacks — rotate the key and
# delete the fallbacks.
pinecone.init(
    api_key=os.getenv("PINECONE_API_KEY", "08dc4515-1799-4a23-81a0-b9f86975f84f"),
    environment=os.getenv("PINECONE_ENVIRONMENT", "us-west4-gcp"),  # next to API key in console
)

# Create the index on first run only.
if index_name not in pinecone.list_indexes():
    # BUG FIX: the original computed the dimension as
    # len(res['data'][0]['embedding']), but `res` is undefined at module
    # import time, so this branch raised NameError. text-embedding-ada-002
    # vectors have a fixed dimension of 1536.
    pinecone.create_index(
        index_name,
        dimension=1536,
        metric='dotproduct'
    )

# connect to index
index = pinecone.GRPCIndex(index_name)
def ask(OpenAI_key, query):
    """Answer `query` with retrieval-augmented GPT-3.5 over the Pinecone index.

    Parameters
    ----------
    OpenAI_key : str
        The caller's OpenAI API key (from platform.openai.com).
    query : str
        The user's question.

    Returns
    -------
    str
        The model's answer; the primer instructs it to reply "I don't know"
        when the retrieved context lacks the information.
    """
    openai.api_key = OpenAI_key  # platform.openai.com
    embed_model = "text-embedding-ada-002"

    # Embed the query only. (The original additionally embedded two throwaway
    # sample sentences on every call — a wasted API request whose result was
    # immediately overwritten — and re-assigned a local `index_name` that was
    # never used. Both removed. An unused IPython import was removed too.)
    res = openai.Embedding.create(
        input=[query],
        engine=embed_model
    )
    xq = res['data'][0]['embedding']

    # Get the most relevant stored contexts (including the questions).
    res = index.query(xq, top_k=5, include_metadata=True)
    contexts = [item['metadata']['text'] for item in res['matches']]
    augmented_query = "\n\n---\n\n".join(contexts) + "\n\n-----\n\n" + query

    # System primer: restrict the model to the supplied context.
    primer = f"""You are Q&A bot. A highly intelligent system that answers
user questions based on the information provided by the user above
each question. If the information can not be found in the information
provided by the user you truthfully say "I don't know".
"""
    res = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system", "content": primer},
            {"role": "user", "content": augmented_query}
        ]
    )
    return res['choices'][0]['message']['content']
# Gradio UI: two text inputs (OpenAI API key, question) -> one text answer.
demo = gr.Interface(
    title='ShipsGo AI Assistant',
    fn=ask,
    inputs=["text", "text"],
    outputs="text",
)

# BUG FIX: the original line ended with a stray " |" scrape artifact, which is
# a syntax error. Guarding launch() keeps the module importable for tests.
if __name__ == "__main__":
    demo.launch()