jamescalam committed
Commit: c873676
Parent: ad29685

switch to text-davinci-003

Files changed (1)
  1. app.py (+4 -5)
app.py CHANGED
@@ -35,7 +35,7 @@ def init_pinecone(index_name):
     count = stats['namespaces']['']['vector_count']
     return index, dims, count
 
-def create_context(question, index, lib_meta, max_len=3750, size="curie", top_k=5):
+def create_context(question, index, lib_meta, max_len=3750, top_k=5):
     """
     Find most relevant context for a question via Pinecone search
     """
@@ -69,7 +69,7 @@ def create_context(question, index, lib_meta, max_len=3750, size="curie", top_k=
 
 def answer_question(
     index,
-    fine_tuned_qa_model="text-davinci-002",
+    fine_tuned_qa_model="text-davinci-003",
     question="Am I allowed to publish model outputs to Twitter, without a human review?",
     instruction="Answer the question based on the context below, and if the question can't be answered based on the context, say \"I don't know\"\n\nContext:\n{0}\n\n---\n\nQuestion: {1}\nAnswer:",
     max_len=3550,
@@ -88,7 +88,6 @@ def answer_question(
         index,
         lib_meta=domains,
         max_len=max_len,
-        size=size,
         top_k=top_k
     )
     if debug:
@@ -200,11 +199,11 @@ st.sidebar.write(f"""
 
 **Pinecone index size**: {count}
 
-**OpenAI embedding model**: *text-search-curie-query-001*
+**OpenAI embedding model**: *text-embedding-ada-002*
 
 **Vector dimensionality**: {dims}
 
-**OpenAI generation model**: *text-davinci-002*
+**OpenAI generation model**: *text-davinci-003*
 
 ---
 
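For reference, the sketch below shows how a retrieve-then-generate flow like create_context / answer_question can be wired to the models named in the sidebar, assuming the pre-1.0 openai Python client and pinecone-client 2.x that were current alongside text-davinci-003. The helper names retrieve_context and generate_answer, the "text" metadata field, and the generation settings are illustrative assumptions, not the app's exact code.

import openai
import pinecone

# Illustrative sketch only: assumes the pre-1.0 `openai` client and
# pinecone-client 2.x; helper names, the "text" metadata field, and the
# generation settings below are assumptions, not the app's exact code.
EMBED_MODEL = "text-embedding-ada-002"
GEN_MODEL = "text-davinci-003"

def retrieve_context(question, index, top_k=5):
    # Embed the question, then pull the top_k most similar chunks from Pinecone
    xq = openai.Embedding.create(input=[question], engine=EMBED_MODEL)["data"][0]["embedding"]
    res = index.query(vector=xq, top_k=top_k, include_metadata=True)
    return "\n\n###\n\n".join(m["metadata"]["text"] for m in res["matches"])

def generate_answer(question, context):
    # Same instruction template as in the diff above, filled with retrieved context
    prompt = (
        "Answer the question based on the context below, and if the question "
        "can't be answered based on the context, say \"I don't know\"\n\n"
        f"Context:\n{context}\n\n---\n\nQuestion: {question}\nAnswer:"
    )
    res = openai.Completion.create(
        model=GEN_MODEL,  # the model this commit switches to
        prompt=prompt,
        temperature=0,
        max_tokens=400,
    )
    return res["choices"][0]["text"].strip()

# Usage (placeholder credentials and index name):
# pinecone.init(api_key="...", environment="...")
# index = pinecone.Index("...")
# answer = generate_answer(question, retrieve_context(question, index))

In the app itself, create_context and answer_question carry extra arguments (lib_meta, max_len, debug) that are omitted here for brevity.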