Upload 3 files
app.py CHANGED

@@ -70,7 +70,7 @@ with st.sidebar:
 st.subheader("Select Options:")
 
 with st.sidebar:
-    num_results = int(st.number_input("Number of Results to query", 1, 15, value=
+    num_results = int(st.number_input("Number of Results to query", 1, 15, value=6))
 
 
 # Choose encoder model

@@ -108,7 +108,7 @@ elif encoder_model == "SGPT":
 
 
 with st.sidebar:
-    window = int(st.number_input("Sentence Window Size", 0, 10, value=
+    window = int(st.number_input("Sentence Window Size", 0, 10, value=1))
 
 with st.sidebar:
     threshold = float(
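For reference, a minimal sketch of how the repaired sidebar controls read in context, assuming a standard Streamlit layout. The two st.number_input calls mirror the diff; the threshold widget is an assumption, since the diff only shows the start of that line (threshold = float(...)).

# Sketch of the sidebar options; names follow the diff, the threshold widget is assumed.
import streamlit as st

st.subheader("Select Options:")

with st.sidebar:
    # Number of passages to retrieve per query (1-15, default 6)
    num_results = int(st.number_input("Number of Results to query", 1, 15, value=6))

with st.sidebar:
    # Neighbouring sentences to include around each hit (0-10, default 1)
    window = int(st.number_input("Sentence Window Size", 0, 10, value=1))

with st.sidebar:
    # Hypothetical similarity cut-off; the diff only shows "threshold = float(",
    # so this widget and its bounds are illustrative assumptions.
    threshold = float(st.slider("Similarity Threshold", 0.0, 1.0, 0.5))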
utils.py CHANGED

@@ -115,19 +115,16 @@ def text_lookup(data, sentence_ids):
 
 
 def generate_prompt(query_text, context_list):
-    context = "
-    prompt = f"""Answer the question as
-
-
-
-
-    Q: {query_text}
-    A:"""
+    context = " ".join(context_list)
+    prompt = f"""Answer the question as accurately as possible using the provided context. Try to include as many key details as possible.
+    Context: {context}
+    Question: {query_text}
+    Answer:"""
     return prompt
 
 
 def generate_prompt_2(query_text, context_list):
-    context = "
+    context = " ".join(context_list)
     prompt = f"""
     Context information is below:
     ---------------------

@@ -144,9 +141,9 @@ def gpt_model(prompt):
         model="text-davinci-003",
         prompt=prompt,
         temperature=0.1,
-        max_tokens=
+        max_tokens=1024,
         top_p=1.0,
-        frequency_penalty=0.
+        frequency_penalty=0.5,
         presence_penalty=1,
     )
     return response.choices[0].text
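A minimal end-to-end sketch of how the repaired helpers fit together after this commit: generate_prompt joins the retrieved passages into a single context string and wraps it in the instruction template, and gpt_model sends the prompt to the completions endpoint. The prompt template and completion parameters come from the diff; the openai.Completion.create call (legacy openai<1.0 SDK) is assumed from the keyword arguments and the response.choices[0].text access shown in gpt_model.

# Sketch under the assumptions above; only the values shown in the diff are taken as given.
import openai


def generate_prompt(query_text, context_list):
    # Flatten the retrieved passages into one context string.
    context = " ".join(context_list)
    prompt = f"""Answer the question as accurately as possible using the provided context. Try to include as many key details as possible.
    Context: {context}
    Question: {query_text}
    Answer:"""
    return prompt


def gpt_model(prompt):
    # Legacy Completions call; max_tokens and frequency_penalty are the values
    # added in this commit.
    response = openai.Completion.create(
        model="text-davinci-003",
        prompt=prompt,
        temperature=0.1,
        max_tokens=1024,
        top_p=1.0,
        frequency_penalty=0.5,
        presence_penalty=1,
    )
    return response.choices[0].text


# Illustrative usage:
# answer = gpt_model(generate_prompt("What does sentence windowing do?", retrieved_passages))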