Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -227,18 +227,19 @@ def StreamLLMChatResponse(prompt):
|
|
227 |
except:
|
228 |
st.write('Llama model is asleep. Starting up now on A10 - please give 5 minutes then retry as KEDA scales up from zero to activate running container(s).')
|
229 |
|
230 |
-
def query(filename):
    """Read *filename* as raw bytes and POST it to the inference endpoint.

    Returns the decoded JSON response from the model server.
    """
    with open(filename, "rb") as f:
        # Bug fix: original had `data = f.read` (the bound method object,
        # not the file contents) — the POST body was never the file bytes.
        data = f.read()
    st.write('Posting request to model ' + API_URL_IE)
    response = requests.post(API_URL_IE, headers=headers, data=data)
    return response.json()
|
236 |
|
237 |
# 4. Run query with payload
|
238 |
-
|
239 |
-
|
240 |
-
|
241 |
-
|
|
|
242 |
|
243 |
def get_output(prompt):
    """Build the standard inference payload for *prompt* and run the query."""
    payload = {"inputs": prompt}
    return query(payload)
|
|
|
227 |
except:
|
228 |
st.write('Llama model is asleep. Starting up now on A10 - please give 5 minutes then retry as KEDA scales up from zero to activate running container(s).')
|
229 |
|
230 |
+
#def query(filename):
|
231 |
+
# with open(filename, "rb") as f:
|
232 |
+
# data = f.read
|
233 |
+
# st.write('Posting request to model ' + API_URL_IE)
|
234 |
+
# response = requests.post(API_URL_IE, headers=headers, data=data)
|
235 |
+
# return response.json()
|
236 |
|
237 |
# 4. Run query with payload
|
238 |
+
def query(payload):
    """POST *payload* as JSON to the inference endpoint and return the result.

    Also renders the raw JSON response in the Streamlit UI so the user can
    see what the model returned.
    """
    # NOTE(review): the log line previously referenced API_URL_IE while the
    # request was actually sent to API_URL — log the endpoint really in use.
    # Confirm which endpoint was intended.
    st.write('Posting request to model ' + API_URL)
    response = requests.post(API_URL, headers=headers, json=payload)
    # Parse the body once instead of calling response.json() twice.
    result = response.json()
    st.markdown(result)
    return result
|
243 |
|
244 |
def get_output(prompt):
    """Build the standard inference payload for *prompt* and run the query."""
    payload = {"inputs": prompt}
    return query(payload)
|