Update app.py
app.py CHANGED
@@ -190,6 +190,7 @@ def StreamLLMChatResponse(prompt):
     try:
         endpoint_url = API_URL
         hf_token = API_KEY
+        st.write('Running client ' + endpoint_url)
         client = InferenceClient(endpoint_url, token=hf_token)
         gen_kwargs = dict(
             max_new_tokens=512,
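The trace added here fires just before the Inference Endpoint client is built. For reference, a minimal sketch of how the surrounding streaming call could look, assuming API_URL and API_KEY come from the app's settings and that huggingface_hub's InferenceClient.text_generation streams tokens into Streamlit (the function name and wiring below are illustrative, not the Space's exact code):

# Illustrative sketch only; API_URL / API_KEY are assumed app-level settings.
import streamlit as st
from huggingface_hub import InferenceClient

def stream_llm_chat_response_sketch(prompt, endpoint_url, hf_token):
    st.write('Running client ' + endpoint_url)   # same trace the diff adds
    client = InferenceClient(endpoint_url, token=hf_token)
    gen_kwargs = dict(max_new_tokens=512, temperature=0.5)
    res_box, report = st.empty(), []
    # stream=True makes text_generation yield the completion token by token
    for token in client.text_generation(prompt, stream=True, **gen_kwargs):
        report.append(token)
        res_box.markdown(''.join(report))
    return ''.join(report)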
@@ -252,6 +253,7 @@ def transcribe_audio(openai_key, file_path, model):
     }
     with open(file_path, 'rb') as f:
         data = {'file': f}
+        st.write('STT transcript ' + OPENAI_API_URL)
         response = requests.post(OPENAI_API_URL, headers=headers, files=data, data={'model': model})
         if response.status_code == 200:
             st.write(response.json())
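The second trace fires right before the speech-to-text request. A hedged sketch of that call, assuming OPENAI_API_URL points at OpenAI's audio transcription endpoint and headers carries a Bearer token built from openai_key (the helper name is hypothetical):

# Sketch under stated assumptions; the OPENAI_API_URL value is assumed, not confirmed.
import requests
import streamlit as st

OPENAI_API_URL = 'https://api.openai.com/v1/audio/transcriptions'

def transcribe_audio_sketch(openai_key, file_path, model='whisper-1'):
    headers = {'Authorization': f'Bearer {openai_key}'}
    with open(file_path, 'rb') as f:
        data = {'file': f}
        st.write('STT transcript ' + OPENAI_API_URL)   # trace added in the diff
        response = requests.post(OPENAI_API_URL, headers=headers,
                                 files=data, data={'model': model})
    if response.status_code == 200:
        st.write(response.json())
        return response.json().get('text')
    return None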
@@ -378,6 +380,8 @@ def chat_with_model(prompt, document_section, model_choice='gpt-3.5-turbo'):
     res_box = st.empty()
     collected_chunks = []
     collected_messages = []
+
+    st.write('LLM stream ' + 'gpt-3.5-turbo')
     for chunk in openai.ChatCompletion.create(model='gpt-3.5-turbo', messages=conversation, temperature=0.5, stream=True):
         collected_chunks.append(chunk)
         chunk_message = chunk['choices'][0]['delta']
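The last hunk adds a trace before the chat stream starts. The lines that follow it collect the streamed deltas; a short sketch of the usual reassembly pattern with the pre-1.0 openai client that the diff itself calls (conversation is assumed to be the message list built earlier in chat_with_model):

# Sketch of delta reassembly with the legacy openai<1.0 ChatCompletion API.
import openai
import streamlit as st

def chat_with_model_sketch(conversation, model_choice='gpt-3.5-turbo'):
    res_box = st.empty()
    collected_messages = []
    st.write('LLM stream ' + model_choice)   # trace added in the diff
    for chunk in openai.ChatCompletion.create(model=model_choice,
                                              messages=conversation,
                                              temperature=0.5, stream=True):
        chunk_message = chunk['choices'][0]['delta']
        # each delta carries a partial 'content'; the final delta is empty
        collected_messages.append(chunk_message.get('content', ''))
        res_box.markdown(''.join(collected_messages))
    return ''.join(collected_messages)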