pavvloff committed on
Commit
182cd79
1 Parent(s): fe6de2a

Update backend/query_llm.py

Browse files
Files changed (1) hide show
  1. backend/query_llm.py +2 -1
backend/query_llm.py CHANGED
@@ -143,7 +143,8 @@ def generate_openai(prompt: str, history: str, temperature: float = 0.9, max_new
143
  stream=True)
144
  output = ""
145
  for chunk in stream:
146
- output += chunk.choices[0].delta.content
 
147
  yield output
148
 
149
  except Exception as e:
 
143
  stream=True)
144
  output = ""
145
  for chunk in stream:
146
+ if chunk.choices[0].delta.content is not None:
147
+ output += chunk.choices[0].delta.content
148
  yield output
149
 
150
  except Exception as e: