Kevin Hu committed
Commit ce796e3 · 1 Parent(s): edf3ea6

show error log of KG extraction (#2045)


### What problem does this PR solve?

Errors raised during knowledge graph extraction were only written to the server log, so the task progress view gave no hint of why extraction had failed. This PR forwards the exception message to the progress callback, and raises when the LLM returns an `**ERROR**` response so that failure is reported the same way.
### Type of change


- [x] Performance Improvement
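
For context, a minimal, self-contained sketch of the reporting pattern the first hunk introduces; `process_docs`, `chat_fn`, and the callback wiring are hypothetical stand-ins for RAGFlow's actual `GraphExtractor` plumbing, not its API. The point of the change is that the per-document `except` now pushes the error text through the caller-supplied `callback` before logging it, so the progress view shows why extraction failed instead of stalling silently.

```python
import logging
from timeit import default_timer as timer

def process_docs(docs, chat_fn, callback=None):
    # Hypothetical driver loop mirroring GraphExtractor's per-document
    # try/except; chat_fn returns (response_text, token_count).
    total, total_token_count, st = len(docs), 0, timer()
    for doc_index, doc in enumerate(docs):
        try:
            _, token_count = chat_fn(doc)
            total_token_count += token_count
            if callback:
                callback(msg=f"{doc_index + 1}/{total}, elapsed: {timer() - st}s, "
                             f"used tokens: {total_token_count}")
        except Exception as e:
            # The behavior this PR adds: surface the error to the caller
            # first, then log the full traceback server-side.
            if callback:
                callback("Knowledge graph extraction error:{}".format(str(e)))
            logging.exception("error extracting graph")

def failing_chat(doc):
    raise RuntimeError("LLM timeout")

process_docs(["doc"], failing_chat, callback=lambda msg="": print(msg))
# prints: Knowledge graph extraction error:LLM timeout
```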

Files changed (1)

  1. graphrag/graph_extractor.py +2 -0
graphrag/graph_extractor.py CHANGED

```diff
@@ -142,6 +142,7 @@ class GraphExtractor:
                 total_token_count += token_count
                 if callback: callback(msg=f"{doc_index+1}/{total}, elapsed: {timer() - st}s, used tokens: {total_token_count}")
             except Exception as e:
+                if callback: callback("Knowledge graph extraction error:{}".format(str(e)))
                 logging.exception("error extracting graph")
                 self._on_error(
                     e,
@@ -184,6 +185,7 @@ class GraphExtractor:
             text = perform_variable_replacements(CONTINUE_PROMPT, history=history, variables=variables)
             history.append({"role": "user", "content": text})
             response = self._llm.chat("", history, gen_conf)
+            if response.find("**ERROR**") >=0: raise Exception(response)
             results += response or ""
 
             # if this is the final glean, don't bother updating the continuation flag
```
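
The second hunk guards the gleaning loop. A sketch of the idea follows, with `llm_chat` as a hypothetical stand-in for `self._llm.chat`; it assumes, as the diff implies, that the LLM client signals provider failures by embedding `**ERROR**` in the returned string rather than raising:

```python
def glean_once(llm_chat, history, gen_conf):
    # One continuation ("glean") round. Assumption: the chat client
    # returns an in-band "**ERROR** ..." string on failure.
    response = llm_chat("", history, gen_conf)
    if response.find("**ERROR**") >= 0:
        # Promote the in-band error to a real exception so the
        # per-document except clause (and its callback) reports it.
        raise Exception(response)
    return response or ""
```

Raising here, rather than appending the error string to `results`, keeps the bad response out of the extracted graph and routes the failure through the same callback path as any other exception.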