yangdx committed on
Commit
71fd78c
·
1 Parent(s): d52ee9d

Fix prompt response cache failure when is_embedding_cache_enabled is true

Browse files
Files changed (1) hide show
  1. lightrag/utils.py +1 -1
lightrag/utils.py CHANGED
@@ -523,7 +523,7 @@ async def handle_cache(
523
  mode=mode,
524
  use_llm_check=use_llm_check,
525
  llm_func=llm_model_func if use_llm_check else None,
526
- original_prompt=prompt if use_llm_check else None,
527
  cache_type=cache_type,
528
  )
529
  if best_cached_response is not None:
 
523
  mode=mode,
524
  use_llm_check=use_llm_check,
525
  llm_func=llm_model_func if use_llm_check else None,
526
+ original_prompt=prompt,
527
  cache_type=cache_type,
528
  )
529
  if best_cached_response is not None: