Persistent LLM cache on error
lightrag/lightrag.py  (+9 -1)
@@ -1010,6 +1010,10 @@ class LightRAG:
                     if not task.done():
                         task.cancel()

+                    # Persistent llm cache
+                    if self.llm_response_cache:
+                        await self.llm_response_cache.index_done_callback()
+
                     # Update document status to failed
                     await self.doc_status.upsert(
                         {
@@ -1028,7 +1032,7 @@
                         }
                     )

-                # Semphore
+                # Semphore released, concurrency controlled by graph_db_lock in merge_nodes_and_edges instead

                 if file_extraction_stage_ok:
                     try:
@@ -1082,6 +1086,10 @@
                     pipeline_status["latest_message"] = error_msg
                     pipeline_status["history_messages"].append(error_msg)

+                    # Persistent llm cache
+                    if self.llm_response_cache:
+                        await self.llm_response_cache.index_done_callback()
+
                     # Update document status to failed
                     await self.doc_status.upsert(
                         {
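The pattern the patch adds is worth spelling out: both error paths now flush the LLM response cache to storage before the document is marked failed, so LLM calls that already completed are reused rather than re-issued when the document is retried. Below is a minimal, self-contained sketch of that flush-on-error pattern. SimpleJsonCache, process_document, and the file path are illustrative stand-ins, not LightRAG's actual storage classes; only the index_done_callback() hook mirrors the call used in the diff.

import asyncio
import json
import os


class SimpleJsonCache:
    """Toy stand-in for an LLM response cache with a persist-on-done hook."""

    def __init__(self, path: str):
        self._path = path
        self._data: dict[str, str] = {}

    async def upsert(self, items: dict[str, str]) -> None:
        # Entries live only in memory until index_done_callback() is called.
        self._data.update(items)

    async def index_done_callback(self) -> None:
        # Flush the in-memory cache to disk; invoked on success *and* on error.
        with open(self._path, "w", encoding="utf-8") as f:
            json.dump(self._data, f)


async def process_document(cache: SimpleJsonCache) -> None:
    try:
        # Stand-in for expensive LLM extraction calls, each cached as it completes.
        await cache.upsert({"chunk-1": "extracted entities ..."})
        raise RuntimeError("extraction failed halfway through")
    except Exception:
        # Mirrors the patch: persist the cache before marking the document failed,
        # so completed LLM responses survive the failure and a retry reuses them.
        if cache:
            await cache.index_done_callback()
        raise


if __name__ == "__main__":
    cache = SimpleJsonCache("llm_cache.json")
    try:
        asyncio.run(process_document(cache))
    except RuntimeError:
        pass
    print("cache persisted:", os.path.exists("llm_cache.json"))

Without the flush in the except branch, anything cached in memory since the last successful checkpoint would be discarded when the pipeline aborts, and the retry would pay for the same LLM calls again.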