gzdaniel committed on
Commit
a265e60
·
1 Parent(s): c298a71

Fix linting

Browse files
Files changed (1) hide show
  1. lightrag/kg/postgres_impl.py +19 -7
lightrag/kg/postgres_impl.py CHANGED
@@ -160,7 +160,9 @@ class PostgreSQLDB:
160
  )
161
 
162
  # Migrate existing data: extract cache_type from flattened keys
163
- logger.info("Migrating existing LLM cache data to populate cache_type field")
 
 
164
  update_sql = """
165
  UPDATE LIGHTRAG_LLM_CACHE
166
  SET cache_type = CASE
@@ -176,7 +178,9 @@ class PostgreSQLDB:
176
  "cache_type column already exists in LIGHTRAG_LLM_CACHE table"
177
  )
178
  except Exception as e:
179
- logger.warning(f"Failed to add cache_type column to LIGHTRAG_LLM_CACHE: {e}")
 
 
180
 
181
  async def _migrate_timestamp_columns(self):
182
  """Migrate timestamp columns in tables to timezone-aware types, assuming original data is in UTC time"""
@@ -345,7 +349,7 @@ class PostgreSQLDB:
345
 
346
  # Determine cache_type based on mode
347
  cache_type = "extract" if record["mode"] == "default" else "unknown"
348
-
349
  # Generate new flattened key
350
  new_key = f"{record['mode']}:{cache_type}:{new_hash}"
351
 
@@ -519,7 +523,9 @@ class PostgreSQLDB:
519
  try:
520
  await self._migrate_llm_cache_add_cache_type()
521
  except Exception as e:
522
- logger.error(f"PostgreSQL, Failed to migrate LLM cache cache_type field: {e}")
 
 
523
  # Don't throw an exception, allow the initialization process to continue
524
 
525
  # Finally, attempt to migrate old doc chunks data if needed
@@ -787,7 +793,9 @@ class PGKVStorage(BaseKVStorage):
787
  response["llm_cache_list"] = llm_cache_list
788
 
789
  # Special handling for LLM cache to ensure compatibility with _get_cached_extraction_results
790
- if response and is_namespace(self.namespace, NameSpace.KV_STORE_LLM_RESPONSE_CACHE):
 
 
791
  # Map field names and add cache_type for compatibility
792
  response = {
793
  **response,
@@ -821,7 +829,9 @@ class PGKVStorage(BaseKVStorage):
821
  result["llm_cache_list"] = llm_cache_list
822
 
823
  # Special handling for LLM cache to ensure compatibility with _get_cached_extraction_results
824
- if results and is_namespace(self.namespace, NameSpace.KV_STORE_LLM_RESPONSE_CACHE):
 
 
825
  processed_results = []
826
  for row in results:
827
  # Map field names and add cache_type for compatibility
@@ -901,7 +911,9 @@ class PGKVStorage(BaseKVStorage):
901
  "return_value": v["return"],
902
  "mode": v.get("mode", "default"), # Get mode from data
903
  "chunk_id": v.get("chunk_id"),
904
- "cache_type": v.get("cache_type", "extract"), # Get cache_type from data
 
 
905
  }
906
 
907
  await self.db.execute(upsert_sql, _data)
 
160
  )
161
 
162
  # Migrate existing data: extract cache_type from flattened keys
163
+ logger.info(
164
+ "Migrating existing LLM cache data to populate cache_type field"
165
+ )
166
  update_sql = """
167
  UPDATE LIGHTRAG_LLM_CACHE
168
  SET cache_type = CASE
 
178
  "cache_type column already exists in LIGHTRAG_LLM_CACHE table"
179
  )
180
  except Exception as e:
181
+ logger.warning(
182
+ f"Failed to add cache_type column to LIGHTRAG_LLM_CACHE: {e}"
183
+ )
184
 
185
  async def _migrate_timestamp_columns(self):
186
  """Migrate timestamp columns in tables to timezone-aware types, assuming original data is in UTC time"""
 
349
 
350
  # Determine cache_type based on mode
351
  cache_type = "extract" if record["mode"] == "default" else "unknown"
352
+
353
  # Generate new flattened key
354
  new_key = f"{record['mode']}:{cache_type}:{new_hash}"
355
 
 
523
  try:
524
  await self._migrate_llm_cache_add_cache_type()
525
  except Exception as e:
526
+ logger.error(
527
+ f"PostgreSQL, Failed to migrate LLM cache cache_type field: {e}"
528
+ )
529
  # Don't throw an exception, allow the initialization process to continue
530
 
531
  # Finally, attempt to migrate old doc chunks data if needed
 
793
  response["llm_cache_list"] = llm_cache_list
794
 
795
  # Special handling for LLM cache to ensure compatibility with _get_cached_extraction_results
796
+ if response and is_namespace(
797
+ self.namespace, NameSpace.KV_STORE_LLM_RESPONSE_CACHE
798
+ ):
799
  # Map field names and add cache_type for compatibility
800
  response = {
801
  **response,
 
829
  result["llm_cache_list"] = llm_cache_list
830
 
831
  # Special handling for LLM cache to ensure compatibility with _get_cached_extraction_results
832
+ if results and is_namespace(
833
+ self.namespace, NameSpace.KV_STORE_LLM_RESPONSE_CACHE
834
+ ):
835
  processed_results = []
836
  for row in results:
837
  # Map field names and add cache_type for compatibility
 
911
  "return_value": v["return"],
912
  "mode": v.get("mode", "default"), # Get mode from data
913
  "chunk_id": v.get("chunk_id"),
914
+ "cache_type": v.get(
915
+ "cache_type", "extract"
916
+ ), # Get cache_type from data
917
  }
918
 
919
  await self.db.execute(upsert_sql, _data)