b10902118 committed on
Commit
ed5dbbb
·
1 Parent(s): 292daea

fix error and tested

Browse files
Files changed (1) hide show
  1. lightrag/llm.py +9 -4
lightrag/llm.py CHANGED
@@ -70,9 +70,14 @@ async def openai_complete_if_cache(
70
  if if_cache_return is not None:
71
  return if_cache_return["return"]
72
 
73
- response = await openai_async_client.chat.completions.create(
74
- model=model, messages=messages, **kwargs
75
- )
 
 
 
 
 
76
  content = response.choices[0].message.content
77
  if r"\u" in content:
78
  content = content.encode("utf-8").decode("unicode_escape")
@@ -545,7 +550,7 @@ async def ollama_model_complete(
545
  prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
546
  ) -> str:
547
  if keyword_extraction:
548
- kwargs["response_format"] = "json"
549
  model_name = kwargs["hashing_kv"].global_config["llm_model_name"]
550
  return await ollama_model_if_cache(
551
  model_name,
 
70
  if if_cache_return is not None:
71
  return if_cache_return["return"]
72
 
73
+ if "response_format" in kwargs:
74
+ response = await openai_async_client.beta.chat.completions.parse(
75
+ model=model, messages=messages, **kwargs
76
+ )
77
+ else:
78
+ response = await openai_async_client.chat.completions.create(
79
+ model=model, messages=messages, **kwargs
80
+ )
81
  content = response.choices[0].message.content
82
  if r"\u" in content:
83
  content = content.encode("utf-8").decode("unicode_escape")
 
550
  prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
551
  ) -> str:
552
  if keyword_extraction:
553
+ kwargs["format"] = "json"
554
  model_name = kwargs["hashing_kv"].global_config["llm_model_name"]
555
  return await ollama_model_if_cache(
556
  model_name,