lukestanley committed on
Commit
3ebb6e1
1 Parent(s): fbb0bdf

Print new line after LLM output end and some linting

Browse files
Files changed (1) hide show
  1. utils.py +1 -2
utils.py CHANGED
@@ -52,6 +52,7 @@ def llm_streaming(
52
  if new_token:
53
  output_text = output_text + new_token
54
  print(new_token, sep="", end="", flush=True)
 
55
 
56
  if return_pydantic_object:
57
  model_object = pydantic_model_class.model_validate_json(output_text)
@@ -69,8 +70,6 @@ def replace_text(template: str, replacements: dict) -> str:
69
 
70
  def query_ai_prompt(prompt, replacements, model_class):
71
  prompt = replace_text(prompt, replacements)
72
- # print('prompt')
73
- # print(prompt)
74
  return llm_streaming(prompt, model_class)
75
 
76
 
 
52
  if new_token:
53
  output_text = output_text + new_token
54
  print(new_token, sep="", end="", flush=True)
55
+ print('\n')
56
 
57
  if return_pydantic_object:
58
  model_object = pydantic_model_class.model_validate_json(output_text)
 
70
 
71
  def query_ai_prompt(prompt, replacements, model_class):
72
  prompt = replace_text(prompt, replacements)
 
 
73
  return llm_streaming(prompt, model_class)
74
 
75