thenativefox committed on
Commit
20d283e
1 Parent(s): 93c49cb

add .manifest files

Browse files
Files changed (2) hide show
  1. .gitattributes +1 -0
  2. backend/query_llm.py +2 -8
.gitattributes CHANGED
@@ -35,3 +35,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
  *.lance filter=lfs diff=lfs merge=lfs -text
37
  *.idx filter=lfs diff=lfs merge=lfs -text
 
 
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
  *.lance filter=lfs diff=lfs merge=lfs -text
37
  *.idx filter=lfs diff=lfs merge=lfs -text
38
+ *.manifest filter=lfs diff=lfs merge=lfs -text
backend/query_llm.py CHANGED
@@ -120,14 +120,6 @@ def generate_openai(prompt: str, history: str) -> Generator[str, None, str]:
120
  formatted_prompt = format_prompt(prompt, "openai")
121
 
122
  try:
123
- # response = OAI_CLIENT.chat.completions.create(
124
- # model=os.getenv("OPENAI_MODEL"),
125
- # messages=formatted_prompt,
126
- # **OAI_GENERATE_KWARGS
127
- # )
128
- # logging.info("SIMPLE OUTPUT")
129
- # logging.info(response.choices[0].message.content)
130
-
131
  stream = OAI_CLIENT.chat.completions.create(
132
  model=os.getenv("OPENAI_MODEL"),
133
  messages=formatted_prompt,
@@ -137,6 +129,8 @@ def generate_openai(prompt: str, history: str) -> Generator[str, None, str]:
137
  output = ""
138
  for chunk in stream:
139
  if chunk.choices[0].delta.content:
 
 
140
  output += chunk.choices[0].delta.content
141
  yield output
142
 
 
120
  formatted_prompt = format_prompt(prompt, "openai")
121
 
122
  try:
 
 
 
 
 
 
 
 
123
  stream = OAI_CLIENT.chat.completions.create(
124
  model=os.getenv("OPENAI_MODEL"),
125
  messages=formatted_prompt,
 
129
  output = ""
130
  for chunk in stream:
131
  if chunk.choices[0].delta.content:
132
+ logging.info("CHUNK CONTENT")
133
+ logging.info(chunk.choices[0].delta.content)
134
  output += chunk.choices[0].delta.content
135
  yield output
136