Update app.py
app.py CHANGED
@@ -376,7 +376,7 @@ class PromptRefiner:
         response = self.client.chat_completion(
             model="meta-llama/Meta-Llama-3-70B-Instruct",
             messages=messages,
-            max_tokens=
+            max_tokens=4000,
             temperature=0.3
         )
         response_content = response.choices[0].message.content.strip()
@@ -424,7 +424,7 @@ class PromptRefiner:
         response = self.client.chat_completion(
             model="meta-llama/Meta-Llama-3-70B-Instruct",
             messages=messages,
-            max_tokens=
+            max_tokens=4000, # Increased token limit
             temperature=0.8
         )

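For context, here is a minimal standalone sketch of the patched call. It assumes self.client in PromptRefiner is a huggingface_hub.InferenceClient authenticated via an HF_TOKEN environment variable; only the max_tokens=4000 cap, model name, and temperature come from this commit, the rest is illustrative.

# Minimal sketch of the patched chat_completion call, not the full app.py.
# Assumption: self.client in PromptRefiner is a huggingface_hub.InferenceClient.
import os
from huggingface_hub import InferenceClient

client = InferenceClient(token=os.environ.get("HF_TOKEN"))

messages = [
    {"role": "system", "content": "You refine user prompts."},        # illustrative
    {"role": "user", "content": "Refine: summarize this article."},   # illustrative
]

response = client.chat_completion(
    model="meta-llama/Meta-Llama-3-70B-Instruct",
    messages=messages,
    max_tokens=4000,   # explicit completion cap added in this commit
    temperature=0.3,
)
print(response.choices[0].message.content.strip())

The commit message does not state the motivation, but an explicit max_tokens keeps the backend's default completion limit from truncating long responses from both calls.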