Spaces:
Runtime error
Runtime error
Nithin1905
committed on
Commit
•
6eb966d
1
Parent(s):
234b532
Update main.py
Browse files
main.py
CHANGED
@@ -689,15 +689,15 @@ def investment(queries, query_results, other_info_results):
|
|
689 |
encoding = tiktoken.encoding_for_model("gpt-3.5-turbo")
|
690 |
sys_prompt_token_size = len(encoding.encode(sys_prompt))
|
691 |
|
692 |
-
max_model_tokens =
|
693 |
-
max_chunk_size =
|
694 |
|
695 |
chunks = split_into_chunks(message, token_limit=max_chunk_size)
|
696 |
|
697 |
model = "meta-llama/llama-3-70b-instruct:nitro"
|
698 |
responses = []
|
699 |
tokens_used = 0
|
700 |
-
max_tokens_per_minute =
|
701 |
|
702 |
for chunk in chunks:
|
703 |
chunk_token_size = len(encoding.encode(chunk))
|
|
|
689 |
encoding = tiktoken.encoding_for_model("gpt-3.5-turbo")
|
690 |
sys_prompt_token_size = len(encoding.encode(sys_prompt))
|
691 |
|
692 |
+
max_model_tokens = 7000
|
693 |
+
max_chunk_size = 7000 # Adjust to leave more buffer space
|
694 |
|
695 |
chunks = split_into_chunks(message, token_limit=max_chunk_size)
|
696 |
|
697 |
model = "meta-llama/llama-3-70b-instruct:nitro"
|
698 |
responses = []
|
699 |
tokens_used = 0
|
700 |
+
max_tokens_per_minute = 7000
|
701 |
|
702 |
for chunk in chunks:
|
703 |
chunk_token_size = len(encoding.encode(chunk))
|