green committed on
Commit
1e0ace8
1 Parent(s): 2f91448

Update digestor.py

Browse files
Files changed (1) hide show
  1. digestor.py +1 -1
digestor.py CHANGED
@@ -158,7 +158,7 @@ class Digestor:
158
 
159
  # Finally, chunk the piece, adjusting the chunks if too long.
160
  for i, j in range_list:
161
- if (tokenized_len := len(tokenizer(chunk := ' '.join(fractured[i:j]).replace(' .','.')))) <= self.token_limit: # d[i:j]).replace('\n',' ')))) <= self.token_limit:
162
  chunk_list.append(chunk)
163
  else: # if chunks of <limit> words are too long, back them off.
164
  chunk_list.append(' '.join(chunk.split(' ')[: self.token_limit - tokenized_len ])) # tokenized_len ]).replace('\n',' '))
 
158
 
159
  # Finally, chunk the piece, adjusting the chunks if too long.
160
  for i, j in range_list:
161
+ if (tokenized_len := len(tokenizer(chunk := ' '.join(fractured[i:j]).replace(' . ','.&&')))) <= self.token_limit: # d[i:j]).replace('\n',' ')))) <= self.token_limit:
162
  chunk_list.append(chunk)
163
  else: # if chunks of <limit> words are too long, back them off.
164
  chunk_list.append(' '.join(chunk.split(' ')[: self.token_limit - tokenized_len ])) # tokenized_len ]).replace('\n',' '))