ggunio committed on
Commit da2970e · verified
1 Parent(s): 3a4dfa2

Fix: Increase max_length to 256 for proper text reconstruction

Files changed (1)
  1. core/unified_model.py +1 -1
core/unified_model.py CHANGED
@@ -251,7 +251,7 @@ class IntelligentTokenizerV62(nn.Module):
     def generate(self,
                  text: str = None,
                  input_ids: torch.Tensor = None,
-                 max_length: int = 48,
+                 max_length: int = 256,
                  temperature: float = 0.1,
                  top_k: int = 10,
                  top_p: float = 0.95) -> str:
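
For context, the change only touches the default value of max_length in the generate() signature shown above: per the commit message, callers relying on the old default of 48 could get truncated reconstructions. Below is a minimal usage sketch, assuming an already-instantiated IntelligentTokenizerV62; the no-argument constructor and any weight-loading step are placeholders not shown in this diff, and only parameters from the signature above are used.

import torch
from core.unified_model import IntelligentTokenizerV62

# Hypothetical instantiation -- constructor arguments and checkpoint loading
# are not part of this diff and will differ in the real repo.
model = IntelligentTokenizerV62()
model.eval()

text = "A sample sentence long enough that a 48-step generation budget would cut it off."

with torch.no_grad():
    # Before this commit, omitting max_length meant generate() stopped after 48 steps,
    # which could truncate the reconstructed text; the default is now 256.
    reconstructed = model.generate(text=text)

    # The budget can still be overridden explicitly if 256 is not enough.
    longer = model.generate(text=text, max_length=512,
                            temperature=0.1, top_k=10, top_p=0.95)

print(reconstructed)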