Fix: Increase max_length to 256 for proper text reconstruction
Browse files — core/unified_model.py: +1 −1
core/unified_model.py
CHANGED
@@ -251,7 +251,7 @@ class IntelligentTokenizerV62(nn.Module):
     def generate(self,
                  text: str = None,
                  input_ids: torch.Tensor = None,
-                 max_length: int = [old default value lost in page extraction],
+                 max_length: int = 256,
                  temperature: float = 0.1,
                  top_k: int = 10,
                  top_p: float = 0.95) -> str: