Upload ai21labs_AI21-Jamba2-3B_0.txt with huggingface_hub
ai21labs_AI21-Jamba2-3B_0.txt
CHANGED
@@ -11,7 +11,7 @@ pipe(messages)
 
 ERROR:
 Traceback (most recent call last):
-  File "/tmp/ai21labs_AI21-Jamba2-
+  File "/tmp/ai21labs_AI21-Jamba2-3B_0LXFVxE.py", line 30, in <module>
     pipe(messages)
     ~~~~^^^^^^^^^^
   File "/tmp/.cache/uv/environments-v2/1be6ad9d97bb6ec1/lib/python3.13/site-packages/transformers/pipelines/text_generation.py", line 325, in __call__
@@ -28,13 +28,13 @@ Traceback (most recent call last):
     output = self.model.generate(input_ids=input_ids, attention_mask=attention_mask, **generate_kwargs)
   File "/tmp/.cache/uv/environments-v2/1be6ad9d97bb6ec1/lib/python3.13/site-packages/torch/utils/_contextlib.py", line 120, in decorate_context
     return func(*args, **kwargs)
-  File "/tmp/.cache/uv/environments-v2/1be6ad9d97bb6ec1/lib/python3.13/site-packages/transformers/generation/utils.py", line
+  File "/tmp/.cache/uv/environments-v2/1be6ad9d97bb6ec1/lib/python3.13/site-packages/transformers/generation/utils.py", line 2566, in generate
     result = decoding_method(
         self,
         ...<5 lines>...
         **model_kwargs,
     )
-  File "/tmp/.cache/uv/environments-v2/1be6ad9d97bb6ec1/lib/python3.13/site-packages/transformers/generation/utils.py", line
+  File "/tmp/.cache/uv/environments-v2/1be6ad9d97bb6ec1/lib/python3.13/site-packages/transformers/generation/utils.py", line 2786, in _sample
     outputs = self(**model_inputs, return_dict=True)
   File "/tmp/.cache/uv/environments-v2/1be6ad9d97bb6ec1/lib/python3.13/site-packages/torch/nn/modules/module.py", line 1775, in _wrapped_call_impl
     return self._call_impl(*args, **kwargs)
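
For context, a minimal sketch (an assumption, not the actual uploaded script) of the kind of transformers text-generation pipeline usage whose module-level pipe(messages) call would produce the call stack above. The model id is inferred from the log's file name; the prompt and all other choices are illustrative.

from transformers import pipeline

# Assumed model id, inferred from the log file name "ai21labs_AI21-Jamba2-3B_0.txt".
pipe = pipeline("text-generation", model="ai21labs/AI21-Jamba2-3B")

# Chat-style input; the pipeline applies the model's chat template before generating.
messages = [
    {"role": "user", "content": "Who are you?"},
]

pipe(messages)  # corresponds to the "line 30, in <module>" frame in the traceback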