Upload ByteDance-Seed_Stable-DiffCoder-8B-Instruct_0.txt with huggingface_hub
ByteDance-Seed_Stable-DiffCoder-8B-Instruct_0.txt
CHANGED
@@ -11,20 +11,20 @@ pipe(messages)
 
 ERROR:
 Traceback (most recent call last):
-  File "/tmp/ByteDance-Seed_Stable-DiffCoder-8B-
+  File "/tmp/ByteDance-Seed_Stable-DiffCoder-8B-Instruct_0rDH99b.py", line 30, in <module>
     pipe(messages)
     ~~~~^^^^^^^^^^
-  File "/tmp/.cache/uv/environments-v2/d1df88e7f5c3f72a/lib/python3.13/site-packages/transformers/pipelines/text_generation.py", line
-    return super().__call__(
-
-  File "/tmp/.cache/uv/environments-v2/d1df88e7f5c3f72a/lib/python3.13/site-packages/transformers/pipelines/base.py", line
+  File "/tmp/.cache/uv/environments-v2/d1df88e7f5c3f72a/lib/python3.13/site-packages/transformers/pipelines/text_generation.py", line 293, in __call__
+    return super().__call__(text_inputs, **kwargs)
+           ~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^
+  File "/tmp/.cache/uv/environments-v2/d1df88e7f5c3f72a/lib/python3.13/site-packages/transformers/pipelines/base.py", line 1274, in __call__
     return self.run_single(inputs, preprocess_params, forward_params, postprocess_params)
            ~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-  File "/tmp/.cache/uv/environments-v2/d1df88e7f5c3f72a/lib/python3.13/site-packages/transformers/pipelines/base.py", line
+  File "/tmp/.cache/uv/environments-v2/d1df88e7f5c3f72a/lib/python3.13/site-packages/transformers/pipelines/base.py", line 1281, in run_single
     model_outputs = self.forward(model_inputs, **forward_params)
-  File "/tmp/.cache/uv/environments-v2/d1df88e7f5c3f72a/lib/python3.13/site-packages/transformers/pipelines/base.py", line
+  File "/tmp/.cache/uv/environments-v2/d1df88e7f5c3f72a/lib/python3.13/site-packages/transformers/pipelines/base.py", line 1173, in forward
     model_outputs = self._forward(model_inputs, **forward_params)
-  File "/tmp/.cache/uv/environments-v2/d1df88e7f5c3f72a/lib/python3.13/site-packages/transformers/pipelines/text_generation.py", line
+  File "/tmp/.cache/uv/environments-v2/d1df88e7f5c3f72a/lib/python3.13/site-packages/transformers/pipelines/text_generation.py", line 397, in _forward
     output = self.model.generate(input_ids=input_ids, attention_mask=attention_mask, **generate_kwargs)
   File "/tmp/.cache/uv/environments-v2/d1df88e7f5c3f72a/lib/python3.13/site-packages/torch/utils/_contextlib.py", line 124, in decorate_context
     return func(*args, **kwargs)
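
For reference, a minimal sketch of the kind of script that could produce this traceback. The actual /tmp/ByteDance-Seed_Stable-DiffCoder-8B-Instruct_0rDH99b.py is not included in this upload, only its traceback, so the pipeline options and the chat messages below are assumptions rather than the original code.

# Hypothetical reconstruction of the failing script (assumptions noted inline).
from transformers import pipeline

# The traceback passes through transformers' text-generation pipeline, so the
# model is presumably loaded like this; dtype/device settings are guesses.
pipe = pipeline(
    "text-generation",
    model="ByteDance-Seed/Stable-DiffCoder-8B-Instruct",
)

# Chat-style input; `pipe(messages)` is the call at the top of the traceback.
messages = [
    {"role": "user", "content": "Write a quicksort function in Python."},
]

pipe(messages)  # fails inside model.generate() per the log, which is cut off before the final error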