ariG23498 (HF Staff) committed (verified)
Commit a22a86d · 1 Parent(s): 2791489

Upload black-forest-labs_FLUX.1-Kontext-dev_1.txt with huggingface_hub
black-forest-labs_FLUX.1-Kontext-dev_1.txt ADDED
@@ -0,0 +1,67 @@
+ ```CODE:
+ from diffusers import DiffusionPipeline
+ from diffusers.utils import load_image
+
+ pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-Kontext-dev")
+
+ prompt = "Turn this cat into a dog"
+ input_image = load_image("https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/diffusers/cat.png")
+
+ image = pipe(image=input_image, prompt=prompt).images[0]
+ ```
+
+ ERROR:
+ Traceback (most recent call last):
+   File "/tmp/.cache/uv/environments-v2/ca96a1cdc2ecd6c7/lib/python3.13/site-packages/transformers/tokenization_utils_base.py", line 2343, in _from_pretrained
+     tokenizer = cls(*init_inputs, **init_kwargs)
+   File "/tmp/.cache/uv/environments-v2/ca96a1cdc2ecd6c7/lib/python3.13/site-packages/transformers/models/t5/tokenization_t5_fast.py", line 119, in __init__
+     super().__init__(
+     ~~~~~~~~~~~~~~~~^
+         vocab_file=vocab_file,
+         ^^^^^^^^^^^^^^^^^^^^^^
+     ...<7 lines>...
+         **kwargs,
+         ^^^^^^^^^
+     )
+     ^
+   File "/tmp/.cache/uv/environments-v2/ca96a1cdc2ecd6c7/lib/python3.13/site-packages/transformers/tokenization_utils_fast.py", line 108, in __init__
+     raise ValueError(
+     ...<2 lines>...
+     )
+ ValueError: Cannot instantiate this tokenizer from a slow version. If it's based on sentencepiece, make sure you have sentencepiece installed.
+
+ During handling of the above exception, another exception occurred:
+
+ Traceback (most recent call last):
+   File "/tmp/black-forest-labs_FLUX.1-Kontext-dev_1W1IjY3.py", line 19, in <module>
+     pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-Kontext-dev")
+   File "/tmp/.cache/uv/environments-v2/ca96a1cdc2ecd6c7/lib/python3.13/site-packages/huggingface_hub/utils/_validators.py", line 114, in _inner_fn
+     return fn(*args, **kwargs)
+   File "/tmp/.cache/uv/environments-v2/ca96a1cdc2ecd6c7/lib/python3.13/site-packages/diffusers/pipelines/pipeline_utils.py", line 1025, in from_pretrained
+     loaded_sub_model = load_sub_model(
+         library_name=library_name,
+     ...<21 lines>...
+         quantization_config=quantization_config,
+     )
+   File "/tmp/.cache/uv/environments-v2/ca96a1cdc2ecd6c7/lib/python3.13/site-packages/diffusers/pipelines/pipeline_loading_utils.py", line 860, in load_sub_model
+     loaded_sub_model = load_method(os.path.join(cached_folder, name), **loading_kwargs)
+   File "/tmp/.cache/uv/environments-v2/ca96a1cdc2ecd6c7/lib/python3.13/site-packages/transformers/tokenization_utils_base.py", line 2097, in from_pretrained
+     return cls._from_pretrained(
+     ~~~~~~~~~~~~~~~~~~~~^
+         resolved_vocab_files,
+         ^^^^^^^^^^^^^^^^^^^^^
+     ...<9 lines>...
+         **kwargs,
+         ^^^^^^^^^
+     )
+     ^
+   File "/tmp/.cache/uv/environments-v2/ca96a1cdc2ecd6c7/lib/python3.13/site-packages/transformers/tokenization_utils_base.py", line 2344, in _from_pretrained
+     except import_protobuf_decode_error():
+     ~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^
+   File "/tmp/.cache/uv/environments-v2/ca96a1cdc2ecd6c7/lib/python3.13/site-packages/transformers/tokenization_utils_base.py", line 87, in import_protobuf_decode_error
+     raise ImportError(PROTOBUF_IMPORT_ERROR.format(error_message))
+ ImportError:
+ requires the protobuf library but it was not found in your environment. Check out the instructions on the
+ installation page of its repo: https://github.com/protocolbuffers/protobuf/tree/master/python#installation and follow the ones
+ that match your environment. Please note that you may need to restart your runtime after installation.
+
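
The two chained exceptions both point at missing optional dependencies of the pipeline's T5 tokenizer rather than at the snippet itself: the first ValueError asks for `sentencepiece`, and the final ImportError asks for `protobuf`. Below is a minimal sketch of a likely fix, assuming the failure is purely environmental: install both packages (`pip install sentencepiece protobuf`) and re-run the original code. The `torch_dtype=torch.bfloat16` and `pipe.to("cuda")` settings are added assumptions to make the large model practical to load, not something the log itself prescribes.

```python
# Sketch of a likely fix for the traceback above, assuming the only problem is
# the missing tokenizer dependencies named in the error messages:
#
#   pip install sentencepiece protobuf
#
# The dtype/device choices below are assumptions for convenience, not requirements.

import torch
from diffusers import DiffusionPipeline
from diffusers.utils import load_image

pipe = DiffusionPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-Kontext-dev",
    torch_dtype=torch.bfloat16,  # assumption: load weights in bf16 to save memory
)
pipe.to("cuda")  # assumption: a CUDA GPU is available

prompt = "Turn this cat into a dog"
input_image = load_image(
    "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/diffusers/cat.png"
)

image = pipe(image=input_image, prompt=prompt).images[0]
image.save("kontext_edit.png")  # hypothetical output path
```

As the ImportError message itself notes, the runtime may need to be restarted after installing the packages so they are picked up by the environment.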