Cannot instantiate this tokenizer from a slow version.
While running this code:
import torch
from diffusers import StableDiffusion3Pipeline
pipe = StableDiffusion3Pipeline.from_pretrained("stabilityai/stable-diffusion-3.5-large", torch_dtype=torch.bfloat16)
pipe = pipe.to("cuda")
image = pipe(
    "A capybara holding a sign that reads Hello World",
    num_inference_steps=28,
    guidance_scale=3.5,
).images[0]
image.save("capybara.png")
I get this error:
Traceback (most recent call last):
File "d:\Pro_Perso\code\GeneratePicture_flux\generatePicture_flux.py", line 4, in
pipe = StableDiffusion3Pipeline.from_pretrained("stabilityai/stable-diffusion-3.5-large", torch_dtype=torch.bfloat16)
File "D:\Pro_Perso\code\GeneratePicture_flux.venv\Lib\site-packages\huggingface_hub\utils_validators.py", line 114, in _inner_fn
return fn(*args, **kwargs)
File "D:\Pro_Perso\code\GeneratePicture_flux.venv\Lib\site-packages\diffusers\pipelines\pipeline_utils.py", line 924, in from_pretrained
loaded_sub_model = load_sub_model(
library_name=library_name,
...<18 lines>...
use_safetensors=use_safetensors,
)
File "D:\Pro_Perso\code\GeneratePicture_flux.venv\Lib\site-packages\diffusers\pipelines\pipeline_loading_utils.py", line 725, in load_sub_model
loaded_sub_model = load_method(os.path.join(cached_folder, name), **loading_kwargs)
File "D:\Pro_Perso\code\GeneratePicture_flux.venv\Lib\site-packages\transformers\tokenization_utils_base.py", line 2052, in from_pretrained
return cls._from_pretrained(
~~~~~~~~~~~~~~~~~~~~^
resolved_vocab_files,
^^^^^^^^^^^^^^^^^^^^^
...<9 lines>...
**kwargs,
^^^^^^^^^
)
^
File "D:\Pro_Perso\code\GeneratePicture_flux.venv\Lib\site-packages\transformers\tokenization_utils_base.py", line 2292, in _from_pretrained
tokenizer = cls(*init_inputs, **init_kwargs)
File "D:\Pro_Perso\code\GeneratePicture_flux.venv\Lib\site-packages\transformers\models\t5\tokenization_t5_fast.py", line 119, in init
super().init(
~~~~~~~~~~~~~~~~^
vocab_file=vocab_file,
^^^^^^^^^^^^^^^^^^^^^^
...<7 lines>...
**kwargs,
^^^^^^^^^
)
^
File "D:\Pro_Perso\code\GeneratePicture_flux.venv\Lib\site-packages\transformers\tokenization_utils_fast.py", line 108, in init
raise ValueError(
...<2 lines>...
)
ValueError: Cannot instantiate this tokenizer from a slow version. If it's based on sentencepiece, make sure you have sentencepiece installed.
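The failure can be isolated to the pipeline's T5 tokenizer sub-model. A minimal sketch that reproduces the same error without loading the full pipeline (assuming the T5 tokenizer lives in the tokenizer_3 subfolder, as in other SD3 checkpoints):

from transformers import T5TokenizerFast

# Raises the same ValueError when sentencepiece is missing, because the fast
# tokenizer has to be converted from the slow, sentencepiece-based one.
tok = T5TokenizerFast.from_pretrained(
    "stabilityai/stable-diffusion-3.5-large", subfolder="tokenizer_3"
)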
The SD3.5 pipeline loads a T5 tokenizer, and, as the error message says, transformers needs the sentencepiece library to instantiate it. Install sentencepiece into the environment your script runs in. With Conda (install Conda first if you don't already have it):
conda install -c conda-forge sentencepiece
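Since the traceback shows the script running from a .venv, installing with pip into that same environment also works: pip install sentencepiece. As a minimal sanity check (a sketch, nothing specific to diffusers), you can confirm the active interpreter sees the library before retrying:

import importlib.util

# Fail fast if sentencepiece is missing from the active environment;
# transformers needs it to build the fast T5 tokenizer from the slow one.
if importlib.util.find_spec("sentencepiece") is None:
    raise SystemExit("sentencepiece is not installed in this interpreter")

import sentencepiece
print("sentencepiece", sentencepiece.__version__)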