DatasetGenerationError: An error occurred while generating the dataset
#4 opened by gg22mm

Loading HuggingFaceM4/COCO with load_dataset fails with the traceback below:
---------------------------------------------------------------------------
ImportError Traceback (most recent call last)
File ~/miniconda/lib/python3.9/site-packages/datasets/builder.py:1624, in GeneratorBasedBuilder._prepare_split_single(self, gen_kwargs, fpath, file_format, max_shard_size, split_info, check_duplicate_keys, job_id)
1615 writer = writer_class(
1616 features=writer._features,
1617 path=fpath.replace("SSSSS", f"{shard_id:05d}").replace("JJJJJ", f"{job_id:05d}"),
(...)
1622 embed_local_files=embed_local_files,
1623 )
-> 1624 example = self.info.features.encode_example(record) if self.info.features is not None else record
1625 writer.write(example, key)
File ~/miniconda/lib/python3.9/site-packages/datasets/features/features.py:1997, in Features.encode_example(self, example)
1996 example = cast_to_python_objects(example)
-> 1997 return encode_nested_example(self, example)
File ~/miniconda/lib/python3.9/site-packages/datasets/features/features.py:1285, in encode_nested_example(schema, obj, level)
1283 raise ValueError("Got None but expected a dictionary instead")
1284 return (
-> 1285 {k: encode_nested_example(schema[k], obj.get(k), level=level + 1) for k in schema}
1286 if obj is not None
1287 else None
1288 )
1290 elif isinstance(schema, (list, tuple)):
File ~/miniconda/lib/python3.9/site-packages/datasets/features/features.py:1285, in <dictcomp>(.0)
1283 raise ValueError("Got None but expected a dictionary instead")
1284 return (
-> 1285 {k: encode_nested_example(schema[k], obj.get(k), level=level + 1) for k in schema}
1286 if obj is not None
1287 else None
1288 )
1290 elif isinstance(schema, (list, tuple)):
File ~/miniconda/lib/python3.9/site-packages/datasets/features/features.py:1355, in encode_nested_example(schema, obj, level)
1354 elif hasattr(schema, "encode_example"):
-> 1355 return schema.encode_example(obj) if obj is not None else None
1356 # Other object should be directly convertible to a native Arrow type (like Translation and Translation)
File ~/miniconda/lib/python3.9/site-packages/datasets/features/image.py:107, in Image.encode_example(self, value)
106 else:
--> 107 raise ImportError("To support encoding images, please install 'Pillow'.")
109 if isinstance(value, list):
ImportError: To support encoding images, please install 'Pillow'.
The above exception was the direct cause of the following exception:
DatasetGenerationError Traceback (most recent call last)
Cell In[16], line 3
1 from datasets import load_dataset
2 # dataset = load_dataset("HuggingFaceM4/COCO",split="train", data_dir="./" ,trust_remote_code=True) #/kaggle/input/
----> 3 dataset = load_dataset("HuggingFaceM4/COCO",split="train", data_dir="./" ,trust_remote_code=True) #/kaggle/input/
4 dataset
File ~/miniconda/lib/python3.9/site-packages/datasets/load.py:2151, in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, verification_mode, keep_in_memory, save_infos, revision, token, streaming, num_proc, storage_options, trust_remote_code, **config_kwargs)
2148 return builder_instance.as_streaming_dataset(split=split)
2150 # Download and prepare data
-> 2151 builder_instance.download_and_prepare(
2152 download_config=download_config,
2153 download_mode=download_mode,
2154 verification_mode=verification_mode,
2155 num_proc=num_proc,
2156 storage_options=storage_options,
2157 )
2159 # Build dataset for splits
2160 keep_in_memory = (
2161 keep_in_memory if keep_in_memory is not None else is_small_dataset(builder_instance.info.dataset_size)
2162 )
File ~/miniconda/lib/python3.9/site-packages/datasets/builder.py:924, in DatasetBuilder.download_and_prepare(self, output_dir, download_config, download_mode, verification_mode, dl_manager, base_path, file_format, max_shard_size, num_proc, storage_options, **download_and_prepare_kwargs)
922 if num_proc is not None:
923 prepare_split_kwargs["num_proc"] = num_proc
--> 924 self._download_and_prepare(
925 dl_manager=dl_manager,
926 verification_mode=verification_mode,
927 **prepare_split_kwargs,
928 **download_and_prepare_kwargs,
929 )
930 # Sync info
931 self.info.dataset_size = sum(split.num_bytes for split in self.info.splits.values())
File ~/miniconda/lib/python3.9/site-packages/datasets/builder.py:1648, in GeneratorBasedBuilder._download_and_prepare(self, dl_manager, verification_mode, **prepare_splits_kwargs)
1647 def _download_and_prepare(self, dl_manager, verification_mode, **prepare_splits_kwargs):
-> 1648 super()._download_and_prepare(
1649 dl_manager,
1650 verification_mode,
1651 check_duplicate_keys=verification_mode == VerificationMode.BASIC_CHECKS
1652 or verification_mode == VerificationMode.ALL_CHECKS,
1653 **prepare_splits_kwargs,
1654 )
File ~/miniconda/lib/python3.9/site-packages/datasets/builder.py:1000, in DatasetBuilder._download_and_prepare(self, dl_manager, verification_mode, **prepare_split_kwargs)
996 split_dict.add(split_generator.split_info)
998 try:
999 # Prepare split will record examples associated to the split
-> 1000 self._prepare_split(split_generator, **prepare_split_kwargs)
1001 except OSError as e:
1002 raise OSError(
1003 "Cannot find data file. "
1004 + (self.manual_download_instructions or "")
1005 + "\nOriginal error:\n"
1006 + str(e)
1007 ) from None
File ~/miniconda/lib/python3.9/site-packages/datasets/builder.py:1486, in GeneratorBasedBuilder._prepare_split(self, split_generator, check_duplicate_keys, file_format, num_proc, max_shard_size)
1484 job_id = 0
1485 with pbar:
-> 1486 for job_id, done, content in self._prepare_split_single(
1487 gen_kwargs=gen_kwargs, job_id=job_id, **_prepare_split_args
1488 ):
1489 if done:
1490 result = content
File ~/miniconda/lib/python3.9/site-packages/datasets/builder.py:1643, in GeneratorBasedBuilder._prepare_split_single(self, gen_kwargs, fpath, file_format, max_shard_size, split_info, check_duplicate_keys, job_id)
1641 if isinstance(e, SchemaInferenceError) and e.__context__ is not None:
1642 e = e.__context__
-> 1643 raise DatasetGenerationError("An error occurred while generating the dataset") from e
1645 yield job_id, True, (total_num_examples, total_num_bytes, writer._features, num_shards, shard_lengths)
DatasetGenerationError: An error occurred while generating the dataset
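
Reading the chained traceback bottom-up, the DatasetGenerationError is only a wrapper: the root cause is the ImportError raised in datasets/features/image.py ("To support encoding images, please install 'Pillow'."), which means Pillow is missing from the environment running the notebook. A minimal sketch of the fix, assuming the ~/miniconda Python 3.9 environment shown in the paths above: install Pillow into that same environment, restart the kernel, and retry the exact call from the failing cell.

# Install into the same interpreter the notebook uses, e.g.:
#   python -m pip install Pillow
# then restart the kernel so the new package is picked up.

import PIL  # sanity check: this import fails if Pillow is still missing
from datasets import load_dataset

# Same call as in the failing cell; trust_remote_code is needed because
# HuggingFaceM4/COCO ships its own loading script.
dataset = load_dataset(
    "HuggingFaceM4/COCO",
    split="train",
    data_dir="./",
    trust_remote_code=True,
)
print(dataset)

If the earlier failed run left a partially written cache behind, retrying with download_mode="force_redownload" (a standard load_dataset parameter) may also help.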