# SynWBM.py — dataset loading script for the SynWBM dataset
# (ABC-iRobotics/SynWBM on the Hugging Face Hub).
#
# This file is part of the SynWBM distribution (https://huggingface.co/datasets/ABC-iRobotics/SynWBM).
# Copyright (c) 2023 ABC-iRobotics.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""SynWBM dataset"""
import sys
if sys.version_info < (3, 9):
from typing import Sequence, Generator, Tuple
else:
from collections.abc import Sequence, Generator
Tuple = tuple
from typing import Optional, IO
import datasets
import itertools
# ---- Constants ----
# Citation string surfaced in DatasetInfo; placeholder until a publication exists.
_CITATION = """\
COMING SOON
"""
# Short human-readable description shown on the dataset page.
_DESCRIPTION = """\
A synthetic instance segmentation dataset for white button mushrooms (Agaricus bisporus).
The dataset incorporates rendered and generated synthetic images for training mushroom segmentation models.
"""
# Landing page of the dataset on the Hugging Face Hub.
_HOMEPAGE = "https://huggingface.co/datasets/ABC-iRobotics/SynWBM"
_LICENSE = "GNU General Public License v3.0"
# Latest released version per builder-config name; keys must match the
# `name` values used in SynWBMDataset.BUILDER_CONFIGS below.
_LATEST_VERSIONS = {
    "all": "1.0.0",
    "blender": "1.0.0",
    "sdxl": "1.0.0",
}
# Root URL for resolving raw data files from this repository.
BASE_URL = "https://huggingface.co/datasets/ABC-iRobotics/SynWBM/resolve/main/"
# ---- SynWBM dataset Configs ----
class SynWBMDatasetConfig(datasets.BuilderConfig):
    """BuilderConfig for the SynWBM dataset.

    Args:
        name: Config name; must be a key of ``_LATEST_VERSIONS``.
        base_urls: Base URLs of the image archives (rendered and/or generated).
        images_txt: Path to a text file listing one archive file name per line.
        version: Optional explicit version string; defaults to the latest
            released version registered for ``name``.
    """

    def __init__(self, name: str, base_urls: Sequence[str], images_txt: str, version: Optional[str] = None, **kwargs):
        _version = _LATEST_VERSIONS[name] if version is None else version
        super().__init__(version=datasets.Version(_version), name=name, **kwargs)
        # Read the archive file names. Blank lines (e.g. a trailing newline in
        # images.txt) are skipped — previously they produced malformed URLs
        # such as "<base_url>" with no file name appended.
        with open(images_txt, 'r', encoding='utf-8') as f:
            image_list = [line.strip() for line in f if line.strip()]
        img_urls = []
        depth_urls = []
        mask_urls = []
        for base_url in base_urls:
            img_urls.extend(base_url + image for image in image_list)
            # Depth and mask archives live at fixed locations under BASE_URL;
            # they are repeated once per base_url so the three URL lists stay
            # the same length and zip up index-for-index in _generate_examples.
            depth_urls.extend(BASE_URL + "depths/" + image for image in image_list)
            mask_urls.extend(BASE_URL + "masks/" + image for image in image_list)
        self._imgs_urls = img_urls
        self._depth_urls = depth_urls
        self._masks_urls = mask_urls

    @property
    def features(self):
        """Features of one example: RGB image, depth map, instance mask."""
        return datasets.Features(
            {
                "image": datasets.Image(),
                "depth": datasets.Image(),
                "mask": datasets.Image(),
            }
        )

    @property
    def supervised_keys(self):
        """No (input, target) pairing is declared for this dataset."""
        return None
# ---- SynWBM dataset Loader ----
class SynWBMDataset(datasets.GeneratorBasedBuilder):
    """SynWBM dataset builder.

    Downloads the image, depth and mask archives selected by the active
    config and yields one example per archive member triple.
    """

    BUILDER_CONFIG_CLASS = SynWBMDatasetConfig
    BUILDER_CONFIGS = [
        SynWBMDatasetConfig(
            name="all",
            description="All images",
            base_urls=[BASE_URL + "rendered/", BASE_URL + "generated/"],
            images_txt="images.txt",
        ),
        SynWBMDatasetConfig(
            name="blender",
            description="Synthetic images rendered using Blender",
            base_urls=[BASE_URL + "rendered/"],
            images_txt="images.txt",
        ),
        SynWBMDatasetConfig(
            name="sdxl",
            description="Synthetic images generated by Stable Diffusion XL",
            base_urls=[BASE_URL + "generated/"],
            images_txt="images.txt",
        ),
    ]
    # Small writer batches: every example carries three full images in memory.
    DEFAULT_WRITER_BATCH_SIZE = 10

    def _info(self):
        """Assemble DatasetInfo from the module constants and active config."""
        cfg = self.config
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=cfg.features,
            supervised_keys=cfg.supervised_keys,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
            version=cfg.version,
        )

    def _split_generators(self, dl_manager):
        """Download all archives and expose a single TRAIN split over them."""

        def _member_stream(urls):
            # Download each archive, then chain the per-archive member
            # iterators into one flat (path, file-object) stream.
            archive_paths = dl_manager.download(urls)
            return itertools.chain.from_iterable(
                dl_manager.iter_archive(path) for path in archive_paths
            )

        cfg = self.config
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    "images": _member_stream(cfg._imgs_urls),
                    "depths": _member_stream(cfg._depth_urls),
                    "masks": _member_stream(cfg._masks_urls),
                },
            ),
        ]

    def _generate_examples(
        self,
        images: Generator[Tuple[str, IO], None, None],
        depths: Generator[Tuple[str, IO], None, None],
        masks: Generator[Tuple[str, IO], None, None],
    ):
        """Walk the three archive streams in lockstep, yielding (key, example)."""
        key = 0
        for (img_path, img_obj), (depth_path, depth_obj), (mask_path, mask_obj) in zip(images, depths, masks):
            example = {
                "image": {"path": img_path, "bytes": img_obj.read()},
                "depth": {"path": depth_path, "bytes": depth_obj.read()},
                "mask": {"path": mask_path, "bytes": mask_obj.read()},
            }
            # Archive members are real file objects — close them eagerly so
            # handles do not pile up over the whole dataset.
            img_obj.close()
            depth_obj.close()
            mask_obj.close()
            yield key, example
            key += 1