|
import argparse |
|
from io import BytesIO |
|
from pathlib import Path |
|
from huggingface_hub import list_repo_tree, hf_hub_url, HfFileSystem |
|
from huggingface_hub.hf_api import RepoFile |
|
import fsspec |
|
from PIL import Image |
|
from tqdm import tqdm |
|
|
|
def enumerate_chunks(repo_id, images_parent):
    """
    List the immediate chunk subdirectories under the images parent.

    Uses HfFileSystem to browse the dataset repo and returns the sorted
    basenames of all direct subdirectories (e.g. ['000', '001', ...]).
    """
    fs = HfFileSystem()
    listing = fs.ls(f"datasets/{repo_id}/{images_parent}", detail=True)
    # Keep only directories; reduce each full repo path to its basename.
    dir_names = (
        item['name'].rsplit('/', 1)[-1]
        for item in listing
        if item['type'] == 'directory'
    )
    return sorted(dir_names)
|
|
|
def sample_dataset(
    repo_id: str,
    images_parent: str,
    labels_parent: str,
    output_dir: str,
    flatten: bool = True,
    chunks: list = None
):
    """
    Stream paired .png images and .txt labels from a folder-structured
    Hugging Face dataset and save them under ``output_dir``.

    Args:
        repo_id: Hugging Face dataset repo ID (e.g. "user/dataset").
        images_parent: Repo path holding the per-chunk image subdirs.
        labels_parent: Repo path holding the per-chunk label subdirs;
            assumed to mirror the image layout with .txt files.
        output_dir: Local directory the sampled pairs are written into.
        flatten: If True, drop the chunk subdirectories and save files
            directly under ``images_parent`` / ``labels_parent``.
        chunks: Specific chunk names to process (e.g. ["000", "001"]);
            None means enumerate and process every chunk.
    """
    total_downloaded = 0
    all_chunks = chunks
    if all_chunks is None:
        all_chunks = enumerate_chunks(repo_id, images_parent)
        print(f"Found chunks: {all_chunks}")

    for chunk in all_chunks:
        image_subdir = f"{images_parent}/{chunk}"
        label_subdir = f"{labels_parent}/{chunk}"

        image_files = list_repo_tree(
            repo_id=repo_id,
            path_in_repo=image_subdir,
            repo_type="dataset",
            recursive=True,
        )

        for img_file in tqdm(image_files, desc=f"Downloading {chunk}", leave=False):
            # Skip directory entries and anything that is not a .png image.
            if not isinstance(img_file, RepoFile) or not img_file.path.lower().endswith(".png"):
                continue

            rel_path = Path(img_file.path).relative_to(image_subdir)
            label_path = f"{label_subdir}/{rel_path.with_suffix('.txt')}"

            if flatten:
                # Collapse chunk subdirs: save straight under the images/labels
                # parents. Paths are derived from labels_parent itself instead of
                # the former string .replace('.png', '.txt').replace('images',
                # 'labels'), which rewrote the FIRST occurrence anywhere in the
                # path and so corrupted destinations whenever the repo path or
                # filename also contained "images" or an interior ".png".
                filename = Path(img_file.path).name
                local_image_path = Path(output_dir) / images_parent / filename
                local_label_path = (
                    Path(output_dir) / labels_parent / Path(filename).with_suffix(".txt")
                )
            else:
                local_image_path = Path(output_dir) / img_file.path
                local_label_path = Path(output_dir) / label_path

            local_image_path.parent.mkdir(parents=True, exist_ok=True)
            local_label_path.parent.mkdir(parents=True, exist_ok=True)

            image_url = hf_hub_url(repo_id=repo_id, filename=img_file.path, repo_type="dataset")
            label_url = hf_hub_url(repo_id=repo_id, filename=label_path, repo_type="dataset")
            try:
                # Stream both halves of the pair; re-encode the image via PIL
                # and copy the label bytes verbatim.
                with fsspec.open(image_url) as f:
                    image = Image.open(BytesIO(f.read()))
                    image.save(local_image_path)
                with fsspec.open(label_url) as f:
                    txt_content = f.read()
                with open(local_label_path, "wb") as out_f:
                    out_f.write(txt_content)
                total_downloaded += 1
            except Exception as e:
                # Best-effort download: report the failed pair and keep going.
                print(f"Failed {rel_path}: {e}")

    print(f"Downloaded {total_downloaded} image/txt pairs.")
    print(f"Saved under: {Path(output_dir).resolve()}")
|
|
|
def parse_args():
    """
    Parse command-line arguments for the dataset sampler.

    Returns:
        argparse.Namespace with repo_id, images_parent, labels_parent,
        output_dir, flatten, and chunks.
    """

    def _str2bool(value):
        # argparse's ``type=bool`` is broken: bool("False") is True because any
        # non-empty string is truthy. Parse common truthy/falsy spellings so
        # ``--flatten False`` actually disables flattening.
        if isinstance(value, bool):
            return value
        lowered = value.strip().lower()
        if lowered in ("true", "1", "yes", "y"):
            return True
        if lowered in ("false", "0", "no", "n"):
            return False
        raise argparse.ArgumentTypeError(f"Expected a boolean, got {value!r}")

    parser = argparse.ArgumentParser(description="Stream and sample paired images + txt labels from a Hugging Face folder-structured dataset, optionally across multiple chunks.")
    parser.add_argument("--repo-id", default="JeffreyJsam/SWiM-SpacecraftWithMasks", help="Hugging Face dataset repo ID.")
    parser.add_argument("--images-parent", default="Baseline/images/val", help="Parent directory for image chunks.")
    parser.add_argument("--labels-parent", default="Baseline/labels/val", help="Parent directory for label chunks.")
    parser.add_argument("--output-dir", default="./SWiM", help="Where to save sampled data.")
    parser.add_argument("--flatten", default=True, type=_str2bool, help="Save all samples in a single folder without subdirectories.")
    parser.add_argument("--chunks", nargs="*", default=None, help="Specific chunk names to sample (e.g. 000 001). Leave empty to process all.")
    return parser.parse_args()
|
|
|
if __name__ == "__main__":
    # CLI entry point: forward the parsed arguments straight to the sampler.
    cli_args = parse_args()
    sample_dataset(
        repo_id=cli_args.repo_id,
        images_parent=cli_args.images_parent,
        labels_parent=cli_args.labels_parent,
        output_dir=cli_args.output_dir,
        flatten=cli_args.flatten,
        chunks=cli_args.chunks,
    )
|
|