# DeepFurniture — uncompress_dataset.sh
# (exported from the Hugging Face repo page; commit da341b6, "add README and dataloader")
#!/bin/bash
# Print the help text and terminate the script with status 1.
# Used both for -h and for any argument error.
usage() {
  cat <<EOF
Usage: $0 -s SOURCE_DIR -t TARGET_DIR [-c CHUNK_TYPE] [-h]
Uncompress chunked DeepFurniture dataset

Required arguments:
 -s SOURCE_DIR Source directory containing the chunked dataset
 -t TARGET_DIR Target directory for the uncompressed dataset

Optional arguments:
 -c CHUNK_TYPE Specific chunk type to process (scenes, furnitures, queries)
 If not specified, all chunk types will be processed
 -h Show this help message
EOF
  exit 1
}
# Parse command-line options into SOURCE_DIR / TARGET_DIR / CHUNK_TYPE.
while getopts "s:t:c:h" opt; do
  case "$opt" in
    s) SOURCE_DIR=$OPTARG ;;
    t) TARGET_DIR=$OPTARG ;;
    c) CHUNK_TYPE=$OPTARG ;;
    h | ?) usage ;;
  esac
done

# Both -s and -t are mandatory.
if [ -z "$SOURCE_DIR" ] || [ -z "$TARGET_DIR" ]; then
  echo "Error: Source and target directories are required"
  usage
fi

# The source tree must already exist; the target is created later.
if [ ! -d "$SOURCE_DIR" ]; then
  echo "Error: Source directory does not exist: $SOURCE_DIR"
  exit 1
fi
# Create target directory structure
mkdir -p "$TARGET_DIR"/{metadata,scenes,furnitures,queries}

# Copy metadata files (excluding the *_index.json chunk-index files).
echo "Copying metadata files..."
for file in "$SOURCE_DIR"/metadata/*.json*; do
  # An unmatched glob stays literal under default bash options; skip it so
  # cp is never handed a nonexistent path.
  [ -e "$file" ] || continue
  # Escape the dot: the original pattern '_index.json$' matched any character
  # in place of the '.', not just a literal dot.
  if [[ ! $file =~ _index\.json$ ]]; then
    cp -- "$file" "$TARGET_DIR/metadata/"
  fi
done
#######################################
# Extract every *.tar.gz chunk of one dataset section into the target tree.
# Globals:
#   SOURCE_DIR (read) - root of the chunked dataset
#   TARGET_DIR (read) - root of the uncompressed output
# Arguments:
#   $1 - chunk type (scenes, furnitures, queries)
# Outputs:
#   Per-chunk progress to stdout; warnings for a missing source directory
#   or a failed extraction.
# Returns:
#   0 always — missing input and failed chunks are warnings, not fatal.
#######################################
process_chunks() {
  local type=$1
  local src_dir="$SOURCE_DIR/$type"
  local target_dir="$TARGET_DIR/$type"

  echo "Processing $type chunks..."

  # Check if source directory exists.
  # BUG FIX: the original closed this if-block (and the one below) with '}'
  # instead of 'fi' — a bash syntax error that broke the whole script.
  if [ ! -d "$src_dir" ]; then
    echo "Warning: Directory not found: $src_dir"
    return
  fi

  # Collect chunks with a glob instead of parsing 'ls' output
  # (robust against unusual filenames).
  local chunks=("$src_dir"/*.tar.gz)
  # Under default bash options an unmatched glob stays literal; treat that
  # single nonexistent entry as "no chunks".
  if [ "${#chunks[@]}" -eq 1 ] && [ ! -e "${chunks[0]}" ]; then
    echo "No chunks found in $src_dir"
    return
  fi

  # Process each chunk with a running progress counter.
  local total_chunks=${#chunks[@]}
  local current=0 chunk chunk_name
  for chunk in "${chunks[@]}"; do
    current=$((current + 1))
    chunk_name=$(basename "$chunk")
    printf "Extracting %s (%d/%d)..." "$chunk_name" "$current" "$total_chunks"
    if tar -xzf "$chunk" -C "$target_dir" 2>/dev/null; then
      echo " done"
    else
      echo " failed"
      echo "Warning: Failed to extract $chunk_name"
    fi
  done
}
# Dispatch: with no -c flag, extract all three sections; otherwise extract
# only the requested one after validating its name.
if [ -z "$CHUNK_TYPE" ]; then
  for kind in scenes furnitures queries; do
    process_chunks "$kind"
  done
else
  case "$CHUNK_TYPE" in
    scenes | furnitures | queries)
      process_chunks "$CHUNK_TYPE"
      ;;
    *)
      echo "Error: Invalid chunk type: $CHUNK_TYPE"
      echo "Valid types are: scenes, furnitures, queries"
      exit 1
      ;;
  esac
fi
# Basic validation
echo -e "\nValidating extracted files..."

# Check scenes: each extracted scene directory must contain both the image
# and its annotation file. Only runs when scenes were (or may have been)
# extracted in this invocation.
if [ -z "$CHUNK_TYPE" ] || [ "$CHUNK_TYPE" = "scenes" ]; then
  missing_files=0
  for scene_dir in "$TARGET_DIR"/scenes/*; do
    # Skip non-directories (including the literal pattern when the glob
    # matches nothing).
    [ -d "$scene_dir" ] || continue
    for required in "image.jpg" "annotation.json"; do
      if [ ! -f "$scene_dir/$required" ]; then
        echo "Warning: Missing $required in $(basename "$scene_dir")"
        missing_files=$((missing_files + 1))
      fi
    done
  done
  echo "Scene validation complete. Missing files: $missing_files"
fi

echo "Dataset uncompression completed!"
echo "Output directory: $TARGET_DIR"