#!/bin/bash
|
|
|
|
|
#######################################
# Print usage text and terminate.
# Arguments:
#   $1 - exit status (optional; defaults to 1 for backward compatibility).
#        Pass 0 for an explicit help request: help goes to stdout,
#        error usage goes to stderr.
#######################################
usage() {
    local status=${1:-1}
    local fd=2
    # Help explicitly requested -> normal output channel.
    [ "$status" -eq 0 ] && fd=1
    {
        echo "Usage: $0 -s SOURCE_DIR -t TARGET_DIR [-c CHUNK_TYPE] [-h]"
        echo "Uncompress chunked DeepFurniture dataset"
        echo ""
        echo "Required arguments:"
        echo "  -s SOURCE_DIR    Source directory containing the chunked dataset"
        echo "  -t TARGET_DIR    Target directory for the uncompressed dataset"
        echo ""
        echo "Optional arguments:"
        echo "  -c CHUNK_TYPE    Specific chunk type to process (scenes, furnitures, queries)"
        echo "                   If not specified, all chunk types will be processed"
        echo "  -h               Show this help message"
    } >&"$fd"
    exit "$status"
}
|
|
|
|
|
# Parse command-line options. -h is a successful help request (exit 0);
# an unknown option reports usage with the default failure status.
while getopts "s:t:c:h" opt; do
    case "$opt" in
        s) SOURCE_DIR="$OPTARG";;
        t) TARGET_DIR="$OPTARG";;
        c) CHUNK_TYPE="$OPTARG";;
        h) usage 0;;
        \?) usage;;   # escaped: a bare ? would be a glob pattern
    esac
done
shift $((OPTIND - 1))
|
|
|
|
|
# Both -s and -t are mandatory; diagnostics belong on stderr.
if [ -z "${SOURCE_DIR:-}" ] || [ -z "${TARGET_DIR:-}" ]; then
    echo "Error: Source and target directories are required" >&2
    usage
fi
|
|
|
|
|
# The source tree must already exist; diagnostics belong on stderr.
if [ ! -d "$SOURCE_DIR" ]; then
    echo "Error: Source directory does not exist: $SOURCE_DIR" >&2
    exit 1
fi
|
|
|
|
|
# Lay out the target tree up front so every later copy/extract has a destination.
mkdir -p "$TARGET_DIR"/{metadata,scenes,furnitures,queries}

echo "Copying metadata files..."
# Copy every metadata JSON except chunk index files (*_index.json).
for file in "$SOURCE_DIR"/metadata/*.json*; do
    [ -e "$file" ] || continue          # glob matched nothing: skip literal pattern
    if [[ ! $file =~ _index\.json$ ]]; then   # dot escaped: regex, not glob
        cp "$file" "$TARGET_DIR/metadata/"
    fi
done
|
|
|
|
|
#######################################
# Extract every .tar.gz chunk of one dataset section into the target tree.
# Globals:
#   SOURCE_DIR (read), TARGET_DIR (read)
# Arguments:
#   $1 - chunk type (scenes, furnitures or queries)
# Outputs:
#   progress on stdout; warnings on stderr
# Returns:
#   0 (a missing directory or a failed chunk only produces a warning)
#######################################
process_chunks() {
    local type=$1
    local src_dir="$SOURCE_DIR/$type"
    local target_dir="$TARGET_DIR/$type"

    echo "Processing $type chunks..."

    # NOTE: the original closed these two if-blocks with '}' instead of
    # 'fi', which is a bash syntax error; fixed here.
    if [ ! -d "$src_dir" ]; then
        echo "Warning: Directory not found: $src_dir" >&2
        return 0
    fi

    # Collect chunks with a glob instead of parsing ls output; guard the
    # no-match case where the literal pattern is left in place.
    local chunks=() chunk
    for chunk in "$src_dir"/*.tar.gz; do
        [ -e "$chunk" ] && chunks+=("$chunk")
    done

    local total_chunks=${#chunks[@]}
    if [ "$total_chunks" -eq 0 ]; then
        echo "No chunks found in $src_dir"
        return 0
    fi

    local current=0 chunk_name
    for chunk in "${chunks[@]}"; do
        current=$((current + 1))
        chunk_name=$(basename "$chunk")
        printf "Extracting %s (%d/%d)..." "$chunk_name" "$current" "$total_chunks"

        # A corrupt chunk should not abort the whole run; warn and continue.
        if tar -xzf "$chunk" -C "$target_dir" 2>/dev/null; then
            echo " done"
        else
            echo " failed"
            echo "Warning: Failed to extract $chunk_name" >&2
        fi
    done
}
|
|
|
|
|
# Extract either the single requested chunk type or all three of them.
if [ -n "${CHUNK_TYPE:-}" ]; then
    case "$CHUNK_TYPE" in
        scenes|furnitures|queries)
            process_chunks "$CHUNK_TYPE"
            ;;
        *)
            # Diagnostics belong on stderr.
            echo "Error: Invalid chunk type: $CHUNK_TYPE" >&2
            echo "Valid types are: scenes, furnitures, queries" >&2
            exit 1
            ;;
    esac
else
    # No -c given: process every chunk type.
    for type in scenes furnitures queries; do
        process_chunks "$type"
    done
fi
|
|
|
|
|
printf '\nValidating extracted files...\n'

# Each extracted scene directory must hold both the rendered image and
# its annotation file; count anything that is absent.
if [ -z "$CHUNK_TYPE" ] || [ "$CHUNK_TYPE" = "scenes" ]; then
    missing_files=0
    for scene_dir in "$TARGET_DIR"/scenes/*; do
        [ -d "$scene_dir" ] || continue
        for required in image.jpg annotation.json; do
            [ -f "$scene_dir/$required" ] && continue
            echo "Warning: Missing $required in $(basename "$scene_dir")"
            missing_files=$((missing_files + 1))
        done
    done
    echo "Scene validation complete. Missing files: $missing_files"
fi

echo "Dataset uncompression completed!"
echo "Output directory: $TARGET_DIR"