@@ -29,14 +29,14 @@ cd milk_pouch_project
2929# NOTE: Adjust the grep pattern if other image types are expected.
3030echo " === DEBUGGING START ==="
3131echo " DEBUG: gcs_path variable is: '${gcs_path} '"
32- echo " DEBUG: Running 'gsutil ls \ "${gcs_path} \ " ' to check accessibility:"
33- gsutil ls " ${gcs_path} " || echo " ❌ gsutil ls failed"
34- echo " DEBUG: Running 'gsutil ls -r \ "${gcs_path} \ " | head -n 10' to check content:"
35- gsutil ls -r " ${gcs_path} " | head -n 10 || echo " ❌ gsutil recursive ls failed"
32+ echo " DEBUG: Running 'gcloud storage ls " ${gcs_path} " ' to check accessibility:"
33+ gcloud storage ls " ${gcs_path} " || echo " ❌ gsutil ls failed"
34+ echo " DEBUG: Running 'gcloud storage ls --recursive " ${gcs_path} " | head -n 10' to check content:"
35+ gcloud storage ls --recursive " ${gcs_path} " | head -n 10 || echo " ❌ gsutil recursive ls failed"
3636echo " === DEBUGGING END ==="
3737
3838echo " 🖨️ Listing image files from GCS bucket: $gcs_path "
39- mapfile -t all_gcs_files < <( gsutil ls -r " ${gcs_path} " | grep -iE ' \.(png|jpg|jpeg)$' | grep -v " /predictions/" | grep -v " /processed/" )
39+ mapfile -t all_gcs_files < <(gcloud storage ls --recursive "${gcs_path}" | grep -iE '\.(png|jpg|jpeg)$' | grep -v "/predictions/" | grep -v "/processed/")
4040num_files=${#all_gcs_files[@]}
4141
4242if (( num_files == 0 )) ; then
@@ -77,7 +77,7 @@ for (( i=0; i<num_files; i+=batch_size )); do
7777
7878 # Copy current batch files from GCS
7979 echo " 🖨️ Copying $num_in_batch files from GCS to input_images/..."
80- gsutil -m cp " ${current_batch[@]} " input_images/
80+ gcloud storage cp "${current_batch[@]}" input_images/
8181
8282 # Extract objects
8383 echo " 🔎 Extracting objects from images..."
@@ -96,7 +96,7 @@ for (( i=0; i<num_files; i+=batch_size )); do
9696 # Move predictions back to GCS
9797 if [ -d " predictions" ] && [ -n " $( find predictions -type f -print -quit) " ]; then
9898 echo " 🖨️ Moving predictions for this batch back to GCS bucket: $gcs_path "
99- gsutil -m cp -r predictions/ " $gcs_path "
99+ gcloud storage cp --recursive predictions/ "$gcs_path"
100100 else
101101 echo " ⚠️ No predictions generated for this batch."
102102 fi
@@ -109,7 +109,7 @@ for (( i=0; i<num_files; i+=batch_size )); do
109109
110110 target_root=" ${clean_gcs_path} processed/"
111111
112- # Group files by their destination directory to optimize gsutil calls
112+ # Group files by their destination directory to optimize gcloud storage calls
113113 declare -a current_move_batch
114114 current_move_dir=""
115115
@@ -128,7 +128,7 @@ for (( i=0; i<num_files; i+=batch_size )); do
128128 # If the destination directory changes, flush the current batch
129129 if [[ " $dest_dir " != " $current_move_dir " ]]; then
130130 if (( ${#current_move_batch[@]} > 0 )); then
131- gsutil -m mv " ${current_move_batch[@]} " " $current_move_dir "
131+ gcloud storage mv "${current_move_batch[@]}" "$current_move_dir"
132132 current_move_batch=()
133133 fi
134134 current_move_dir=" $dest_dir "
@@ -138,7 +138,7 @@ for (( i=0; i<num_files; i+=batch_size )); do
138138
139139 # Flush any remaining files
140140 if (( ${#current_move_batch[@]} > 0 )); then
141- gsutil -m mv " ${current_move_batch[@]} " " $current_move_dir "
141+ gcloud storage mv "${current_move_batch[@]}" "$current_move_dir"
142142 fi
143143
144144 unset current_move_batch
0 commit comments