Skip to content

Commit 0198687

Browse files
committed
Formatting.
Signed-off-by: Michał Zientkiewicz <mzient@gmail.com>
1 parent fdd2615 commit 0198687

2 files changed

Lines changed: 87 additions & 50 deletions

File tree

docs/examples/image_processing/index.py

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -79,10 +79,18 @@
7979
doc_entry(
8080
"multiple_crops.ipynb",
8181
[
82-
op_reference("fn.warp_affine", "Multiple crops from a single image", 10),
83-
op_reference("fn.transforms.crop", "Multiple crops from a single image", 10),
84-
op_reference("fn.per_frame", "Multiple crops from a single image", 10),
85-
]
82+
op_reference(
83+
"fn.warp_affine", "Multiple crops from a single image", 10
84+
),
85+
op_reference(
86+
"fn.transforms.crop",
87+
"Multiple crops from a single image",
88+
10,
89+
),
90+
op_reference(
91+
"fn.per_frame", "Multiple crops from a single image", 10
92+
),
93+
],
8694
),
8795
doc_entry(
8896
"resize.ipynb",

docs/examples/image_processing/multiple_crops.ipynb

Lines changed: 75 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@
3333
"import math\n",
3434
"import os\n",
3535
"\n",
36+
"\n",
3637
"def show(images, rows, columns):\n",
3738
" n = len(images)\n",
3839
"\n",
@@ -49,30 +50,30 @@
4950
" continue\n",
5051
" img = np.array(images[i])\n",
5152
" plt.imshow(img)\n",
52-
" \n",
53+
"\n",
5354
"\n",
5455
"def show_seqs(sequences):\n",
5556
" sequences = [np.array(seq) for seq in sequences]\n",
5657
" max_len = 0\n",
5758
" for seq in sequences:\n",
5859
" max_len = max(max_len, seq.shape[0])\n",
59-
" \n",
60-
" if max_len == 0: return\n",
60+
"\n",
61+
" if max_len == 0:\n",
62+
" return\n",
6163
"\n",
6264
" images = []\n",
63-
" \n",
65+
"\n",
6466
" for seq in sequences:\n",
6567
" seq_len = seq.shape[0]\n",
6668
" for i in range(seq_len):\n",
6769
" images.append(seq[i])\n",
6870
" for i in range(seq_len, max_len):\n",
6971
" images.append(None)\n",
70-
" \n",
72+
"\n",
7173
" show(images, len(sequences), max_len)\n",
72-
" \n",
7374
"\n",
74-
"dir = os.path.join(os.environ[\"DALI_EXTRA_PATH\"], 'db', 'single', 'jpeg')\n",
75-
"\n"
75+
"\n",
76+
"dir = os.path.join(os.environ[\"DALI_EXTRA_PATH\"], \"db\", \"single\", \"jpeg\")"
7677
]
7778
},
7879
{
@@ -97,44 +98,67 @@
9798
"source": [
9899
"@pipeline_def(\n",
99100
" batch_size=3,\n",
100-
" num_threads=4, device_id=0,\n",
101+
" num_threads=4,\n",
102+
" device_id=0,\n",
101103
"    experimental_exec_dynamic=True,  # allows manipulating shapes of GPU images in a CPU op\n",
102-
" seed=42\n",
104+
" seed=42,\n",
103105
")\n",
104106
"def fixed_size_crops():\n",
105107
" files, labels = fn.readers.file(file_root=dir, random_shuffle=True)\n",
106108
" img = fn.decoders.image(files, device=\"mixed\")\n",
107109
" shape = fn.shapes(img, device=\"cpu\", dtype=dali.types.INT32)\n",
108110
" h, w = shape[0], shape[1]\n",
109111
" # this is fixed for all crops taken from given image\n",
110-
" crop_w = fn.random.uniform(range=fn.stack(w/4, w/2), dtype=dali.types.INT32)\n",
111-
" crop_h = fn.random.uniform(range=fn.stack(h/4, h/2), dtype=dali.types.INT32)\n",
112+
" crop_w = fn.random.uniform(\n",
113+
" range=fn.stack(w / 4, w / 2), dtype=dali.types.INT32\n",
114+
" )\n",
115+
" crop_h = fn.random.uniform(\n",
116+
" range=fn.stack(h / 4, h / 2), dtype=dali.types.INT32\n",
117+
" )\n",
112118
" # the number of crops is random, from 1 to 5\n",
113119
" num_crops = fn.random.uniform(range=(1, 5), dtype=dali.types.INT32)\n",
114120
" # the crop origin is selected so that the crop doesn't go outside the image\n",
115-
" crop_x_range = fn.cast(fn.stack(0, w-crop_w), dtype=dali.types.FLOAT) # stacking different types\n",
116-
" crop_y_range = fn.cast(fn.stack(0, h-crop_h), dtype=dali.types.FLOAT)\n",
121+
" crop_x_range = fn.cast(\n",
122+
" fn.stack(0, w - crop_w), dtype=dali.types.FLOAT\n",
123+
" ) # stacking different types\n",
124+
" crop_y_range = fn.cast(fn.stack(0, h - crop_h), dtype=dali.types.FLOAT)\n",
117125
" crop_param_shape = num_crops[dali.newaxis]\n",
118-
" crop_x = fn.random.uniform(shape=crop_param_shape, range=crop_x_range, dtype=dali.types.INT32)\n",
119-
" crop_y = fn.random.uniform(shape=crop_param_shape, range=crop_y_range, dtype=dali.types.INT32)\n",
120-
" \n",
126+
" crop_x = fn.random.uniform(\n",
127+
" shape=crop_param_shape, range=crop_x_range, dtype=dali.types.INT32\n",
128+
" )\n",
129+
" crop_y = fn.random.uniform(\n",
130+
" shape=crop_param_shape, range=crop_y_range, dtype=dali.types.INT32\n",
131+
" )\n",
132+
"\n",
121133
" # these are in XY order as opposed to tensor shape, which is HW\n",
122134
" crop_start = fn.stack(crop_x, crop_y, axis=1)\n",
123135
" crop_size = fn.stack(crop_w, crop_h, axis=0)[dali.newaxis]\n",
124136
" crop_end = crop_start + crop_size\n",
125-
" \n",
126-
" crop_start = fn.per_frame(crop_start + 0.) # convert to float and mark outermost dimension as\"frame\"\n",
127-
" crop_end = fn.per_frame(crop_end + 0.)\n",
128-
" crop_size = fn.per_frame(crop_size + 0.)\n",
129-
" \n",
130-
" # create a \"crop\" transform matrix - the \"from\" coordinates are the coordinates within the source image\n",
131-
" # the \"to\" coordinates are in the destination image\n",
132-
" mtx = fn.transforms.crop(from_start=crop_start, from_end=crop_end, to_start=0., to_end=crop_size)\n",
133-
" \n",
134-
" crops = fn.warp_affine(img, mtx, size=fn.stack(crop_h, crop_w), interp_type=dali.types.INTERP_NN, inverse_map=False)\n",
135-
" \n",
137+
"\n",
138+
" crop_start = fn.per_frame(\n",
139+
" crop_start + 0.0\n",
140+
"    )  # convert to float and mark outermost dimension as \"frame\"\n",
141+
" crop_end = fn.per_frame(crop_end + 0.0)\n",
142+
" crop_size = fn.per_frame(crop_size + 0.0)\n",
143+
"\n",
144+
" # create a \"crop\" transform matrix\n",
145+
" # - the \"from\" coordinates are the coordinates within the source image\n",
146+
" # - the \"to\" coordinates are in the destination image\n",
147+
" mtx = fn.transforms.crop(\n",
148+
" from_start=crop_start, from_end=crop_end, to_start=0.0, to_end=crop_size\n",
149+
" )\n",
150+
"\n",
151+
" crops = fn.warp_affine(\n",
152+
" img,\n",
153+
" mtx,\n",
154+
" size=fn.stack(crop_h, crop_w),\n",
155+
" interp_type=dali.types.INTERP_NN,\n",
156+
" inverse_map=False,\n",
157+
" )\n",
158+
"\n",
136159
" return crops\n",
137160
"\n",
161+
"\n",
138162
"fixed_crop_pipe = fixed_size_crops()\n",
139163
"fixed_crop_pipe.build()"
140164
]
@@ -159,9 +183,8 @@
159183
}
160184
],
161185
"source": [
162-
"crops, = fixed_crop_pipe.run()\n",
163-
"show_seqs(crops.as_cpu())\n",
164-
"\n"
186+
"(crops,) = fixed_crop_pipe.run()\n",
187+
"show_seqs(crops.as_cpu())"
165188
]
166189
},
167190
{
@@ -185,9 +208,10 @@
185208
"source": [
186209
"@pipeline_def(\n",
187210
" batch_size=3,\n",
188-
" num_threads=4, device_id=0,\n",
211+
" num_threads=4,\n",
212+
" device_id=0,\n",
189213
"    experimental_exec_dynamic=True,  # allows manipulating shapes of GPU images in a CPU op\n",
190-
" seed=123\n",
214+
" seed=123,\n",
191215
")\n",
192216
"def var_sized_resized_crops():\n",
193217
" files, labels = fn.readers.file(file_root=dir, random_shuffle=True)\n",
@@ -201,22 +225,27 @@
201225
" input_size_wh = fn.stack(w, h)\n",
202226
" crop_size = fn.random.uniform(shape=crop_param_shape, range=(0.25, 0.5))\n",
203227
" crop_start = fn.random.uniform(shape=crop_param_shape, range=(0, 1))\n",
204-
" crop_start *= (1 - crop_size)\n",
205-
" \n",
206-
" output_size = dali.types.Constant([64., 64.])\n",
207-
" \n",
228+
" crop_start *= 1 - crop_size\n",
229+
"\n",
230+
" output_size = dali.types.Constant([64.0, 64.0])\n",
231+
"\n",
208232
" crop_end = crop_start + crop_size\n",
209-
" \n",
210-
" crop_start = fn.per_frame(crop_start * input_size_wh + 0.) # convert to float and mark outermost dimension as\"frame\"\n",
211-
" crop_end = fn.per_frame(crop_end * input_size_wh + 0.)\n",
212-
" crop_size = fn.per_frame(crop_size * input_size_wh + 0.)\n",
213-
" \n",
214-
" mtx = fn.transforms.crop(from_start=crop_start, from_end=crop_end, to_start=0, to_end=output_size)\n",
215-
" \n",
233+
"\n",
234+
" crop_start = fn.per_frame(\n",
235+
" crop_start * input_size_wh + 0.0\n",
236+
" ) # convert to float and mark outermost dimension as\"frame\"\n",
237+
" crop_end = fn.per_frame(crop_end * input_size_wh + 0.0)\n",
238+
" crop_size = fn.per_frame(crop_size * input_size_wh + 0.0)\n",
239+
"\n",
240+
" mtx = fn.transforms.crop(\n",
241+
" from_start=crop_start, from_end=crop_end, to_start=0, to_end=output_size\n",
242+
" )\n",
243+
"\n",
216244
" crops = fn.warp_affine(img, mtx, size=output_size, inverse_map=False)\n",
217-
" \n",
245+
"\n",
218246
" return crops\n",
219247
"\n",
248+
"\n",
220249
"var_crop_pipe = var_sized_resized_crops()\n",
221250
"var_crop_pipe.build()"
222251
]
@@ -248,7 +277,7 @@
248277
"source": [
249278
"crops, *tail = var_crop_pipe.run()\n",
250279
"print(*tail)\n",
251-
"show_seqs(crops.as_cpu())\n"
280+
"show_seqs(crops.as_cpu())"
252281
]
253282
}
254283
],

0 commit comments

Comments
 (0)