Skip to content

Commit 8f796db

Browse files
committed
Add tests for GLCM texture metrics (#963)
29 tests covering validation, quantization, known-value checks, NaN handling, edge cases, all six metrics, angle averaging, and cross-backend consistency (numpy vs dask, cupy, dask+cupy).
1 parent 9112fd8 commit 8f796db

2 files changed

Lines changed: 361 additions & 39 deletions

File tree

xrspatial/glcm.py

Lines changed: 34 additions & 39 deletions
Original file line number | Diff line number | Diff line change
@@ -338,33 +338,30 @@ def _glcm_dask_numpy(agg, metrics, window_size, levels, distance, angle):
338338
raise ImportError("dask is required for the dask+numpy backend")
339339

340340
data = agg.data.astype(np.float64)
341-
half = window_size // 2
342-
# The kernel reads pixels up to half + distance from the center
343-
depth = half + distance
344-
n_metrics = len(metrics)
341+
depth = window_size // 2 + distance
345342

346343
# Global min/max for consistent quantization across chunks
347344
dmin = float(da.nanmin(data))
348345
dmax = float(da.nanmax(data))
349346

350-
# Quantize globally (element-wise, so dask handles it lazily)
351347
quantized = _dask_quantize(data, levels, dmin, dmax)
352348

353-
# Overlap and compute GLCM per chunk
354-
padded = da.overlap.overlap(quantized, depth={0: depth, 1: depth},
355-
boundary=-1)
349+
# Compute each metric individually via map_overlap then stack
350+
layers = []
351+
for m in metrics:
352+
single = [m]
356353

357-
def _chunk_func(block):
358-
result = _run_glcm_on_quantized(block, metrics, window_size,
359-
levels, distance, angle)
360-
return result[:, depth:-depth, depth:-depth]
354+
def _chunk_func(block, _single=single):
355+
return _run_glcm_on_quantized(block, _single, window_size,
356+
levels, distance, angle)[0]
361357

362-
result = padded.map_blocks(
363-
_chunk_func,
364-
dtype=np.float64,
365-
new_axis=0,
366-
chunks=((n_metrics,),) + quantized.chunks,
367-
)
358+
layer = da.map_overlap(
359+
_chunk_func, quantized,
360+
depth=depth, boundary=-1, dtype=np.float64,
361+
)
362+
layers.append(layer)
363+
364+
result = da.stack(layers, axis=0)
368365

369366
coords = dict(agg.coords)
370367
dims = ('metric',) + agg.dims
@@ -413,32 +410,30 @@ def _glcm_dask_cupy(agg, metrics, window_size, levels, distance, angle):
413410
raise ImportError("dask is required for the dask+cupy backend")
414411

415412
data = agg.data
416-
half = window_size // 2
417-
depth = half + distance
418-
n_metrics = len(metrics)
413+
depth = window_size // 2 + distance
419414

420-
# Global min/max
421415
dmin = float(da.nanmin(data))
422416
dmax = float(da.nanmax(data))
423417

424-
# Quantize globally on GPU, then overlap
425418
quantized = _dask_quantize(data, levels, dmin, dmax)
426-
padded = da.overlap.overlap(quantized, depth={0: depth, 1: depth},
427-
boundary=-1)
428-
429-
def _chunk_func(block):
430-
block_np = cupy.asnumpy(block)
431-
result = _run_glcm_on_quantized(block_np, metrics, window_size,
432-
levels, distance, angle)
433-
result = result[:, depth:-depth, depth:-depth]
434-
return cupy.asarray(result)
435-
436-
result = padded.map_blocks(
437-
_chunk_func,
438-
dtype=np.float64,
439-
new_axis=0,
440-
chunks=((n_metrics,),) + quantized.chunks,
441-
)
419+
420+
layers = []
421+
for m in metrics:
422+
single = [m]
423+
424+
def _chunk_func(block, _single=single):
425+
block_np = cupy.asnumpy(block)
426+
result = _run_glcm_on_quantized(block_np, _single, window_size,
427+
levels, distance, angle)[0]
428+
return cupy.asarray(result)
429+
430+
layer = da.map_overlap(
431+
_chunk_func, quantized,
432+
depth=depth, boundary=-1, dtype=np.float64,
433+
)
434+
layers.append(layer)
435+
436+
result = da.stack(layers, axis=0)
442437

443438
coords = dict(agg.coords)
444439
dims = ('metric',) + agg.dims

0 commit comments

Comments (0)