@@ -164,7 +164,7 @@ def for_each_chunking_advanced(ds: Dataset, view: View) -> None:
164
164
.get_view (absolute_offset = offset , size = size )
165
165
)
166
166
chunk_data = chunk .read ()
167
- assert np .array_equal (
167
+ np .testing . assert_array_equal (
168
168
np .ones (chunk_data .shape , dtype = np .dtype ("uint8" ))
169
169
* (sum (chunk .bounding_box .topleft ) % 256 ),
170
170
chunk_data ,
@@ -566,7 +566,7 @@ def test_view_write(data_format: DataFormat, output_path: UPath) -> None:
566
566
wk_view .write (write_data , allow_unaligned = True )
567
567
568
568
data = wk_view .read (absolute_offset = (0 , 0 , 0 ), size = (10 , 10 , 10 ))
569
- assert np .array_equal (data , write_data )
569
+ np .testing . assert_array_equal (data , write_data )
570
570
571
571
572
572
@pytest .mark .parametrize ("output_path" , [TESTOUTPUT_DIR , REMOTE_TESTOUTPUT_DIR ])
@@ -581,13 +581,13 @@ def test_direct_zarr_access(output_path: UPath, data_format: DataFormat) -> None
581
581
write_data = (np .random .rand (3 , 10 , 10 , 10 ) * 255 ).astype (np .uint8 )
582
582
mag .get_zarr_array ()[:, 0 :10 , 0 :10 , 0 :10 ].write (write_data ).result ()
583
583
data = mag .read (absolute_offset = (0 , 0 , 0 ), size = (10 , 10 , 10 ))
584
- assert np .array_equal (data , write_data )
584
+ np .testing . assert_array_equal (data , write_data )
585
585
586
586
# write: wk, read: zarr
587
587
write_data = (np .random .rand (3 , 10 , 10 , 10 ) * 255 ).astype (np .uint8 )
588
588
mag .write (write_data , absolute_offset = (0 , 0 , 0 ), allow_unaligned = True )
589
589
data = mag .get_zarr_array ()[:, 0 :10 , 0 :10 , 0 :10 ].read ().result ()
590
- assert np .array_equal (data , write_data )
590
+ np .testing . assert_array_equal (data , write_data )
591
591
592
592
593
593
@pytest .mark .parametrize ("data_format,output_path" , DATA_FORMATS_AND_OUTPUT_PATHS )
@@ -852,7 +852,7 @@ def test_write_multi_channel_uint8(data_format: DataFormat, output_path: UPath)
852
852
853
853
mag .write (data , allow_resize = True )
854
854
855
- assert np .array_equal (data , mag .read ())
855
+ np .testing . assert_array_equal (data , mag .read ())
856
856
857
857
assure_exported_properties (ds )
858
858
@@ -876,7 +876,7 @@ def test_wkw_write_multi_channel_uint16(
876
876
mag .write (data , allow_resize = True )
877
877
written_data = mag .read ()
878
878
879
- assert np .array_equal (data , written_data )
879
+ np .testing . assert_array_equal (data , written_data )
880
880
881
881
assure_exported_properties (ds )
882
882
@@ -993,7 +993,7 @@ def test_read_padded_data(data_format: DataFormat, output_path: UPath) -> None:
993
993
data = mag .read (absolute_offset = (0 , 0 , 0 ), size = (10 , 10 , 10 ))
994
994
995
995
assert data .shape == (3 , 10 , 10 , 10 )
996
- assert np .array_equal (data , np .zeros ((3 , 10 , 10 , 10 )))
996
+ np .testing . assert_array_equal (data , np .zeros ((3 , 10 , 10 , 10 )))
997
997
998
998
999
999
@pytest .mark .parametrize ("data_format,output_path" , DATA_FORMATS_AND_OUTPUT_PATHS )
@@ -1272,7 +1272,7 @@ def test_chunking_wk(data_format: DataFormat, output_path: UPath) -> None:
1272
1272
chunk_shape = shard_shape ,
1273
1273
executor = executor ,
1274
1274
)
1275
- assert np .array_equal (original_data + 50 , mag .get_view ().read ()[0 ])
1275
+ np .testing . assert_array_equal (original_data + 50 , mag .get_view ().read ()[0 ])
1276
1276
1277
1277
# Reset the data
1278
1278
mag .write (absolute_offset = (70 , 80 , 90 ), data = original_data , allow_resize = True )
@@ -1282,7 +1282,7 @@ def test_chunking_wk(data_format: DataFormat, output_path: UPath) -> None:
1282
1282
chunk_job ,
1283
1283
chunk_shape = shard_shape ,
1284
1284
)
1285
- assert np .array_equal (original_data + 50 , mag .get_view ().read ()[0 ])
1285
+ np .testing . assert_array_equal (original_data + 50 , mag .get_view ().read ()[0 ])
1286
1286
1287
1287
assure_exported_properties (ds )
1288
1288
@@ -1410,7 +1410,7 @@ def test_changing_layer_bounding_box(
1410
1410
assert tuple (bbox_size ) == (12 , 12 , 10 )
1411
1411
less_data = mag .read (absolute_offset = (0 , 0 , 0 ), size = bbox_size )
1412
1412
assert less_data .shape == (3 , 12 , 12 , 10 )
1413
- assert np .array_equal (original_data [:, :12 , :12 , :10 ], less_data )
1413
+ np .testing . assert_array_equal (original_data [:, :12 , :12 , :10 ], less_data )
1414
1414
1415
1415
layer .bounding_box = layer .bounding_box .with_size (
1416
1416
[36 , 48 , 60 ]
@@ -1420,7 +1420,7 @@ def test_changing_layer_bounding_box(
1420
1420
assert tuple (bbox_size ) == (36 , 48 , 60 )
1421
1421
more_data = mag .read (absolute_offset = (0 , 0 , 0 ), size = bbox_size )
1422
1422
assert more_data .shape == (3 , 36 , 48 , 60 )
1423
- assert np .array_equal (more_data [:, :24 , :24 , :24 ], original_data )
1423
+ np .testing . assert_array_equal (more_data [:, :24 , :24 , :24 ], original_data )
1424
1424
1425
1425
assert tuple (ds .get_layer ("color" ).bounding_box .topleft ) == (0 , 0 , 0 )
1426
1426
@@ -1432,12 +1432,12 @@ def test_changing_layer_bounding_box(
1432
1432
new_bbox_size = ds .get_layer ("color" ).bounding_box .size
1433
1433
assert tuple (new_bbox_offset ) == (10 , 10 , 0 )
1434
1434
assert tuple (new_bbox_size ) == (14 , 14 , 24 )
1435
- assert np .array_equal (
1435
+ np .testing . assert_array_equal (
1436
1436
original_data ,
1437
1437
mag .read (absolute_offset = (0 , 0 , 0 ), size = mag .bounding_box .bottomright ),
1438
1438
)
1439
1439
1440
- assert np .array_equal (
1440
+ np .testing . assert_array_equal (
1441
1441
original_data [:, 10 :, 10 :, :],
1442
1442
mag .read (absolute_offset = (10 , 10 , 0 ), size = (14 , 14 , 24 )),
1443
1443
)
@@ -1447,7 +1447,7 @@ def test_changing_layer_bounding_box(
1447
1447
layer .bounding_box = BoundingBox ((0 , 0 , 0 ), new_bbox_size )
1448
1448
new_data = mag .read ()
1449
1449
assert new_data .shape == (3 , 14 , 14 , 24 )
1450
- assert np .array_equal (original_data [:, :14 , :14 , :], new_data )
1450
+ np .testing . assert_array_equal (original_data [:, :14 , :14 , :], new_data )
1451
1451
1452
1452
assure_exported_properties (ds )
1453
1453
@@ -1635,11 +1635,11 @@ def test_writing_subset_of_compressed_data_multi_channel(
1635
1635
view .write (relative_offset = (10 , 20 , 30 ), data = write_data2 )
1636
1636
view .write (relative_offset = (10 , 20 , 30 ), data = write_data2 , allow_unaligned = True )
1637
1637
1638
- assert np .array_equal (
1638
+ np .testing . assert_array_equal (
1639
1639
write_data2 ,
1640
1640
compressed_mag .read (relative_offset = (60 , 80 , 100 ), size = (10 , 10 , 10 )),
1641
1641
) # the new data was written
1642
- assert np .array_equal (
1642
+ np .testing . assert_array_equal (
1643
1643
write_data1 [:, :60 , :80 , :100 ],
1644
1644
compressed_mag .read (relative_offset = (0 , 0 , 0 ), size = (60 , 80 , 100 )),
1645
1645
) # the old data is still there
@@ -1681,11 +1681,11 @@ def test_writing_subset_of_compressed_data_single_channel(
1681
1681
view .write (relative_offset = (10 , 20 , 30 ), data = write_data2 )
1682
1682
view .write (relative_offset = (10 , 20 , 30 ), data = write_data2 , allow_unaligned = True )
1683
1683
1684
- assert np .array_equal (
1684
+ np .testing . assert_array_equal (
1685
1685
write_data2 ,
1686
1686
compressed_mag .read (absolute_offset = (60 , 80 , 100 ), size = (10 , 10 , 10 ))[0 ],
1687
1687
) # the new data was written
1688
- assert np .array_equal (
1688
+ np .testing . assert_array_equal (
1689
1689
write_data1 [:60 , :80 , :100 ],
1690
1690
compressed_mag .read (absolute_offset = (0 , 0 , 0 ), size = (60 , 80 , 100 ))[0 ],
1691
1691
) # the old data is still there
@@ -1861,7 +1861,7 @@ def test_add_layer_as_ref(data_format: DataFormat, output_path: UPath) -> None:
1861
1861
(np .random .rand (3 , 10 , 10 , 10 ) * 255 ).astype (np .uint8 ), allow_unaligned = True
1862
1862
)
1863
1863
1864
- assert np .array_equal (
1864
+ np .testing . assert_array_equal (
1865
1865
mag .read (absolute_offset = (0 , 0 , 0 ), size = (10 , 10 , 10 )),
1866
1866
original_mag .read (absolute_offset = (0 , 0 , 0 ), size = (10 , 10 , 10 )),
1867
1867
)
@@ -1963,7 +1963,7 @@ def test_add_mag_as_ref(data_format: DataFormat, output_path: UPath) -> None:
1963
1963
assert not layer .get_mag (1 ).read_only
1964
1964
assert ref_mag_2 .read_only
1965
1965
1966
- assert np .array_equal (
1966
+ np .testing . assert_array_equal (
1967
1967
ref_mag_2 .read (absolute_offset = (0 , 0 , 0 ), size = (10 , 10 , 10 ))[0 ],
1968
1968
original_layer .get_mag (2 ).read (absolute_offset = (0 , 0 , 0 ), size = (10 , 10 , 10 ))[0 ],
1969
1969
)
@@ -2051,10 +2051,10 @@ def test_add_mag_as_copy(data_format: DataFormat, output_path: UPath) -> None:
2051
2051
allow_unaligned = True ,
2052
2052
)
2053
2053
2054
- assert np .array_equal (
2054
+ np .testing . assert_array_equal (
2055
2055
copy_mag .read (absolute_offset = (0 , 0 , 0 ), size = (5 , 5 , 5 ))[0 ], new_data
2056
2056
)
2057
- assert np .array_equal (original_mag .read ()[0 ], original_data )
2057
+ np .testing . assert_array_equal (original_mag .read ()[0 ], original_data )
2058
2058
2059
2059
assure_exported_properties (original_ds )
2060
2060
assure_exported_properties (copy_ds )
@@ -2100,10 +2100,10 @@ def test_add_fs_copy_mag(data_format: DataFormat, output_path: UPath) -> None:
2100
2100
allow_unaligned = True ,
2101
2101
)
2102
2102
2103
- assert np .array_equal (
2103
+ np .testing . assert_array_equal (
2104
2104
copy_mag .read (absolute_offset = (0 , 0 , 0 ), size = (5 , 5 , 5 ))[0 ], new_data
2105
2105
)
2106
- assert np .array_equal (original_mag .read ()[0 ], original_data )
2106
+ np .testing . assert_array_equal (original_mag .read ()[0 ], original_data )
2107
2107
2108
2108
assure_exported_properties (original_ds )
2109
2109
assure_exported_properties (copy_ds )
@@ -2127,7 +2127,7 @@ def test_search_dataset_also_for_long_layer_name(
2127
2127
write_data = (np .random .rand (10 , 10 , 10 ) * 255 ).astype (np .uint8 )
2128
2128
mag .write (write_data , absolute_offset = (20 , 20 , 20 ), allow_resize = True )
2129
2129
2130
- assert np .array_equal (
2130
+ np .testing . assert_array_equal (
2131
2131
mag .read (absolute_offset = (20 , 20 , 20 ), size = (20 , 20 , 20 )),
2132
2132
np .expand_dims (write_data , 0 ),
2133
2133
)
@@ -2146,7 +2146,7 @@ def test_search_dataset_also_for_long_layer_name(
2146
2146
# when opening the dataset, it searches both for the long and the short path
2147
2147
layer = Dataset .open (ds_path ).get_layer ("color" )
2148
2148
mag = layer .get_mag ("2" )
2149
- assert np .array_equal (
2149
+ np .testing . assert_array_equal (
2150
2150
mag .read (absolute_offset = (20 , 20 , 20 ), size = (20 , 20 , 20 )),
2151
2151
np .expand_dims (write_data , 0 ),
2152
2152
)
@@ -2318,7 +2318,7 @@ def test_dataset_conversion_wkw_only() -> None:
2318
2318
assert origin_info .compression_mode == converted_info .compression_mode
2319
2319
assert origin_info .chunk_shape == converted_info .chunk_shape
2320
2320
assert origin_info .data_format == converted_info .data_format
2321
- assert np .array_equal (
2321
+ np .testing . assert_array_equal (
2322
2322
origin_ds .layers [layer_name ].mags [mag ].read (),
2323
2323
converted_ds .layers [layer_name ].mags [mag ].read (),
2324
2324
)
@@ -2412,7 +2412,7 @@ def test_for_zipped_chunks(data_format: DataFormat) -> None:
2412
2412
executor = executor ,
2413
2413
)
2414
2414
2415
- assert np .array_equal (
2415
+ np .testing . assert_array_equal (
2416
2416
source_view .read () + 50 ,
2417
2417
target_view .read (),
2418
2418
)
@@ -2470,6 +2470,25 @@ def test_for_zipped_chunks_invalid_target_chunk_shape_wk(
2470
2470
assure_exported_properties (ds )
2471
2471
2472
2472
2473
@pytest.mark.parametrize("output_path", OUTPUT_PATHS)
def test_invalid_chunk_shard_shape(output_path: UPath) -> None:
    """Adding a mag with a malformed chunk/shard shape must raise a ValueError."""
    ds_path = prepare_dataset_path(
        DEFAULT_DATA_FORMAT, output_path, "invalid_chunk_shape"
    )
    dataset = Dataset(ds_path, voxel_size=(1, 1, 1))
    layer = dataset.add_layer("color", COLOR_CATEGORY, data_format=DEFAULT_DATA_FORMAT)

    # Every chunk_shape axis must be a power of two.
    with pytest.raises(ValueError, match=".*must be a power of two.*"):
        layer.add_mag("1", chunk_shape=(3, 4, 4))

    # The shard shape must be an axis-wise multiple of the chunk shape.
    invalid_shard_shapes = [
        (8, 16, 16),  # smaller than the chunk shape on the first axis
        (53, 16, 16),  # not a multiple (and not a power of two either)
    ]
    for shard_shape in invalid_shard_shapes:
        with pytest.raises(ValueError, match=".*must be a multiple.*"):
            layer.add_mag("1", chunk_shape=(16, 16, 16), shard_shape=shard_shape)
2491
+
2473
2492
@pytest .mark .parametrize ("data_format,output_path" , DATA_FORMATS_AND_OUTPUT_PATHS )
2474
2493
def test_read_only_view (data_format : DataFormat , output_path : UPath ) -> None :
2475
2494
ds_path = prepare_dataset_path (data_format , output_path , "read_only_view" )
@@ -2590,12 +2609,69 @@ def test_compression(data_format: DataFormat, output_path: UPath) -> None:
2590
2609
)
2591
2610
mag1 = Dataset .open (compressed_dataset_path ).get_layer ("color" ).get_mag (1 )
2592
2611
else :
2593
- mag1 .compress ()
2612
+ with get_executor ("sequential" ) as executor :
2613
+ mag1 .compress (executor = executor )
2594
2614
2595
2615
assert mag1 ._is_compressed ()
2596
2616
assert mag1 .info .data_format == data_format
2597
2617
2598
- assert np .array_equal (
2618
+ np .testing .assert_array_equal (
2619
+ write_data , mag1 .read (absolute_offset = (60 , 80 , 100 ), size = (10 , 20 , 30 ))
2620
+ )
2621
+
2622
+ # writing unaligned data to a compressed dataset works because the data gets padded, but it prints a warning
2623
+ mag1 .write (
2624
+ (np .random .rand (3 , 10 , 20 , 30 ) * 255 ).astype (np .uint8 ), allow_resize = True
2625
+ )
2626
+
2627
+ assure_exported_properties (mag1 .layer .dataset )
2628
+
2629
+
2630
@pytest.mark.parametrize("data_format,output_path", DATA_FORMATS_AND_OUTPUT_PATHS)
def test_rechunking(data_format: DataFormat, output_path: UPath) -> None:
    """Rechunking an uncompressed mag compresses it and rewrites it with the
    default chunk/shard shapes (32^3 chunks; 32^3 shards for Zarr, 1024^3 otherwise).

    Remote datasets cannot be rechunked in place and require a `target_path`.
    """
    new_dataset_path = prepare_dataset_path(data_format, output_path)
    ds = Dataset(new_dataset_path, voxel_size=(2, 2, 1))
    mag1 = ds.add_layer(
        "color", COLOR_CATEGORY, num_channels=3, data_format=data_format
    ).add_mag(
        1,
        compress=False,
        # Deliberately non-default shapes so that rechunking has visible effect.
        chunk_shape=(16, 16, 16),
        shard_shape=(16, 16, 16) if data_format == DataFormat.Zarr else (64, 64, 64),
    )

    # writing unaligned data to an uncompressed dataset
    write_data = (np.random.rand(3, 10, 20, 30) * 255).astype(np.uint8)
    mag1.write(write_data, absolute_offset=(60, 80, 100), allow_resize=True)

    assert not mag1._is_compressed()

    if output_path == REMOTE_TESTOUTPUT_DIR:
        # Remote datasets require a `target_path` for rechunking
        with pytest.raises(AssertionError):
            mag1.rechunk()

        compressed_dataset_path = (
            REMOTE_TESTOUTPUT_DIR / f"simple_{data_format}_dataset_compressed"
        )
        # Rechunking to a remote target warns about being slow.
        with pytest.warns(UserWarning, match=".*can be slow.*"):
            mag1.rechunk(
                target_path=compressed_dataset_path,
            )
        # Continue the assertions below against the rechunked copy.
        mag1 = Dataset.open(compressed_dataset_path).get_layer("color").get_mag(1)
    else:
        # Local datasets can be rechunked in place.
        with get_executor("sequential") as executor:
            mag1.rechunk(executor=executor)

    # Rechunking compresses and applies the default chunk/shard shapes.
    assert mag1.info.data_format == data_format
    assert mag1._is_compressed()
    assert mag1.info.chunk_shape == Vec3Int.full(32)
    if data_format == DataFormat.Zarr:
        assert mag1.info.shard_shape == Vec3Int.full(32)
    else:
        assert mag1.info.shard_shape == Vec3Int.full(1024)

    # The voxel data must survive the rechunking unchanged.
    np.testing.assert_array_equal(
        write_data, mag1.read(absolute_offset=(60, 80, 100), size=(10, 20, 30))
    )
2601
2677
@@ -2816,7 +2892,7 @@ def test_read_bbox() -> None:
2816
2892
allow_resize = True ,
2817
2893
)
2818
2894
2819
- assert np .array_equal (
2895
+ np .testing . assert_array_equal (
2820
2896
mag .read (absolute_offset = (20 , 30 , 40 ), size = (40 , 50 , 60 )),
2821
2897
mag .read (
2822
2898
absolute_bounding_box = BoundingBox (topleft = (20 , 30 , 40 ), size = (40 , 50 , 60 ))
@@ -2863,7 +2939,7 @@ def test_add_layer_as_copy(data_format: DataFormat, output_path: UPath) -> None:
2863
2939
assert color_layer .mags .keys () == original_color_layer .mags .keys ()
2864
2940
assert len (color_layer .mags .keys ()) >= 1
2865
2941
for mag in color_layer .mags .keys ():
2866
- assert np .array_equal (
2942
+ np .testing . assert_array_equal (
2867
2943
color_layer .get_mag (mag ).read (), original_color_layer .get_mag (mag ).read ()
2868
2944
)
2869
2945
# Test if the copied layer contains actual data
@@ -2912,7 +2988,7 @@ def test_rename_layer(data_format: DataFormat, output_path: UPath) -> None:
2912
2988
assert ds .get_layer ("color2" ).data_format == data_format
2913
2989
2914
2990
# The "mag" object which was created before renaming the layer is still valid
2915
- assert np .array_equal (mag .read ()[0 ], write_data )
2991
+ np .testing . assert_array_equal (mag .read ()[0 ], write_data )
2916
2992
2917
2993
assure_exported_properties (ds )
2918
2994
@@ -3107,7 +3183,7 @@ def test_pickle_view() -> None:
3107
3183
3108
3184
# Make sure that the pickled mag can still read data
3109
3185
assert pickled_mag1 ._cached_array is None
3110
- assert np .array_equal (
3186
+ np .testing . assert_array_equal (
3111
3187
data_to_write ,
3112
3188
pickled_mag1 .read (relative_offset = (0 , 0 , 0 ), size = data_to_write .shape [- 3 :]),
3113
3189
)
0 commit comments