@@ -6,6 +6,7 @@
 import numpy as np
 import pytest
 import xarray as xr
+from dask.array.core import PerformanceWarning
 from numpy.testing import assert_allclose, assert_almost_equal, assert_equal
 from shapely import segmentize
 from shapely.geometry import MultiPolygon, Polygon
@@ -14,7 +15,7 @@
 from xesmf.frontend import as_2d_mesh
 
 dask_schedulers = ['threaded_scheduler', 'processes_scheduler', 'distributed_scheduler']
-
+pytestmark = pytest.mark.filterwarnings('ignore:Input array is not C_CONTIGUOUS')
 
 # same test data as test_backend.py, but here we can use xarray DataSet
 ds_in = xe.util.grid_global(20, 12)
@@ -56,7 +57,7 @@
     },
     data_vars={'abc': (('lon', 'lat'), [[1.0, 2.0], [3.0, 4.0], [2.0, 4.0]])},
 )
-polys = [
+polys_raw = [
     Polygon([[0.5, 0.5], [0.5, 1.5], [1.5, 0.5]]),  # Simple triangle polygon
     MultiPolygon(
         [
@@ -94,6 +95,15 @@
         ),
     ],  # Combination of Polygon and MultiPolygon with two different areas
 ]
+
+
+def _segmentize(p):
+    if isinstance(p, list):
+        return list(map(_segmentize, p))
+    return segmentize(p, 1)
+
+
+polys = list(map(_segmentize, polys_raw))
 exps_polys = [1.75, 3, 2.1429, 4, 0, 2.5, [1.75, 3.6]]
 
 
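Note on the hunk above: `shapely.segmentize(geom, max_segment_length)` densifies a geometry by inserting vertices until no edge exceeds the given length, and the recursive `_segmentize` wrapper extends that to the list-valued entries of `polys_raw`. A minimal sketch of the effect, reusing the triangle defined above (the printed vertex counts are my own illustration, not part of the patch):

```python
# Sketch: segmentize splits edges longer than max_segment_length.
# The triangle's hypotenuse (length ~1.41) gains one vertex; the two
# unit-length legs are left alone.
from shapely import segmentize
from shapely.geometry import Polygon

tri = Polygon([[0.5, 0.5], [0.5, 1.5], [1.5, 0.5]])
dense = segmentize(tri, 1)
print(len(tri.exterior.coords), len(dense.exterior.coords))  # 4 5
```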
@@ -196,7 +206,7 @@ def test_regridder_w():
     w = regridder.w
     assert w.shape == ds_out.lon.shape + ds_in.lon.shape
 
-    p = Polygon([(-10, -10), (10, -10), (10, 10), (-10, 10)])
+    p = segmentize(Polygon([(-10, -10), (10, -10), (10, 10), (-10, 10)]), 1)
 
     averager = xe.SpatialAverager(ds_in, [p])
     assert averager.w.shape == (1,) + ds_in.lon.shape
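The square here is 20° on a side, so without the `segmentize` call the new `SpatialAverager` check would flag its edges. A hedged sketch of the behaviour this sidesteps, using the module-level `ds_in` (the same pattern the deleted `test_densify_polys` used to assert, now covered by `test_spatial_averager_warns` further down):

```python
# Sketch (not part of the patch): an undensified coarse polygon now
# triggers the large-segments UserWarning on averager construction.
coarse = Polygon([(-10, -10), (10, -10), (10, 10), (-10, 10)])
with pytest.warns(UserWarning, match='contains large'):
    xe.SpatialAverager(ds_in, [coarse])
```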
@@ -275,12 +285,14 @@ def test_conservative_without_bounds():
         xe.Regridder(ds_in.drop_vars('lon_b'), ds_out, 'conservative')
 
 
-def test_build_regridder_from_dict():
+def test_regridder_from_dict():
     lon_in = ds_in['lon'].values
     lat_in = ds_in['lat'].values
     lon_out = ds_out['lon'].values
     lat_out = ds_out['lat'].values
-    _ = xe.Regridder({'lon': lon_in, 'lat': lat_in}, {'lon': lon_out, 'lat': lat_out}, 'bilinear')
+    reg = xe.Regridder({'lon': lon_in, 'lat': lat_in}, {'lon': lon_out, 'lat': lat_out}, 'bilinear')
+    with pytest.warns(UserWarning, match=r"Using dimensions \('y', 'x'\) from data variable"):
+        reg(ds_in['data'])
 
 
 def test_regrid_periodic_wrong():
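The renamed test now also applies the regridder it builds: constructed from bare `{'lon': ..., 'lat': ...}` dicts, the regridder has no dimension names of its own, so at call time it infers the horizontal dims from the input variable and reports the guess as a `UserWarning`. Outside of a test, a user who accepts the inference could silence just that message with the standard `warnings` machinery; a sketch reusing `reg` from above (the message text is taken from the match pattern asserted in the test):

```python
# Sketch: suppress only the dimension-inference warning.
import warnings

with warnings.catch_warnings():
    warnings.filterwarnings('ignore', message='Using dimensions')
    reg(ds_in['data'])
```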
@@ -357,8 +369,8 @@ def test_regrid_with_1d_grid_infer_bounds():
     ds_out_1d = ds_2d_to_1d(ds_out).swap_dims(x='lon', y='lat')
 
     regridder = xe.Regridder(ds_in_1d, ds_out_1d, 'conservative', periodic=True)
-
-    dr_out = regridder(ds_in['data'])
+    with pytest.warns(UserWarning, match=r"Using dimensions \('y', 'x'\) from data variable"):
+        dr_out = regridder(ds_in['data'])
 
     # compare with provided-bounds solution
     dr_exp = xe.Regridder(ds_in, ds_out, 'conservative', periodic=True)(ds_in['data'])
@@ -433,7 +445,8 @@ def test_regrid_dataarray(use_cfxr):
 
     # test renamed dim
     if not use_cfxr:
-        dr_out_rn = regridder(ds_in2.rename(y='why')['data'])
+        with pytest.warns(UserWarning, match=r"Using dimensions \('why', 'x'\)"):
+            dr_out_rn = regridder(ds_in2.rename(y='why')['data'])
         xr.testing.assert_identical(dr_out, dr_out_rn)
 
 
@@ -443,14 +456,14 @@ def test_regrid_dataarray_endianess(use_dask):
     regridder = xe.Regridder(ds_in, ds_out, 'conservative')
 
     exp = regridder(ds_in['data'])  # Normal (little-endian)
-    # with pytest.warns(UserWarning, match='Input array has a dtype not supported'):
 
     if use_dask:
         indata = ds_in.data.astype('>f8').chunk()
     else:
         indata = ds_in.data.astype('>f8')
 
-    out = regridder(indata)  # big endian
+    with pytest.warns(UserWarning, match='Input array has a dtype not supported'):
+        out = regridder(indata)  # big endian
 
     # Results should be the same
     assert_equal(exp.values, out.values)
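The previously commented-out expectation is now asserted: the frontend casts big-endian `'>f8'` input to a supported dtype and warns while doing so, and the test checks the values still round-trip. For reference, a small self-contained sketch of how byte order is visible on the numpy side (the `.byteorder` check illustrates what such detection can key on, not necessarily xESMF's exact code path):

```python
# Sketch: '>f8' is big-endian float64; the dtype carries the byte order,
# while the values compare equal to their little-endian counterparts.
import numpy as np

little = np.arange(6.0)        # native byte order on most platforms
big = little.astype('>f8')     # same values, swapped byte order
assert big.dtype.byteorder == '>'
assert np.array_equal(little, big)
```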
@@ -525,7 +538,11 @@ def test_regrid_dask(request, scheduler):
 
     # Use very small chunks
     indata_chunked = indata.rechunk((5, 6))  # Now has 9 chunks (5, 6)
-    outdata = regridder(indata_chunked)
+    with pytest.warns(
+        PerformanceWarning,
+        match=r'Regridding is increasing the number of chunks by a factor of 16.0',
+    ):
+        outdata = regridder(indata_chunked)
     # This is the case where we preserve chunk size
     assert outdata.chunksize == indata_chunked.chunksize
     n_task_out = len(outdata.__dask_graph__().keys())
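This hunk is why the new `dask.array.core.PerformanceWarning` import appears at the top of the file: with 9 tiny input chunks, applying the weights multiplies the chunk count (the message reports a factor of 16.0) and dask flags it. Since it is an ordinary `Warning` subclass, code hitting this outside a test can handle it with the standard filters; a sketch reusing the objects above:

```python
# Sketch: silence dask's PerformanceWarning for a known-noisy regrid call.
import warnings
from dask.array.core import PerformanceWarning

with warnings.catch_warnings():
    warnings.simplefilter('ignore', PerformanceWarning)
    outdata = regridder(indata_chunked)
```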
@@ -625,7 +642,7 @@ def test_dask_output_chunks():
     test_output_chunks_dict = {'y': 10, 'x': 12}
     indata = ds_spatial_chunked['data4D'].data  # Data chunked along spatial dims
     # Use ridiculous small chunk size value to be sure it _isn't_ impacting computation.
-    with dask.config.set({'array.chunk-size': '1MiB'}):
+    with dask.config.set({'array.chunk-size': '1MiB'}), pytest.warns(PerformanceWarning):
         outdata = regridder(indata)
     outdata_spec_tuple = regridder(indata, output_chunks=test_output_chunks_tuple)
     outdata_spec_dict = regridder(indata, output_chunks=test_output_chunks_dict)
@@ -844,6 +861,11 @@ def test_spatial_averager(poly, exp, use_dask):
     assert 'my_geom' in out.dims
 
 
+def test_spatial_averager_warns():
+    with pytest.warns(UserWarning, match=r'contains large \(> 1°\) segments.'):
+        xe.SpatialAverager(ds_savg, [polys_raw[0]], geom_dim_name='my_geom')
+
+
 def test_spatial_averager_with_zonal_region():
     # We expect the spatial average for all regions to be one
     zonal_south = Polygon([(0, -90), (10, 0), (0, 0)])
@@ -852,7 +874,7 @@ def test_spatial_averager_with_zonal_region():
     zonal_full = Polygon([(0, -90), (10, 0), (0, 90), (0, 0)])  # This yields 0... why?
 
     polys = [zonal_south, zonal_north, zonal_short, zonal_full]
-    polys = segmentize(polys, 1)
+    polys = segmentize(polys, 0.9)
 
     # Create field of ones on a global grid
     ds = xe.util.grid_global(20, 12, cf=True)
@@ -865,6 +887,7 @@ def test_spatial_averager_with_zonal_region():
     assert_allclose(out, 1, rtol=1e-3)
 
 
+@pytest.mark.filterwarnings('ignore:`polys` contains large')
 def test_compare_weights_from_poly_and_grid():
     """Confirm that the weights are identical when they are computed from a grid->grid and grid->poly."""
 
@@ -1005,10 +1028,3 @@ def test_spatial_averager_mask():
     savg = xe.SpatialAverager(dsm, [poly], geom_dim_name='my_geom')
     out = savg(dsm.abc)
     assert_allclose(out, 2, rtol=1e-3)
-
-
-def test_densify_polys():
-    # Check that using a large poly raises a warning
-    poly = Polygon([(-80, -40), (80, -40), (80, 40), (-80, 40)])  # Large poly
-    with pytest.warns(UserWarning):
-        xe.SpatialAverager(ds_in, [poly])