
Commit 0f1a87d

Fix passing grid as dictionary (#429)
* Use dataset instead
* Detect or filter almost all warnings; add test for #428
* Update CHANGES
1 parent: 8190e15 · commit 0f1a87d

File tree: 5 files changed, +44 −23 lines


CHANGES.rst

Lines changed: 4 additions & 0 deletions
@@ -1,6 +1,10 @@
 What's new
 ==========
 
+0.8.10 (unreleased)
+-------------------
+* Fix issue introduced by :pull:`418` for passing grids as dictionaries. (:issue:`428`, :pull:`429`). By `Pascal Bourgault <https://github.com/aulemahal>`_.
+
 0.8.9 (2025-04-15)
 ------------------
 * Destroy grids explicitly once weights are computed. Do not store them in `grid_in` and `grid_out` attributes. This fixes segmentation faults introduced by the memory fix of last version. By `Pascal Bourgault <https://github.com/aulemahal>`_.
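
The changelog entry above restores passing grids as plain dictionaries of coordinate arrays, the call path that :pull:`418` had broken. A minimal sketch of that usage (the coordinate values here are made-up stand-ins; the actual test below builds them from ds_in/ds_out):

import numpy as np
import xesmf as xe

# Source and target grids given as plain dictionaries of 1D coordinate arrays.
grid_in = {'lon': np.arange(-180, 180, 20), 'lat': np.arange(-90, 91, 12)}
grid_out = {'lon': np.arange(-180, 180, 10), 'lat': np.arange(-90, 91, 6)}

# With this fix, dictionary grids build a Regridder again (see test_regridder_from_dict).
regridder = xe.Regridder(grid_in, grid_out, 'bilinear')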

xesmf/frontend.py

Lines changed: 1 addition & 1 deletion
@@ -980,7 +980,7 @@ def __init__(
                 ]
             )
         else:
-            self.out_coords = {lat_out.name: lat_out, lon_out.name: lon_out}
+            self.out_coords = xr.Dataset(coords={lat_out.name: lat_out, lon_out.name: lon_out})
 
         if parallel:
             self._init_para_regrid(ds_in, ds_out, kwargs)
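
The one-line change above wraps the fallback output coordinates in an xr.Dataset instead of a plain dict, so the dictionary-grid path produces the same kind of out_coords object as the Dataset path. A small illustrative sketch (lat_out/lon_out are stand-ins for the DataArrays built earlier in __init__):

import numpy as np
import xarray as xr

# Stand-ins for the output-grid coordinate DataArrays assembled earlier in Regridder.__init__.
lon_out = xr.DataArray(np.arange(-180, 180, 10), dims=('x',), name='lon')
lat_out = xr.DataArray(np.arange(-90, 91, 10), dims=('y',), name='lat')

# Unlike a plain dict, an xr.Dataset carries dimensions and indexes along with the coordinates,
# so assigning them onto the regridded output behaves consistently in both code paths.
out_coords = xr.Dataset(coords={lat_out.name: lat_out, lon_out.name: lon_out})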

xesmf/tests/conftest.py

Lines changed: 1 addition & 1 deletion
@@ -14,7 +14,7 @@ def processes_scheduler():
         yield
 
 
-@pytest.fixture(scope='module')
+@pytest.fixture(scope='function')
 def distributed_scheduler():
     from dask.distributed import Client, LocalCluster
 
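Changing the fixture scope from 'module' to 'function' means each test gets a fresh dask distributed cluster instead of sharing one across the whole module. A sketch of what such a function-scoped fixture typically looks like; the body beyond the import shown in the diff is an assumption, not taken from this commit:

import pytest


@pytest.fixture(scope='function')
def distributed_scheduler():
    from dask.distributed import Client, LocalCluster

    # Assumed body: start a small local cluster for one test, then tear it down
    # so scheduler state cannot leak into the next test.
    with LocalCluster(n_workers=1, threads_per_worker=2) as cluster:
        with Client(cluster):
            yield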

xesmf/tests/test_frontend.py

Lines changed: 36 additions & 20 deletions
@@ -6,6 +6,7 @@
 import numpy as np
 import pytest
 import xarray as xr
+from dask.array.core import PerformanceWarning
 from numpy.testing import assert_allclose, assert_almost_equal, assert_equal
 from shapely import segmentize
 from shapely.geometry import MultiPolygon, Polygon
@@ -14,7 +15,7 @@
 from xesmf.frontend import as_2d_mesh
 
 dask_schedulers = ['threaded_scheduler', 'processes_scheduler', 'distributed_scheduler']
-
+pytestmark = pytest.mark.filterwarnings('ignore:Input array is not C_CONTIGUOUS')
 
 # same test data as test_backend.py, but here we can use xarray DataSet
 ds_in = xe.util.grid_global(20, 12)
@@ -56,7 +57,7 @@
     },
     data_vars={'abc': (('lon', 'lat'), [[1.0, 2.0], [3.0, 4.0], [2.0, 4.0]])},
 )
-polys = [
+polys_raw = [
     Polygon([[0.5, 0.5], [0.5, 1.5], [1.5, 0.5]]),  # Simple triangle polygon
     MultiPolygon(
         [
@@ -94,6 +95,15 @@
         ),
     ],  # Combination of Polygon and MultiPolygon with two different areas
 ]
+
+
+def _segmentize(p):
+    if isinstance(p, list):
+        return list(map(_segmentize, p))
+    return segmentize(p, 1)
+
+
+polys = list(map(_segmentize, polys_raw))
 exps_polys = [1.75, 3, 2.1429, 4, 0, 2.5, [1.75, 3.6]]
 
 
@@ -196,7 +206,7 @@ def test_regridder_w():
     w = regridder.w
     assert w.shape == ds_out.lon.shape + ds_in.lon.shape
 
-    p = Polygon([(-10, -10), (10, -10), (10, 10), (-10, 10)])
+    p = segmentize(Polygon([(-10, -10), (10, -10), (10, 10), (-10, 10)]), 1)
 
     averager = xe.SpatialAverager(ds_in, [p])
     assert averager.w.shape == (1,) + ds_in.lon.shape
@@ -275,12 +285,14 @@ def test_conservative_without_bounds():
         xe.Regridder(ds_in.drop_vars('lon_b'), ds_out, 'conservative')
 
 
-def test_build_regridder_from_dict():
+def test_regridder_from_dict():
     lon_in = ds_in['lon'].values
     lat_in = ds_in['lat'].values
     lon_out = ds_out['lon'].values
     lat_out = ds_out['lat'].values
-    _ = xe.Regridder({'lon': lon_in, 'lat': lat_in}, {'lon': lon_out, 'lat': lat_out}, 'bilinear')
+    reg = xe.Regridder({'lon': lon_in, 'lat': lat_in}, {'lon': lon_out, 'lat': lat_out}, 'bilinear')
+    with pytest.warns(UserWarning, match=r"Using dimensions \('y', 'x'\) from data variable"):
+        reg(ds_in['data'])
 
 
 def test_regrid_periodic_wrong():
@@ -357,8 +369,8 @@ def test_regrid_with_1d_grid_infer_bounds():
     ds_out_1d = ds_2d_to_1d(ds_out).swap_dims(x='lon', y='lat')
 
     regridder = xe.Regridder(ds_in_1d, ds_out_1d, 'conservative', periodic=True)
-
-    dr_out = regridder(ds_in['data'])
+    with pytest.warns(UserWarning, match=r"Using dimensions \('y', 'x'\) from data variable"):
+        dr_out = regridder(ds_in['data'])
 
     # compare with provided-bounds solution
     dr_exp = xe.Regridder(ds_in, ds_out, 'conservative', periodic=True)(ds_in['data'])
@@ -433,7 +445,8 @@ def test_regrid_dataarray(use_cfxr):
 
     # test renamed dim
     if not use_cfxr:
-        dr_out_rn = regridder(ds_in2.rename(y='why')['data'])
+        with pytest.warns(UserWarning, match=r"Using dimensions \('why', 'x'\)"):
+            dr_out_rn = regridder(ds_in2.rename(y='why')['data'])
     xr.testing.assert_identical(dr_out, dr_out_rn)
 
 
@@ -443,14 +456,14 @@ def test_regrid_dataarray_endianess(use_dask):
     regridder = xe.Regridder(ds_in, ds_out, 'conservative')
 
     exp = regridder(ds_in['data'])  # Normal (little-endian)
-    # with pytest.warns(UserWarning, match='Input array has a dtype not supported'):
 
     if use_dask:
         indata = ds_in.data.astype('>f8').chunk()
     else:
         indata = ds_in.data.astype('>f8')
 
-    out = regridder(indata)  # big endian
+    with pytest.warns(UserWarning, match='Input array has a dtype not supported'):
+        out = regridder(indata)  # big endian
 
     # Results should be the same
     assert_equal(exp.values, out.values)
@@ -525,7 +538,11 @@ def test_regrid_dask(request, scheduler):
 
     # Use very small chunks
     indata_chunked = indata.rechunk((5, 6))  # Now has 9 chunks (5, 6)
-    outdata = regridder(indata_chunked)
+    with pytest.warns(
+        PerformanceWarning,
+        match=r'Regridding is increasing the number of chunks by a factor of 16.0',
+    ):
+        outdata = regridder(indata_chunked)
     # This is the case where we preserve chunk size
     assert outdata.chunksize == indata_chunked.chunksize
     n_task_out = len(outdata.__dask_graph__().keys())
@@ -625,7 +642,7 @@ def test_dask_output_chunks():
     test_output_chunks_dict = {'y': 10, 'x': 12}
     indata = ds_spatial_chunked['data4D'].data  # Data chunked along spatial dims
     # Use ridiculous small chunk size value to be sure it _isn't_ impacting computation.
-    with dask.config.set({'array.chunk-size': '1MiB'}):
+    with dask.config.set({'array.chunk-size': '1MiB'}), pytest.warns(PerformanceWarning):
         outdata = regridder(indata)
     outdata_spec_tuple = regridder(indata, output_chunks=test_output_chunks_tuple)
     outdata_spec_dict = regridder(indata, output_chunks=test_output_chunks_dict)
@@ -844,6 +861,11 @@ def test_spatial_averager(poly, exp, use_dask):
     assert 'my_geom' in out.dims
 
 
+def test_spatial_averager_warns():
+    with pytest.warns(UserWarning, match=r'contains large \(> 1°\) segments.'):
+        xe.SpatialAverager(ds_savg, [polys_raw[0]], geom_dim_name='my_geom')
+
+
 def test_spatial_averager_with_zonal_region():
     # We expect the spatial average for all regions to be one
     zonal_south = Polygon([(0, -90), (10, 0), (0, 0)])
@@ -852,7 +874,7 @@ def test_spatial_averager_with_zonal_region():
     zonal_full = Polygon([(0, -90), (10, 0), (0, 90), (0, 0)])  # This yields 0... why?
 
     polys = [zonal_south, zonal_north, zonal_short, zonal_full]
-    polys = segmentize(polys, 1)
+    polys = segmentize(polys, 0.9)
 
     # Create field of ones on a global grid
     ds = xe.util.grid_global(20, 12, cf=True)
@@ -865,6 +887,7 @@
     assert_allclose(out, 1, rtol=1e-3)
 
 
+@pytest.mark.filterwarnings('ignore:`polys` contains large')
 def test_compare_weights_from_poly_and_grid():
     """Confirm that the weights are identical when they are computed from a grid->grid and grid->poly."""
 
@@ -1005,10 +1028,3 @@ def test_spatial_averager_mask():
     savg = xe.SpatialAverager(dsm, [poly], geom_dim_name='my_geom')
     out = savg(dsm.abc)
     assert_allclose(out, 2, rtol=1e-3)
-
-
-def test_densify_polys():
-    # Check that using a large poly raises a warning
-    poly = Polygon([(-80, -40), (80, -40), (80, 40), (-80, 40)])  # Large poly
-    with pytest.warns(UserWarning):
-        xe.SpatialAverager(ds_in, [poly])
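
Most of the test changes above follow one of two patterns: assert an expected warning with pytest.warns(..., match=regex), or silence a known, benign one with a filterwarnings marker. A condensed, self-contained sketch of both patterns (the do_regrid helper is hypothetical; the warning messages are the ones matched in this diff):

import warnings

import pytest
from dask.array.core import PerformanceWarning

# Module-wide filter, as used for the C_CONTIGUOUS warning in test_frontend.py.
pytestmark = pytest.mark.filterwarnings('ignore:Input array is not C_CONTIGUOUS')


def do_regrid():
    # Hypothetical stand-in for a call like regridder(indata_chunked) that warns.
    warnings.warn(
        'Regridding is increasing the number of chunks by a factor of 16.0',
        PerformanceWarning,
    )


def test_warns_pattern():
    # The test passes only if the warning is raised and its message matches the pattern.
    with pytest.warns(PerformanceWarning, match='increasing the number of chunks'):
        do_regrid()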

xesmf/tests/test_oceanmodels.py

Lines changed: 2 additions & 1 deletion
@@ -75,7 +75,8 @@ def test_mom6like_to_5x5():
         mom6like.rename({'xh': 'lon', 'yh': 'lat'}), grid_5x5, 'bilinear', periodic=True
     )
 
-    tos_regridded = regrid_to_5x5(mom6like['tos'])
+    with pytest.warns(UserWarning, match=r"Using dimensions \('yh', 'xh'\)"):
+        tos_regridded = regrid_to_5x5(mom6like['tos'])
     assert tos_regridded.shape == ((2, 36, 72))
 
 
