
Commit 0eea8b5

Fixing more particlefile unit tests
1 parent 268d294 commit 0eea8b5

1 file changed (+17, -18 lines)

tests/test_particlefile.py

Lines changed: 17 additions & 18 deletions
@@ -7,8 +7,18 @@
 import xarray as xr
 from zarr.storage import MemoryStore
 
-import parcels
-from parcels import Field, FieldSet, Particle, ParticleFile, ParticleSet, StatusCode, Variable, VectorField, XGrid
+from parcels import (
+    Field,
+    FieldSet,
+    Particle,
+    ParticleFile,
+    ParticleSet,
+    StatusCode,
+    Variable,
+    VectorField,
+    XGrid,
+    download_example_dataset,
+)
 from parcels._core.particle import Particle, create_particle_data, get_default_particle
 from parcels._core.utils.time import TimeInterval
 from parcels._datasets.structured.generic import datasets
@@ -317,13 +327,11 @@ def Update_lon(particles, fieldset): # pragma: no cover
     assert np.allclose(pset.lon, 0.6)
 
 
-@pytest.mark.v4alpha
-@pytest.mark.xfail
 def test_correct_misaligned_outputdt_dt(fieldset, tmp_zarrfile):
     """Testing that outputdt does not need to be a multiple of dt."""
 
     def Update_lon(particles, fieldset): # pragma: no cover
-        particles.dlon += particles.dt / np.timedelta64(1, "s")
+        particles.lon += particles.dt / np.timedelta64(1, "s")
 
     particle = get_default_particle(np.float64)
     pset = ParticleSet(fieldset, pclass=particle, lon=[0], lat=[0])
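A note on the kernel line changed above: in these kernels `particles.dt` is a NumPy `timedelta64`, and dividing it by `np.timedelta64(1, "s")` converts it to plain float seconds before it is added to `particles.lon`. A minimal standalone sketch of that idiom (the values are illustrative, not taken from the test):

import numpy as np

# Dividing two timedelta64 values yields a plain float ratio:
dt = np.timedelta64(3, "s")
print(dt / np.timedelta64(1, "s"))  # 3.0

# The same division works elementwise on timedelta64 arrays:
dts = np.array([0, 3, 6, 9], dtype="timedelta64[s]")
print(dts / np.timedelta64(1, "s"))  # [0. 3. 6. 9.]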
@@ -332,9 +340,7 @@ def Update_lon(particles, fieldset): # pragma: no cover
 
     ds = xr.open_zarr(tmp_zarrfile)
     assert np.allclose(ds.lon.values, [0, 3, 6, 9])
-    assert np.allclose(
-        ds.time.values[0, :], [np.timedelta64(t, "s") for t in [0, 3, 6, 9]], atol=np.timedelta64(1, "ns")
-    )
+    assert np.allclose((ds.time.values - ds.time.values[0, 0]) / np.timedelta64(1, "s"), [0, 3, 6, 9])
 
 
 def setup_pset_execute(*, fieldset: FieldSet, outputdt: timedelta, execute_kwargs, particle_class=Particle):
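The rewritten assertion in the hunk above avoids comparing `timedelta64` values against a timedelta tolerance: it normalises the stored output times to elapsed float seconds (subtract the first sample, divide by one second) and compares ordinary floats. A hedged sketch of the same pattern, using a made-up single-trajectory times array:

import numpy as np

# Hypothetical output times for one trajectory, as timedelta64 values.
times = np.array([[0, 3, 6, 9]], dtype="timedelta64[s]")

# Normalise to elapsed seconds since the first written sample, then
# compare as plain floats; no timedelta64 atol is needed.
elapsed_s = (times - times[0, 0]) / np.timedelta64(1, "s")
assert np.allclose(elapsed_s, [0, 3, 6, 9])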
@@ -382,7 +388,6 @@ def test_pset_execute_output_time_forwards(fieldset):
     )
 
 
-@pytest.mark.skip(reason="backwards in time not yet working")
 def test_pset_execute_outputdt_backwards(fieldset):
     """Testing output data dt matches outputdt in backwards time."""
     outputdt = timedelta(hours=1)
@@ -394,7 +399,6 @@ def test_pset_execute_outputdt_backwards(fieldset):
     assert np.all(file_outputdt == np.timedelta64(-outputdt))
 
 
-@pytest.mark.xfail(reason="TODO v4: Update dataset loading")
 def test_pset_execute_outputdt_backwards_fieldset_timevarying():
     """test_pset_execute_outputdt_backwards() still passed despite #1722 as it doesn't account for time-varying fields,
     which for some reason #1722
@@ -404,14 +408,9 @@ def test_pset_execute_outputdt_backwards_fieldset_timevarying():
     dt = -timedelta(minutes=5)
 
     # TODO: Not ideal using the `download_example_dataset` here, but I'm struggling to recreate this error using the test suite fieldsets we have
-    example_dataset_folder = parcels.download_example_dataset("MovingEddies_data")
-    filenames = {
-        "U": str(example_dataset_folder / "moving_eddiesU.nc"),
-        "V": str(example_dataset_folder / "moving_eddiesV.nc"),
-    }
-    variables = {"U": "vozocrtx", "V": "vomecrty"}
-    dimensions = {"lon": "nav_lon", "lat": "nav_lat", "time": "time_counter"}
-    fieldset = parcels.FieldSet.from_netcdf(filenames, variables, dimensions)
+    example_dataset_folder = download_example_dataset("CopernicusMarine_data_for_Argo_tutorial")
+    ds_in = xr.open_mfdataset(f"{example_dataset_folder}/*.nc", combine="by_coords")
+    fieldset = FieldSet.from_copernicusmarine(ds_in)
 
     ds = setup_pset_execute(outputdt=outputdt, execute_kwargs=dict(runtime=runtime, dt=dt), fieldset=fieldset)
     file_outputdt = ds.isel(trajectory=0).time.diff(dim="obs").values
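For context on the replacement loading path in the last hunk: `xr.open_mfdataset` with `combine="by_coords"` merges the per-file NetCDF datasets on their shared coordinate values rather than by file order, producing the single dataset that `FieldSet.from_copernicusmarine` then consumes. A rough standalone sketch, assuming a hypothetical local folder of CopernicusMarine NetCDF files:

import xarray as xr

# Hypothetical folder holding the downloaded example NetCDF files.
folder = "/tmp/CopernicusMarine_data_for_Argo_tutorial"

# combine="by_coords" aligns the files on their lon/lat/depth/time
# coordinates instead of concatenating them in filename order.
ds_in = xr.open_mfdataset(f"{folder}/*.nc", combine="by_coords")
print(ds_in.data_vars)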
