
Commit 5749f57

reint-fischer authored and committed
Merge branch 'v4-dev' into documentation-structure-v4
2 parents 0626bee + 254090a

39 files changed: +648 −934 lines

.github/workflows/additional.yml

Lines changed: 3 additions & 3 deletions

@@ -13,7 +13,7 @@ jobs:
         shell: bash -l {0}
 
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       - name: Setup micromamba
         uses: mamba-org/setup-micromamba@v2
         with:
@@ -36,7 +36,7 @@ jobs:
         shell: bash -l {0}
 
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
+      - uses: actions/checkout@v5
       - uses: prefix-dev/[email protected]
       - run: pixi run docs-linkcheck

.github/workflows/cache-pixi-lock.yml

Lines changed: 6 additions & 3 deletions

@@ -2,6 +2,9 @@ name: Generate and cache Pixi lockfile
 
 on:
   workflow_call:
+    inputs:
+      pixi-version:
+        type: string
     outputs:
       cache-id:
         description: "The lock file contents"
@@ -26,11 +29,11 @@ jobs:
         with:
           path: |
             pixi.lock
-          key: ${{ steps.date.outputs.date }}_${{hashFiles('pixi.toml')}}
+          key: ${{ steps.date.outputs.date }}_${{inputs.pixi-version}}_${{hashFiles('pixi.toml')}}
       - uses: prefix-dev/[email protected]
         if: ${{ !steps.restore.outputs.cache-hit }}
         with:
-          pixi-version: v0.56.0
+          pixi-version: ${{ inputs.pixi-version }}
           run-install: false
       - name: Run pixi lock
        if: ${{ !steps.restore.outputs.cache-hit }}
@@ -43,7 +46,7 @@ jobs:
             pixi.lock
         key: ${{ steps.restore.outputs.cache-primary-key }}
       - name: Upload pixi.lock
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
         with:
           name: pixi-lock
           path: pixi.lock
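
The cache-key change is the substantive part of this diff: the lockfile cache key now includes the pixi version, so lockfiles generated by different pixi versions no longer share a cache entry. As a rough Python analogy for how the key is composed (actions/cache does its own hashing; the hashlib digest here is only illustrative):

import datetime
import hashlib
from pathlib import Path

def lockfile_cache_key(pixi_version: str, manifest: Path) -> str:
    # Mirrors ${{ steps.date.outputs.date }}_${{inputs.pixi-version}}_${{hashFiles('pixi.toml')}}
    date = datetime.date.today().isoformat()
    digest = hashlib.sha256(manifest.read_bytes()).hexdigest()[:16]
    return f"{date}_{pixi_version}_{digest}"

# e.g. lockfile_cache_key("v0.56.0", Path("pixi.toml")) -> "2025-01-01_v0.56.0_ab12cd34ef56ab12"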

.github/workflows/ci.yml

Lines changed: 12 additions & 10 deletions

@@ -28,7 +28,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        os: [ubuntu] #, mac, windows] # TODO v4: Re-enable windows and mac
+        os: [ubuntu, mac, windows]
         pixi-environment: [test-latest]
         include:
           - os: ubuntu
@@ -38,11 +38,12 @@ jobs:
           - os: ubuntu
             pixi-environment: "test-minimum"
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       - name: Restore cached pixi lockfile
         uses: actions/cache/restore@v4
         id: restore-pixi-lock
         with:
+          enableCrossOsArchive: true
           path: |
             pixi.lock
           key: ${{ needs.cache-pixi-lock.outputs.cache-id }}
@@ -54,14 +55,14 @@ jobs:
         run: |
           pixi run -e ${{ matrix.pixi-environment }} tests -v -s --cov=parcels --cov-report=xml --html="${{ env.COVERAGE_REPORT }}" --self-contained-html
       - name: Codecov
-        uses: codecov/codecov-action@v5.3.1
+        uses: codecov/codecov-action@v5.5.1
         env:
           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
           flags: unit-tests
       - name: Upload test results
         if: ${{ always() }} # Always run this step, even if tests fail
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
         with:
           name: Unittest report ${{ matrix.os }}-${{ matrix.pixi-environment }}
           path: ${{ env.COVERAGE_REPORT }}
@@ -82,11 +83,12 @@ jobs:
           - os: ubuntu
             python-version: "3.11"
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       - name: Restore cached pixi lockfile
         uses: actions/cache/restore@v4
         id: restore-pixi-lock
         with:
+          enableCrossOsArchive: true
           path: |
             pixi.lock
           key: ${{ needs.cache-pixi-lock.outputs.cache-id }}
@@ -98,14 +100,14 @@ jobs:
         run: |
           pixi run test-notebooks -v -s --html="${{ env.COVERAGE_REPORT }}" --self-contained-html --cov=parcels --cov-report=xml
       - name: Codecov
-        uses: codecov/codecov-action@v5.3.1
+        uses: codecov/codecov-action@v5.5.1
         env:
           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
           flags: integration-tests
       - name: Upload test results
         if: ${{ always() }} # Always run this step, even if tests fail
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
         with:
           name: Integration test report ${{ matrix.os }}-${{ matrix.pixi-environment }}
           path: ${{ env.COVERAGE_REPORT }}
@@ -117,7 +119,7 @@ jobs:
       - typechecking
     steps:
       - name: Merge Artifacts
-        uses: actions/upload-artifact/merge@v4
+        uses: actions/upload-artifact/merge@v5
         with:
           name: Testing reports
           pattern: "* report *"
@@ -129,7 +131,7 @@ jobs:
     needs: [cache-pixi-lock]
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v5
       - name: Restore cached pixi lockfile
         uses: actions/cache/restore@v4
         id: restore-pixi-lock
@@ -146,7 +148,7 @@ jobs:
           pixi run typing --non-interactive --html-report mypy-report
       - name: Upload test results
         if: ${{ always() }} # Upload even on mypy error
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
         with:
           name: Mypy report
           path: mypy-report

.github/workflows/nightly.yml

Lines changed: 2 additions & 2 deletions

@@ -11,9 +11,9 @@ jobs:
   build:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
      - name: Build conda package
-        uses: prefix-dev/[email protected].19
+        uses: prefix-dev/[email protected].34
        with:
          recipe-path: .github/ci/recipe.yaml
 

.github/workflows/pypi-release.yml

Lines changed: 6 additions & 6 deletions

@@ -13,10 +13,10 @@ jobs:
     runs-on: ubuntu-latest
     if: github.repository == 'Parcels-code/parcels'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
        name: Install Python
        with:
          python-version: "3.11"
@@ -42,7 +42,7 @@ jobs:
          else
            echo "✅ Looks good"
          fi
-      - uses: actions/upload-artifact@v4
+      - uses: actions/upload-artifact@v5
        with:
          name: releases
          path: dist
@@ -51,11 +51,11 @@ jobs:
     needs: build-artifacts
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/setup-python@v5
+      - uses: actions/setup-python@v6
        name: Install Python
        with:
          python-version: "3.11"
-      - uses: actions/download-artifact@v4
+      - uses: actions/download-artifact@v6
        with:
          name: releases
          path: dist
@@ -84,7 +84,7 @@ jobs:
     if: github.event_name == 'release'
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/download-artifact@v4
+      - uses: actions/download-artifact@v6
        with:
          name: releases
          path: dist

.pre-commit-config.yaml

Lines changed: 2 additions & 2 deletions

@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v5.0.0
+    rev: v6.0.0
     hooks:
       - id: trailing-whitespace
       - id: end-of-file-fixer
@@ -10,7 +10,7 @@ repos:
         types: [text]
         files: \.(json|ipynb)$
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.12.5
+    rev: v0.14.2
     hooks:
       - id: ruff
         name: ruff lint (.py)

docs/user_guide/examples_v3/tutorial_kernelloop.ipynb

Lines changed: 12 additions & 28 deletions

@@ -26,7 +26,7 @@
     "\n",
     "When you run a Parcels simulation (i.e. a call to `pset.execute()`), the Kernel loop is the main part of the code that is executed. This part of the code loops through all particles and executes the Kernels that are defined for each particle.\n",
     "\n",
-    "In order to make sure that the displacements of a particle in the different Kernels can be summed, all Kernels add to a _change_ in position (`particles.dlon`, `particles.dlat`, and `particles.ddepth`). This is important, because there are situations where movement kernels would otherwise not commute. Take the example of advecting particles by currents _and_ winds. If the particle would first be moved by the currents and then by the winds, the result could be different from first moving by the winds and then by the currents. Instead, by adding the changes in position, the ordering of the Kernels has no consequence on the particle displacement."
+    "In order to make sure that the displacements of a particle in the different Kernels can be summed, all Kernels add to a _change_ in position (`particles.dlon`, `particles.dlat`, and `particles.dz`). This is important, because there are situations where movement kernels would otherwise not commute. Take the example of advecting particles by currents _and_ winds. If the particle would first be moved by the currents and then by the winds, the result could be different from first moving by the winds and then by the currents. Instead, by adding the changes in position, the ordering of the Kernels has no consequence on the particle displacement."
    ]
   },
   {
@@ -40,29 +40,27 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "Below is a structured overview of the Kernel loop is implemented. Note that this is for longitude only, but the same process is applied for latitude and depth.\n",
+    "Below is a structured overview of how the Kernel loop is implemented. Note that this is for `lon` only, but the same process is applied for `lat` and `z`.\n",
     "\n",
-    "1. Initialise an extra Variable `particles.lon=0` and `particles.time_nextloop = particles.time`\n",
+    "1. Initialise an extra Variable `particles.dlon=0`\n",
     "\n",
     "2. Within the Kernel loop, for each particle:<br>\n",
     "\n",
     "    1. Update `particles.lon += particles.dlon`<br>\n",
     "\n",
-    "    2. Set variable `particles.dlon = 0`<br>\n",
+    "    2. Update `particles.time += particles.dt` (except on the first iteration of the Kernel loop)<br>\n",
     "\n",
-    "    3. Update `particles.time = particles.time_nextloop`\n",
+    "    3. Set variable `particles.dlon = 0`<br>\n",
     "\n",
     "    4. For each Kernel in the list of Kernels:\n",
     "    \n",
     "        1. Execute the Kernel\n",
     "    \n",
     "        2. Update `particles.dlon` by adding the change in longitude, if needed<br>\n",
     "\n",
-    "    5. Update `particles.time_nextloop += particles.dt`<br>\n",
+    "    5. If `particle.time` is a multiple of `outputdt`, write `particle.lon` and `particle.time` to the zarr output file<br>\n",
     "\n",
-    "    6. If `outputdt` is a multiple of `particle.time`, write `particle.lon` and `particle.time` to zarr output file<br>\n",
-    "\n",
-    "Besides having commutable Kernels, the main advantage of this implementation is that, when using Field Sampling with e.g. `particle.temp = fieldset.Temp[particle.time, particle.depth, particle.lat, particle.lon]`, the particle location stays the same throughout the entire Kernel loop. Additionally, this implementation ensures that the particle location is the same as the location of the sampled field in the output file."
+    "Besides having commutable Kernels, the main advantage of this implementation is that, when using Field Sampling with e.g. `particle.temp = fieldset.Temp[particle.time, particle.z, particle.lat, particle.lon]`, the particle location stays the same throughout the entire Kernel loop. Additionally, this implementation ensures that the particle location is the same as the location of the sampled field in the output file."
    ]
   },
   {
@@ -155,10 +153,10 @@
    "source": [
     "def wind_kernel(particle, fieldset, time):\n",
     "    particle_dlon += (\n",
-    "        fieldset.UWind[time, particle.depth, particle.lat, particle.lon] * particle.dt\n",
+    "        fieldset.UWind[time, particle.z, particle.lat, particle.lon] * particle.dt\n",
     "    )\n",
     "    particle_dlat += (\n",
-    "        fieldset.VWind[time, particle.depth, particle.lat, particle.lon] * particle.dt\n",
+    "        fieldset.VWind[time, particle.z, particle.lat, particle.lon] * particle.dt\n",
     "    )"
    ]
   },
@@ -242,30 +240,16 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "## Caveats"
+    "## Caveat: Avoid updating particle locations directly in Kernels"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "There are a few important considerations to take into account when writing Kernels\n",
-    "\n",
-    "### 1. Avoid updating particle locations directly in Kernels\n",
     "It is better not to update `particle.lon` directly in a Kernel, as it can interfere with the loop above. Assigning a value to `particle.lon` in a Kernel will throw a warning. \n",
     "\n",
-    "Instead, update the local variable `particle.dlon`.\n",
-    "\n",
-    "### 2. Be careful with updating particle variables that do not depend on Fields.\n",
-    "While assigning the interpolated value of a `Field` to a Particle goes well in the loop above, this is not necessarily so for assigning other attributes. For example, a line like `particle.age += particle.dt` is executed directly so may result in the age being `dt` at `time = 0` in the output file. \n",
-    "\n",
-    "A workaround is to either initialise the age to `-dt`, or to increase the `age` only when `particle.time > 0` (using an `np.where` statement).\n",
-    "\n",
-    "\n",
-    "### 3. The last time is not written to file\n",
-    "Because the location at the start of the loop is written at the end of the Kernel loop, the last `particle.time` of the particle is not written to file. This is similar behaviour to e.g. `np.arange(start, stop)`, which also doesn't include the `stop` value itself. \n",
-    "\n",
-    "If you do want to write the last time to file, you can increase the `runtime` or `endtime` by `dt` (although this may cause an OutsideTimeInterval if your run was to the end of the available hydrodynamic data), or you can call `pfile.write_latest_locations(pset, time=pset[0].time_nextloop)`. Note that in the latter case, the particle locations (longitude, latitude and depth) will be updated, but other variables will _not_ be updated as the Kernels are not run again."
+    "Instead, update the local variable `particle.dlon`."
    ]
   },
   {
@@ -355,7 +339,7 @@
    "source": [
     "def KeepInOcean(particle, fieldset, time):\n",
     "    if particle.state == StatusCode.ErrorThroughSurface:\n",
-    "        particle_ddepth = 0.0\n",
+    "        particle_dz = 0.0\n",
     "    particle.state = StatusCode.Success"
    ]
   },
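
The loop ordering that the rewritten notebook cell describes can be sketched in a few lines of Python. This is a toy illustration of the five steps (apply dlon, advance time, reset dlon, run Kernels, write output), not Parcels' actual implementation; ToyParticles and the two kernels are hypothetical stand-ins:

import numpy as np

# Toy sketch of the Kernel-loop ordering described above (lon only).
class ToyParticles:
    def __init__(self, n, dt):
        self.lon = np.zeros(n)
        self.dlon = np.zeros(n)  # step 1: extra Variable particles.dlon = 0
        self.time = np.zeros(n)
        self.dt = dt

def kernel_loop(particles, kernels, runtime, outputdt):
    for step in range(int(runtime / particles.dt)):
        particles.lon += particles.dlon        # 2.1 apply the summed displacement
        if step > 0:
            particles.time += particles.dt     # 2.2 advance time (not on the first pass)
        particles.dlon[:] = 0.0                # 2.3 reset the change in position
        for kernel in kernels:                 # 2.4 each Kernel only adds to dlon,
            kernel(particles)                  #     never assigns particles.lon directly
        if particles.time[0] % outputdt == 0:  # 2.5 stand-in for the zarr write
            print(f"t={particles.time[0]:6.0f}  lon={particles.lon}")

def currents(p):  # toy movement kernel
    p.dlon += 1e-4 * p.dt

def winds(p):     # toy movement kernel; its order relative to currents is irrelevant
    p.dlon += 2e-5 * p.dt

kernel_loop(ToyParticles(n=2, dt=60.0), [currents, winds], runtime=600.0, outputdt=120.0)

Because both kernels accumulate into dlon, swapping their order in the list leaves the printed trajectories unchanged, which is exactly the commutativity argument the cell makes.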

docs/user_guide/v4-migration.md

Lines changed: 9 additions & 0 deletions

@@ -34,3 +34,12 @@ Version 4 of Parcels is unreleased at the moment. The information in this migrat
 
 - Particlefiles should be created by `ParticleFile(...)` instead of `pset.ParticleFile(...)`
 - The `name` argument in `ParticleFile` has been replaced by `store` and can now be a string, a Path or a zarr store.
+
+## Field
+
+- `Field.eval()` returns an array of floats instead of a single float (related to the vectorization)
+- `Field.eval()` does not throw OutOfBounds or other errors
+
+## GridSet
+
+- `GridSet` is now a list, so change `fieldset.gridset.grids[0]` to `fieldset.gridset[0]`.
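
A rough before/after sketch of the `Field` and `GridSet` notes added here; the field name `Temp` and the argument order follow the sampling examples in the notebook diff above, and the exact `eval` signature is an assumption:

import numpy as np

def sample_temp(fieldset, time, z, lat, lon):
    # v3: eval returned a single float and could raise OutOfBounds.
    # v4: eval is vectorized, returning an array of floats, and does
    # not throw OutOfBounds or other errors (assumed signature).
    return np.asarray(fieldset.Temp.eval(time, z, lat, lon))

def first_grid(fieldset):
    # v3: fieldset.gridset.grids[0]
    return fieldset.gridset[0]  # v4: GridSet is now a list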

pyproject.toml

Lines changed: 5 additions & 0 deletions

@@ -100,8 +100,13 @@ ignore = [
   "RUF015",
   # Use `X | Y` in `isinstance` (see https://github.com/home-assistant/core/issues/123850)
   "UP038",
+  # Pattern passed to `match=` contains metacharacters but is neither escaped nor raw
+  "RUF043",
   "RUF046", # Value being cast to `int` is already an integer
 
+  # TODO: Move this ignore so that it only applies in the tests folder. Do in conjunction with any doc related rules
+  "RUF059", # Unpacked variable `coords` is never used
+
   # TODO: ignore for now (requires more work). Remove ignore once fixed
   # Missing docstring in public module
   "D100",
