Skip to content

Commit

Permalink
Merge branch 'master' into pr/andrew-s28/1672
Browse files Browse the repository at this point in the history
  • Loading branch information
VeckoTheGecko committed Sep 2, 2024
2 parents 0cb1f9e + 679d06d commit 4defe1f
Show file tree
Hide file tree
Showing 27 changed files with 421 additions and 212 deletions.
2 changes: 0 additions & 2 deletions .github/actions/install-parcels/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,6 @@ runs:
environment-file: ${{ inputs.environment-file }}
python-version: ${{ inputs.python-version }}
channels: conda-forge
cache-environment: true
cache-downloads: true
- name: MPI support
if: ${{ ! (runner.os == 'Windows') }}
run: conda install -c conda-forge mpich mpi4py
Expand Down
6 changes: 6 additions & 0 deletions .github/dependabot.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"
62 changes: 50 additions & 12 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,13 @@ on:
- "master"
- "test-me/*"
pull_request:
branches:
- "*"
schedule:
- cron: "0 7 * * 1" # Run every Monday at 7:00 UTC

concurrency:
group: branch-${{ github.head_ref }}
cancel-in-progress: true

defaults:
run:
shell: bash -el {0}
Expand All @@ -30,7 +32,7 @@ jobs:
python-version: "3.11"
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Setup Conda and parcels
uses: ./.github/actions/install-parcels
with:
Expand All @@ -41,15 +43,16 @@ jobs:
coverage run -m pytest -v -s --html=${{ matrix.os }}_${{ matrix.python-version }}_unit_test_report.html --self-contained-html tests
coverage xml
- name: Codecov
uses: codecov/[email protected]
uses: codecov/[email protected]
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: unit-tests
- name: Upload test results
if: ${{ always() }} # Always run this step, even if tests fail
uses: actions/upload-artifact@v3.1.2
uses: actions/upload-artifact@v4
with:
name: Unittest report
name: Unittest report ${{ matrix.os }}-${{ matrix.python-version }}
path: ${{ matrix.os }}_${{ matrix.python-version }}_unit_test_report.html
integration-test:
name: "py${{ matrix.python-version }} | ${{ matrix.os }} | integration tests"
Expand All @@ -61,7 +64,7 @@ jobs:
python-version: ["3.12"]
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Setup Conda and parcels
uses: ./.github/actions/install-parcels
with:
Expand All @@ -71,13 +74,48 @@ jobs:
coverage run -m pytest -v -s --nbval-lax -k "not documentation" --html="${{ matrix.os }}_integration_test_report.html" --self-contained-html docs/examples
coverage xml
- name: Codecov
uses: codecov/[email protected]
uses: codecov/[email protected]
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: integration-tests
- name: Upload test results
if: ${{ always() }} # Always run this step, even if tests fail
uses: actions/upload-artifact@v3.1.2
uses: actions/upload-artifact@v4
with:
name: Integration test report
name: Integration test report ${{ matrix.os }}
path: ${{ matrix.os }}_integration_test_report.html
merge-test-artifacts:
runs-on: ubuntu-latest
needs:
- unit-test
- integration-test
steps:
- name: Merge Artifacts
uses: actions/upload-artifact/merge@v4
with:
name: Testing reports
pattern: "* report *"
typechecking:
name: mypy
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Conda and parcels
uses: ./.github/actions/install-parcels
with:
environment-file: environment.yml
- run: conda install lxml # dep for report generation
- name: Typechecking
run: |
mypy --install-types --non-interactive parcels --cobertura-xml-report mypy_report
- name: Upload mypy coverage to Codecov
uses: codecov/[email protected]
if: ${{ always() }} # Upload even on error of mypy
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
file: mypy_report/cobertura.xml
flags: mypy
fail_ci_if_error: false
16 changes: 8 additions & 8 deletions .github/workflows/pypi-release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,10 @@ jobs:
runs-on: ubuntu-latest
if: github.repository == 'OceanParcels/parcels'
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: actions/setup-python@v3
- uses: actions/setup-python@v5
name: Install Python
with:
python-version: "3.10"
Expand All @@ -42,7 +42,7 @@ jobs:
else
echo "✅ Looks good"
fi
- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v4
with:
name: releases
path: dist
Expand All @@ -51,11 +51,11 @@ jobs:
needs: build-artifacts
runs-on: ubuntu-latest
steps:
- uses: actions/setup-python@v3
- uses: actions/setup-python@v5
name: Install Python
with:
python-version: "3.10"
- uses: actions/download-artifact@v3
- uses: actions/download-artifact@v4
with:
name: releases
path: dist
Expand All @@ -72,7 +72,7 @@ jobs:
- name: Publish package to TestPyPI
if: github.event_name == 'push'
uses: pypa/gh-action-pypi-publish@v1.5.0
uses: pypa/gh-action-pypi-publish@v1.9.0
with:
user: __token__
password: ${{ secrets.PARCELS_PYPI_TEST_TOKEN }}
Expand All @@ -84,12 +84,12 @@ jobs:
if: github.event_name == 'release'
runs-on: ubuntu-latest
steps:
- uses: actions/download-artifact@v3
- uses: actions/download-artifact@v4
with:
name: releases
path: dist
- name: Publish package to PyPI
uses: pypa/gh-action-pypi-publish@v1.5.0
uses: pypa/gh-action-pypi-publish@v1.9.0
with:
user: __token__
password: ${{ secrets.PARCELS_PYPI_PROD_TOKEN }}
Expand Down
17 changes: 15 additions & 2 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -13,14 +13,27 @@ repos:
rev: v0.5.6
hooks:
- id: ruff
args: [ --fix ]
args: [--fix, --show-fixes]
- id: ruff
name: ruff (isort jupyter)
args: [--select, I, --fix]
types_or: [ jupyter ]
types_or: [jupyter]
- id: ruff-format
types_or: [ python, jupyter ]
- repo: https://github.com/biomejs/pre-commit
rev: v0.4.0
hooks:
- id: biome-format

# Ruff doesn't have full coverage of pydoclint https://github.com/astral-sh/ruff/issues/12434
- repo: https://github.com/PyCQA/flake8
rev: 7.0.0
hooks:
- id: flake8
name: pydoclint
files: "none"
# files: parcels/fieldset.py # put here instead of in config file due to https://github.com/pre-commit/pre-commit-hooks/issues/112#issuecomment-215613842
args:
- --select=DOC103 # TODO: Expand coverage to other codes
additional_dependencies:
- pydoclint[flake8]
7 changes: 4 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
## Parcels

[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/OceanParcels/parcels/master?labpath=docs%2Fexamples%2Fparcels_tutorial.ipynb)
[![unit-tests](https://github.com/OceanParcels/parcels/actions/workflows/unit-tests.yml/badge.svg)](https://github.com/OceanParcels/parcels/actions/workflows/unit-tests.yml)
[![codecov](https://codecov.io/gh/OceanParcels/parcels/branch/master/graph/badge.svg)](https://codecov.io/gh/OceanParcels/parcels)
[![Anaconda-release](https://anaconda.org/conda-forge/parcels/badges/version.svg)](https://anaconda.org/conda-forge/parcels/)
[![Anaconda-date](https://anaconda.org/conda-forge/parcels/badges/latest_release_date.svg)](https://anaconda.org/conda-forge/parcels/)
[![Zenodo](https://zenodo.org/badge/DOI/10.5281/zenodo.823561.svg)](https://doi.org/10.5281/zenodo.823561)
[![Code style: Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/format.json)](https://github.com/astral-sh/ruff)
[![unit-tests](https://github.com/OceanParcels/parcels/actions/workflows/ci.yml/badge.svg)](https://github.com/OceanParcels/parcels/actions/workflows/ci.yml)
[![codecov](https://codecov.io/gh/OceanParcels/parcels/branch/master/graph/badge.svg)](https://codecov.io/gh/OceanParcels/parcels)
[![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/5353/badge)](https://bestpractices.coreinfrastructure.org/projects/5353)
[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/OceanParcels/parcels/master?labpath=docs%2Fexamples%2Fparcels_tutorial.ipynb)

**Parcels** (**P**robably **A** **R**eally **C**omputationally **E**fficient **L**agrangian **S**imulator) is a set of Python classes and methods to create customisable particle tracking simulations using output from Ocean Circulation models. Parcels can be used to track passive and active particulates such as water, plankton, [plastic](http://www.topios.org/) and [fish](https://github.com/Jacketless/IKAMOANA).

Expand Down
1 change: 0 additions & 1 deletion codecov.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,5 @@ comment:
require_base: false
require_head: true
hide_project_coverage: true

# When modifying this file, please validate using
# curl -X POST --data-binary @codecov.yml https://codecov.io/validate
7 changes: 5 additions & 2 deletions environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -33,10 +33,13 @@ dependencies:
- pytest-html
- coverage

# Typing
- mypy
- types-tqdm
- types-psutil

# Linting
- flake8>=2.1.0
- pre_commit
- pydocstyle

# Docs
- ipython
Expand Down
19 changes: 19 additions & 0 deletions parcels/_compat.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
"""Import helpers for compatability between installations."""

__all__ = ["MPI", "KMeans"]

from typing import Any

MPI: Any | None = None
KMeans: Any | None = None

try:
from mpi4py import MPI # type: ignore[no-redef]
except ModuleNotFoundError:
pass

# KMeans is used in MPI. sklearn not installed by default
try:
from sklearn.cluster import KMeans # type: ignore[no-redef]
except ModuleNotFoundError:
pass
45 changes: 45 additions & 0 deletions parcels/_typing.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
"""
Typing support for Parcels.
This module contains type aliases used throughout Parcels as well as functions that are
used for runtime parameter validation (to ensure users are only using the right params).
"""

import ast
import datetime
import os
from typing import Callable, Literal


class ParcelsAST(ast.AST):
ccode: str


InterpMethodOption = Literal[
"linear",
"nearest",
"freeslip",
"partialslip",
"bgrid_velocity",
"bgrid_w_velocity",
"cgrid_velocity",
"linear_invdist_land_tracer",
"nearest",
"bgrid_tracer",
"cgrid_tracer",
] # corresponds with `tracer_interp_method`
InterpMethod = (
InterpMethodOption | dict[str, InterpMethodOption]
) # corresponds with `interp_method` (which can also be dict mapping field names to method)
PathLike = str | os.PathLike
Mesh = Literal["spherical", "flat"] # corresponds with `mesh`
VectorType = Literal["3D", "2D"] | None # corresponds with `vector_type`
ChunkMode = Literal["auto", "specific", "failsafe"] # corresponds with `chunk_mode`
GridIndexingType = Literal["pop", "mom5", "mitgcm", "nemo"] # corresponds with `grid_indexing_type`
UpdateStatus = Literal["not_updated", "first_updated", "updated"] # corresponds with `update_status`
TimePeriodic = float | datetime.timedelta | Literal[False]  # corresponds with `time_periodic`
NetcdfEngine = Literal["netcdf4", "xarray", "scipy"]


KernelFunction = Callable[..., None]
5 changes: 1 addition & 4 deletions parcels/compilation/codecompiler.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,7 @@
import subprocess
from struct import calcsize

try:
from mpi4py import MPI
except ModuleNotFoundError:
MPI = None
from parcels._compat import MPI

_tmp_dir = os.getcwd()

Expand Down
6 changes: 3 additions & 3 deletions parcels/compilation/codegenerator.py
Original file line number Diff line number Diff line change
Expand Up @@ -415,7 +415,7 @@ class KernelGenerator(ABC, ast.NodeVisitor):

# Intrinsic variables that appear as function arguments
kernel_vars = ["particle", "fieldset", "time", "output_time", "tol"]
array_vars = []
array_vars: list[str] = []

def __init__(self, fieldset=None, ptype=JITParticle):
self.fieldset = fieldset
Expand All @@ -424,7 +424,7 @@ def __init__(self, fieldset=None, ptype=JITParticle):
self.vector_field_args = collections.OrderedDict()
self.const_args = collections.OrderedDict()

def generate(self, py_ast, funcvars):
def generate(self, py_ast, funcvars: list[str]):
# Replace occurrences of intrinsic objects in Python AST
transformer = IntrinsicTransformer(self.fieldset, self.ptype)
py_ast = transformer.visit(py_ast)
Expand All @@ -439,7 +439,7 @@ def generate(self, py_ast, funcvars):
# Insert variable declarations for non-intrinsic variables
# Make sure that repeated variables are not declared more than
# once. If variables occur in multiple Kernels, give a warning
used_vars = []
used_vars: list[str] = []
funcvars_copy = copy(funcvars) # editing a list while looping over it is dangerous
for kvar in funcvars:
if kvar in used_vars + ["particle_dlon", "particle_dlat", "particle_ddepth"]:
Expand Down
Loading

0 comments on commit 4defe1f

Please sign in to comment.