
Commit 5c82b92

Merge branch 'master' into runid-sort

2 parents e23d622 + afb5ba6

12 files changed: +765, -67 lines

.github/workflows/master.yml

Lines changed: 4 additions & 4 deletions

@@ -20,7 +20,7 @@ jobs:
     if: always()
     strategy:
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: [3.8, 3.9, '3.10']
         pip-packages:
           - "setuptools pip pytest pytest-cov coverage codecov boutdata xarray numpy>=1.16.0"
       fail-fast: false
@@ -33,7 +33,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies
       run: |
-        sudo apt-get install libhdf5-dev libnetcdf-dev
+        sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
         python -m pip install --upgrade pip
         pip install --upgrade ${{ matrix.pip-packages }}
         pip install -e .
@@ -53,7 +53,7 @@ jobs:
     if: always()
     strategy:
       matrix:
-        python-version: [3.7, 3.8]
+        python-version: [3.8]
         pip-packages:
           - "setuptools pip pytest pytest-cov coverage codecov boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==6.1.0" # test with oldest supported version of packages. Note, using numpy==1.18.0 as a workaround because numpy==1.17.0 is not supported on Python-3.7, even though we should currently support numpy==1.17.0.
       fail-fast: false
@@ -66,7 +66,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies
       run: |
-        sudo apt-get install libhdf5-dev libnetcdf-dev
+        sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
         python -m pip install --upgrade pip
         pip install --upgrade ${{ matrix.pip-packages }}
         pip install -e .

.github/workflows/pythonpackage.yml

Lines changed: 2 additions & 2 deletions

@@ -33,7 +33,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies
       run: |
-        sudo apt-get install libhdf5-dev libnetcdf-dev
+        sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
         python -m pip install --upgrade pip
         pip install --upgrade ${{ matrix.pip-packages }}
         pip install -e .
@@ -66,7 +66,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies
       run: |
-        sudo apt-get install libhdf5-dev libnetcdf-dev
+        sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
         python -m pip install --upgrade pip
         pip install --upgrade ${{ matrix.pip-packages }}
         pip install -e .

.github/workflows/pythonpublish.yml

Lines changed: 5 additions & 5 deletions

@@ -14,7 +14,7 @@ jobs:
     if: always()
     strategy:
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: [3.8, 3.9, '3.10']
         pip-packages:
           - "setuptools pip pytest pytest-cov coverage codecov boutdata xarray numpy>=1.16.0"
       fail-fast: true
@@ -27,7 +27,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies
       run: |
-        sudo apt-get install libhdf5-dev libnetcdf-dev
+        sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
         python -m pip install --upgrade pip
         pip install --upgrade ${{ matrix.pip-packages }}
         pip install -e .
@@ -47,7 +47,7 @@ jobs:
     if: always()
     strategy:
       matrix:
-        python-version: [3.7, 3.8]
+        python-version: [3.8]
         pip-packages:
           - "setuptools pip pytest pytest-cov coverage codecov boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==7.2.0" # test with oldest supported version of packages. Note, using numpy==1.18.0 as a workaround because numpy==1.17.0 is not supported on Python-3.7, even though we should currently support numpy==1.17.0.
       fail-fast: true
@@ -60,7 +60,7 @@ jobs:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies
       run: |
-        sudo apt-get install libhdf5-dev libnetcdf-dev
+        sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
         python -m pip install --upgrade pip
         pip install --upgrade ${{ matrix.pip-packages }}
         pip install -e .
@@ -123,7 +123,7 @@ jobs:
         python-version: '3.x'
     - name: Install dependencies
       run: |
-        sudo apt-get install libhdf5-dev libnetcdf-dev
+        sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
         python -m pip install --upgrade pip
         pip install --upgrade setuptools wheel twine
         pip install -e .

setup.cfg

Lines changed: 5 additions & 1 deletion

@@ -28,7 +28,7 @@ setup_requires =
     setuptools_scm[toml]>=3.4
     setuptools_scm_git_archive
 install_requires =
-    xarray>=0.18.0,!=2022.9.0,!=2022.10.0
+    xarray>=0.18.0,!=2022.9.0,!=2022.10.0,!=2022.11.0,!=2022.12.0
     boutdata>=0.1.4
     dask[array]>=2.10.0
     gelidum>=0.5.3
@@ -50,6 +50,10 @@ calc =
     xrft
     xhistogram
 docs = sphinx >= 1.4
+3d_plot =
+    k3d >= 2.8.0
+    mayavi >= 4.7.2
+    wand

 [build_sphinx]
 project = $metadata.name
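
The new "3d_plot" extra makes the heavy 3d-plotting dependencies optional; they would be pulled in with pip install "xbout[3d_plot]". Below is a minimal sketch of how optional imports like these are commonly guarded at runtime; the HAS_K3D flag name is illustrative and not taken from this commit:

    # Sketch: guard the optional 3d-plotting dependencies declared under the
    # new "3d_plot" extra (k3d, mayavi, wand), installed via
    #     pip install "xbout[3d_plot]"
    # HAS_K3D is a hypothetical flag name, not from this commit.
    try:
        import k3d  # noqa: F401

        HAS_K3D = True
    except ImportError:
        HAS_K3D = False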

xbout/boutdataarray.py

Lines changed: 20 additions & 0 deletions

@@ -385,6 +385,9 @@ def interpolate_parallel(

         return da

+    def add_cartesian_coordinates(self):
+        return _add_cartesian_coordinates(self.data)
+
     def add_cartesian_coordinates(self):
         """
         Add Cartesian (X,Y,Z) coordinates.
@@ -1077,3 +1080,20 @@ def plot_regions(self, ax=None, **kwargs):
         tokamak topology.
         """
         return plotfuncs.plot_regions(self.data, ax=ax, **kwargs)
+
+    def plot3d(self, ax=None, **kwargs):
+        """
+        Make a 3d plot
+
+        Warnings
+        --------
+
+        3d plotting functionality is still a bit of a work in progress. Bugs are
+        likely, and help developing is welcome!
+
+        Parameters
+        ----------
+
+        See plotfuncs.plot3d()
+        """
+        return plotfuncs.plot3d(self.data, **kwargs)
xbout/boutdataset.py

Lines changed: 3 additions & 0 deletions

@@ -266,6 +266,9 @@ def find_with_dims(first_var, dims):

         return ds

+    def add_cartesian_coordinates(self):
+        return _add_cartesian_coordinates(self.data)
+
     def integrate_midpoints(self, variable, *, dims=None, cumulative_t=False):
         """
         Integrate using the midpoint rule for spatial dimensions, and trapezium rule for
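
Both accessors now expose add_cartesian_coordinates, each delegating to the module-level _add_cartesian_coordinates helper. Note that in xbout/boutdataarray.py the merge inserts a bare two-line definition immediately before an existing method of the same name; since the later definition wins in Python, the added two-liner is shadowed dead code. A usage sketch under the same placeholder assumptions as above:

    # Attach Cartesian (X, Y, Z) coordinates; path and variable are placeholders.
    ds = open_boutdataset("data/BOUT.dmp.*.nc")

    ds_xyz = ds.bout.add_cartesian_coordinates()      # Dataset accessor (this file)
    n_xyz = ds["n"].bout.add_cartesian_coordinates()  # DataArray accessor (previous file)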

xbout/load.py

Lines changed: 106 additions & 15 deletions

@@ -35,6 +35,17 @@
     "MYPE",
 ]
 _BOUT_TIME_DEPENDENT_META_VARS = ["iteration", "hist_hi", "tt"]
+_BOUT_GEOMETRY_VARS = [
+    "ixseps1",
+    "ixseps2",
+    "jyseps1_1",
+    "jyseps2_1",
+    "jyseps1_2",
+    "jyseps2_2",
+    "nx",
+    "ny",
+    "ny_inner",
+]


 # This code should run whenever any function from this module is imported
@@ -350,6 +361,10 @@ def attrs_remove_section(obj, section):
                 pass
             else:
                 raise ValueError(msg)
+        for v in _BOUT_GEOMETRY_VARS:
+            if v not in ds.metadata and v in grid:
+                ds.metadata[v] = grid[v].values
+
         # Update coordinates to match particular geometry of grid
         ds = geometries.apply_geometry(ds, geometry, grid=grid)

@@ -365,6 +380,42 @@ def attrs_remove_section(obj, section):
     # BOUT++
     ds.bout.fine_interpolation_factor = 8

+    if ("dump" in input_type or "restart" in input_type) and ds.metadata[
+        "BOUT_VERSION"
+    ] < 4.0:
+        # Add workarounds for missing information or different conventions in data saved
+        # by BOUT++ v3.x.
+        for v in ds:
+            if ds.metadata["bout_zdim"] in ds[v].dims:
+                # All fields saved on aligned grid for BOUT-3
+                ds[v].attrs["direction_y"] = "Aligned"
+
+            added_location = False
+            if any(
+                d in ds[v].dims
+                for d in (
+                    ds.metadata["bout_xdim"],
+                    ds.metadata["bout_ydim"],
+                    ds.metadata["bout_zdim"],
+                )
+            ):
+                # zShift, etc. did not support staggered grids in BOUT++ v3 anyway, so
+                # just treat all variables as if they were at CELL_CENTRE
+                ds[v].attrs["cell_location"] = "CELL_CENTRE"
+                added_location = True
+            if added_location:
+                warn(
+                    "Detected data from BOUT++ v3.x. Treating all variables as being "
+                    "at `CELL_CENTRE`. Should be similar to what BOUT++ v3.x did, but "
+                    "if your code uses staggered grids, this may produce unexpected "
+                    "effects in some places."
+                )
+
+        if "nz" not in ds.metadata:
+            # `nz` used to be stored as `MZ` and `MZ` used to include an extra buffer
+            # point that was not used for data.
+            ds.metadata["nz"] = ds.metadata["MZ"] - 1
+
     if info == "terse":
         print("Read in dataset from {}".format(str(Path(datapath))))
     elif info:
@@ -600,17 +651,40 @@ def _auto_open_mfboutdataset(

         paths_grid, concat_dims = _arrange_for_concatenation(filepaths, nxpe, nype)

-        ds = xr.open_mfdataset(
-            paths_grid,
-            concat_dim=concat_dims,
-            combine="nested",
-            data_vars=data_vars,
-            preprocess=_preprocess,
-            engine=filetype,
-            chunks=chunks,
-            join="exact",
-            **kwargs,
-        )
+        try:
+            ds = xr.open_mfdataset(
+                paths_grid,
+                concat_dim=concat_dims,
+                combine="nested",
+                data_vars=data_vars,
+                preprocess=_preprocess,
+                engine=filetype,
+                chunks=chunks,
+                join="exact",
+                **kwargs,
+            )
+        except ValueError as e:
+            message_to_catch = (
+                "some variables in data_vars are not data variables on the first "
+                "dataset:"
+            )
+            if str(e)[: len(message_to_catch)] == message_to_catch:
+                # Open concatenating any variables that are different in
+                # different files as a work around to support opening older
+                # data.
+                ds = xr.open_mfdataset(
+                    paths_grid,
+                    concat_dim=concat_dims,
+                    combine="nested",
+                    data_vars="different",
+                    preprocess=_preprocess,
+                    engine=filetype,
+                    chunks=chunks,
+                    join="exact",
+                    **kwargs,
+                )
+            else:
+                raise
     else:
         # datapath was nested list of Datasets
@@ -744,8 +818,16 @@ def get_nonnegative_scalar(ds, key, default=1, info=True):

     # Check whether this is a single file squashed from the multiple output files of a
     # parallel run (i.e. NXPE*NYPE > 1 even though there is only a single file to read).
-    nx = ds["nx"].values
-    ny = ds["ny"].values
+    if "nx" in ds:
+        nx = ds["nx"].values
+    else:
+        # Workaround for older data files
+        nx = ds["MXSUB"].values * ds["NXPE"].values + 2 * ds["MXG"].values
+    if "ny" in ds:
+        ny = ds["ny"].values
+    else:
+        # Workaround for older data files
+        ny = ds["MYSUB"].values * ds["NYPE"].values
     nx_file = ds.dims["x"]
     ny_file = ds.dims["y"]
     is_squashed_doublenull = False
@@ -758,7 +840,10 @@ def get_nonnegative_scalar(ds, key, default=1, info=True):
         mxg = 0

         # Check if there are two divertor targets present
-        if ds["jyseps1_2"] > ds["jyseps2_1"]:
+        # Note: if jyseps2_1 and jyseps1_2 are not in ds it probably
+        # indicates older data and likely the upper target boundary cells
+        # were not saved anyway, so continue as if they were not.
+        if "jyseps2_1" in ds and ds["jyseps1_2"] > ds["jyseps2_1"]:
             upper_target_cells = myg
         else:
             upper_target_cells = 0
@@ -771,7 +856,13 @@ def get_nonnegative_scalar(ds, key, default=1, info=True):

         nxpe = 1
         nype = 1
-        is_squashed_doublenull = (ds["jyseps2_1"] != ds["jyseps1_2"]).values
+        if "jyseps2_1" in ds:
+            is_squashed_doublenull = (ds["jyseps2_1"] != ds["jyseps1_2"]).values
+        else:
+            # For older data with no jyseps2_1 or jyseps1_2 in the
+            # dataset, probably do not need to handle double null data
+            # squashed with upper target points.
+            is_squashed_doublenull = False
     elif ny_file == ny + 2 * myg:
         # Older squashed file from double-null grid but containing only lower
         # target boundary cells.
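
The compatibility fallbacks above reconstruct grid sizes for old files that lack "nx"/"ny". A worked sketch of that arithmetic; the numeric values are made up for illustration, only the formulas come from the diff:

    # Reconstruct total grid sizes from per-processor sizes, as in the
    # fallback above. All numeric values here are illustrative.
    MXSUB, NXPE, MXG = 16, 4, 2  # x points per processor, x processors, x guard width
    MYSUB, NYPE = 32, 2          # y points per processor, y processors

    nx = MXSUB * NXPE + 2 * MXG  # 68: x guard cells are included
    ny = MYSUB * NYPE            # 64: y guard cells are not

    # BOUT++ v3.x stored MZ with one unused buffer point, hence the nz workaround:
    MZ = 65
    nz = MZ - 1  # 64 usable z points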

xbout/plotting/animate.py

Lines changed: 1 addition & 1 deletion

@@ -600,7 +600,7 @@ def animate_line(

     # Check plot is the right orientation
     t_read, x_read = data.dims
-    if t_read is animate_over:
+    if t_read == animate_over:
         x = x_read
     else:
         data = data.transpose(animate_over, t_read, transpose_coords=True)
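
This one-line fix replaces an identity test (is) with an equality test (==): two equal strings need not be the same object, so the old check only worked when CPython happened to intern both names. A minimal illustration:

    a = "time"
    b = "".join(["ti", "me"])  # equal value, but a distinct object built at runtime

    print(a == b)  # True: compares values
    print(a is b)  # typically False: compares object identity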
