27 | 27 | _parse_coord_option, |
28 | 28 | ) |
29 | 29 | from .region import _from_region |
30 | | -from .utils import _get_bounding_surfaces, _split_into_restarts |
| 30 | +from .utils import ( |
| 31 | + _add_cartesian_coordinates, |
| 32 | + _get_bounding_surfaces, |
| 33 | + _split_into_restarts, |
| 34 | +) |
31 | 35 |  |
32 | 36 |  |
33 | 37 | @xr.register_dataset_accessor("bout") |
@@ -542,6 +546,171 @@ def interpolate_from_unstructured( |
542 | 546 |  |
543 | 547 | return ds |
544 | 548 |  |
| 549 | + def interpolate_to_cartesian( |
| 550 | + self, nX=300, nY=300, nZ=100, *, use_float32=True, fill_value=np.nan |
| 551 | + ): |
| 552 | + """ |
| 553 | + Interpolate the Dataset to a regular Cartesian grid. |
| 554 | + |
| 555 | + This method is intended to be used to produce data for visualisation, which |
| 556 | + normally does not require double-precision values, so by default the data is |
| 557 | + converted to `np.float32`. Pass `use_float32=False` to retain the original |
| 558 | + precision. |
| 559 | + |
| 560 | + Parameters |
| 561 | + ---------- |
| 562 | + nX : int (default 300) |
| 563 | + Number of grid points in the X direction |
| 564 | + nY : int (default 300) |
| 565 | + Number of grid points in the Y direction |
| 566 | + nZ : int (default 100) |
| 567 | + Number of grid points in the Z direction |
| 568 | + use_float32 : bool (default True) |
| 569 | + Downgrade precision to `np.float32`? |
| 570 | + fill_value : float (default np.nan) |
| 571 | + Value to use for points outside the interpolation domain (passed to |
| 572 | + `scipy.interpolate.RegularGridInterpolator`) |
| 573 | + |
| 574 | + See Also |
| 575 | + -------- |
| 576 | + BoutDataArray.interpolate_to_cartesian |
| 577 | + """ |
| 578 | + ds = self.data |
| 579 | + ds = ds.bout.add_cartesian_coordinates() |
| 580 | + |
| 581 | + if not isinstance(use_float32, bool): |
| 582 | + raise ValueError(f"use_float32 must be a bool, got '{use_float32}'") |
| 583 | + if use_float32: |
| 584 | + float_type = np.float32 |
| 585 | + ds = ds.astype(float_type) |
| 586 | + for coord in ds.coords: |
| 587 | + # Coordinates are not converted by Dataset.astype, so convert explicitly |
| 588 | + ds[coord] = ds[coord].astype(float_type) |
| 589 | + fill_value = float_type(fill_value) |
| 590 | + else: |
| 591 | + float_type = ds[list(ds.data_vars)[0]].dtype |
| 592 | + |
| 593 | + tdim = ds.metadata["bout_tdim"] |
| 594 | + zdim = ds.metadata["bout_zdim"] |
| 595 | + if tdim in ds.dims: |
| 596 | + nt = ds.sizes[tdim] |
| 597 | + n_toroidal = ds.sizes[zdim] |
| 598 | + |
| 599 | + # Create Cartesian grid to interpolate to |
| 600 | + Xmin = ds["X_cartesian"].min() |
| 601 | + Xmax = ds["X_cartesian"].max() |
| 602 | + Ymin = ds["Y_cartesian"].min() |
| 603 | + Ymax = ds["Y_cartesian"].max() |
| 604 | + Zmin = ds["Z_cartesian"].min() |
| 605 | + Zmax = ds["Z_cartesian"].max() |
| 606 | + newX_1d = xr.DataArray(np.linspace(Xmin, Xmax, nX), dims="X") |
| 607 | + newX = newX_1d.expand_dims({"Y": nY, "Z": nZ}, axis=[1, 2]) |
| 608 | + newY_1d = xr.DataArray(np.linspace(Ymin, Ymax, nY), dims="Y") |
| 609 | + newY = newY_1d.expand_dims({"X": nX, "Z": nZ}, axis=[0, 2]) |
| 610 | + newZ_1d = xr.DataArray(np.linspace(Zmin, Zmax, nZ), dims="Z") |
| 611 | + newZ = newZ_1d.expand_dims({"X": nX, "Y": nY}, axis=[0, 1]) |
| 612 | + newR = np.sqrt(newX**2 + newY**2) |
| 613 | + newzeta = np.arctan2(newY, newX) |
| 614 | + # Define newzeta in range 0->2*pi |
| 615 | + newzeta = np.where(newzeta < 0.0, newzeta + 2.0 * np.pi, newzeta) |
| 616 | + |
| 617 | + from scipy.interpolate import ( |
| 618 | + RegularGridInterpolator, |
| 619 | + griddata, |
| 620 | + ) |
| 621 | + |
| 622 | + # Create cylindrical (R, Z) coordinates for the intermediate grid |
| 623 | + Rcyl_min = float_type(ds["R"].min()) |
| 624 | + Rcyl_max = float_type(ds["R"].max()) |
| 625 | + Zcyl_min = float_type(ds["Z"].min()) |
| 626 | + Zcyl_max = float_type(ds["Z"].max()) |
| 627 | + n_Rcyl = int(round(nZ * (Rcyl_max - Rcyl_min) / (Zcyl_max - Zcyl_min))) |
| 628 | + Rcyl = xr.DataArray(np.linspace(Rcyl_min, Rcyl_max, 2 * n_Rcyl), dims="r") |
| 629 | + Zcyl = xr.DataArray(np.linspace(Zcyl_min, Zcyl_max, 2 * nZ), dims="z") |
| 630 | + |
| 631 | + # Create Dataset for result |
| 632 | + result = xr.Dataset() |
| 633 | + result.attrs["metadata"] = ds.metadata |
| 634 | + |
| 635 | + # Interpolate in two stages for efficiency. Unstructured 3d interpolation is |
| 636 | + # very slow. Unstructured 2d interpolation onto Cartesian (R, Z) grids, followed |
| 637 | + # by structured 3d interpolation onto the (X, Y, Z) grid, is much faster. |
| 638 | + # Structured 3d interpolation straight from (psi, theta, zeta) to (X, Y, Z) |
| 639 | + # leaves artifacts in the output, because theta does not vary continuously |
| 640 | + # everywhere (has branch cuts). |
| 641 | + |
| 642 | + zeta_out = np.zeros(n_toroidal + 1) |
| 643 | + zeta_out[:-1] = ds[zdim].values |
| 644 | + zeta_out[-1] = zeta_out[-2] + ds["dz"].mean() |
| 645 | + |
| 646 | + def interp_single_time(da): |
| 647 | + print(" interpolate poloidal planes") |
| 648 | + |
| 649 | + da_cyl = da.bout.interpolate_from_unstructured(R=Rcyl, Z=Zcyl).transpose( |
| 650 | + "R", "Z", zdim, missing_dims="ignore" |
| 651 | + ) |
| 652 | + |
| 653 | + if zdim not in da_cyl.dims: |
| 654 | + da_cyl = da_cyl.expand_dims({zdim: n_toroidal + 1}, axis=-1) |
| 655 | + else: |
| 656 | + # Impose toroidal periodicity by appending zdim=0 to end of array |
| 657 | + da_cyl = xr.concat((da_cyl, da_cyl.isel({zdim: 0})), zdim) |
| 658 | + |
| 659 | + print(" build 3d interpolator") |
| 660 | + interp = RegularGridInterpolator( |
| 661 | + (Rcyl.values, Zcyl.values, zeta_out), |
| 662 | + da_cyl.values, |
| 663 | + bounds_error=False, |
| 664 | + fill_value=fill_value, |
| 665 | + ) |
| 666 | + |
| 667 | + print(" do 3d interpolation") |
| 668 | + return interp( |
| 669 | + (newR, newZ, newzeta), |
| 670 | + method="linear", |
| 671 | + ) |
| 672 | + |
| 673 | + for name, da in ds.data_vars.items(): |
| 674 | + print(f"\ninterpolating {name}") |
| 675 | + # The order of dimensions does not really matter here - the output only |
| 676 | + # depends on the shapes of newR, newZ and newzeta. It might be slightly more |
| 677 | + # efficient to assign the 2d results in the loop to the last two dimensions, |
| 678 | + # i.e. to put zeta first. Below, da.min().item() can't be used to get a scalar |
| 679 | + # (instead of a zero-size array) because .item() doesn't work for dask arrays (yet!). |
| 680 | + |
| 681 | + datamin = float_type(da.min().values) |
| 682 | + datamax = float_type(da.max().values) |
| 683 | + |
| 684 | + if tdim in da.dims: |
| 685 | + data_cartesian = np.zeros((nt, nX, nY, nZ), dtype=float_type) |
| 686 | + for tind in range(nt): |
| 687 | + print(f" tind={tind}") |
| 688 | + data_cartesian[tind, :, :, :] = interp_single_time( |
| 689 | + da.isel({tdim: tind}) |
| 690 | + ) |
| 691 | + result[name] = xr.DataArray(data_cartesian, dims=[tdim, "X", "Y", "Z"]) |
| 692 | + else: |
| 693 | + data_cartesian = interp_single_time(da) |
| 694 | + result[name] = xr.DataArray(data_cartesian, dims=["X", "Y", "Z"]) |
| 695 | + |
| 696 | + # Copy metadata to data variables, in case it is needed |
| 697 | + result[name].attrs["metadata"] = ds.metadata |
| 698 | + |
| 699 | + result = result.assign_coords(X=newX_1d, Y=newY_1d, Z=newZ_1d) |
| 700 | + |
| 701 | + return result |
| 702 | + |
| 703 | + def add_cartesian_coordinates(self): |
| 704 | + """ |
| 705 | + Add Cartesian (X,Y,Z) coordinates. |
| 706 | + |
| 707 | + Returns |
| 708 | + ------- |
| 709 | + Dataset with new coordinates added, which are named 'X_cartesian', |
| 710 | + 'Y_cartesian', and 'Z_cartesian' |
| 711 | + """ |
| 712 | + return _add_cartesian_coordinates(self.data) |
| 713 | + |
545 | 714 | def remove_yboundaries(self, **kwargs): |
546 | 715 | """ |
547 | 716 | Remove y-boundary points, if present, from the Dataset |
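The diff adds two public Dataset methods. As a quick orientation (not part of the commit), here is a minimal usage sketch for the new `interpolate_to_cartesian` method, assuming a simulation opened with `xbout.open_boutdataset`; the data path and the grid sizes are placeholders.

    import xbout

    # Open a BOUT++ simulation (the path is a placeholder)
    ds = xbout.open_boutdataset("run0/BOUT.dmp.*.nc")

    # Interpolate every variable onto a regular (X, Y, Z) grid for visualisation.
    # By default the data is downcast to float32 and points outside the original
    # domain are set to fill_value (np.nan).
    ds_cart = ds.bout.interpolate_to_cartesian(nX=200, nY=200, nZ=80)

    # The result is a plain xarray.Dataset with 1d "X", "Y", "Z" coordinates,
    # suitable for passing to volume-rendering tools.
    print(ds_cart.sizes)

Passing `use_float32=False` keeps the original precision, at the cost of roughly doubling the memory taken by the interpolated output when the input is double precision.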
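The comment before the interpolation loop explains the two-stage strategy: unstructured 2d interpolation onto a regular (R, Z) grid in each poloidal plane, then structured 3d interpolation onto (X, Y, Z), with the zeta=0 plane appended so that linear interpolation is periodic in the toroidal angle. A self-contained toy sketch of that second stage (all data here is synthetic, not taken from the diff):

    import numpy as np
    from scipy.interpolate import RegularGridInterpolator

    # Synthetic data on a regular (R, Z, zeta) grid
    R = np.linspace(1.0, 2.0, 20)
    Z = np.linspace(-1.0, 1.0, 30)
    zeta = np.linspace(0.0, 2.0 * np.pi, 16, endpoint=False)
    data = np.cos(zeta)[None, None, :] * R[:, None, None] + 0.0 * Z[None, :, None]

    # Append the zeta=0 plane at zeta=2*pi so the interpolation wraps around toroidally
    zeta_out = np.append(zeta, 2.0 * np.pi)
    data_periodic = np.concatenate([data, data[:, :, :1]], axis=2)

    interp = RegularGridInterpolator(
        (R, Z, zeta_out), data_periodic, bounds_error=False, fill_value=np.nan
    )

    # Evaluate at Cartesian points by mapping them back to cylindrical coordinates,
    # mirroring how interp_single_time uses newR, newZ and newzeta
    X, Y = np.meshgrid(np.linspace(-2, 2, 5), np.linspace(-2, 2, 5), indexing="ij")
    newR = np.sqrt(X**2 + Y**2)
    newzeta = np.arctan2(Y, X)
    newzeta = np.where(newzeta < 0.0, newzeta + 2.0 * np.pi, newzeta)
    newZ = np.zeros_like(newR)
    print(interp((newR, newZ, newzeta)))  # NaN where newR falls outside [1, 2]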
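The second method, `add_cartesian_coordinates`, is a thin wrapper around the `_add_cartesian_coordinates` utility imported at the top of the diff. A sketch of using it directly, continuing from the dataset opened above; the variable name "n" is a placeholder, the plotting code is purely illustrative, and the assumption that the new coordinates depend only on the spatial dimensions (so they survive the time selection) is not guaranteed by the diff itself:

    import matplotlib.pyplot as plt

    ds_xyz = ds.bout.add_cartesian_coordinates()

    # Only coordinates are added; the data variables are unchanged
    print([name for name in ds_xyz.coords if str(name).endswith("_cartesian")])

    # Illustrative 3d scatter of the last time slice of a variable ("n" is a placeholder)
    da = ds_xyz["n"].isel({ds_xyz.metadata["bout_tdim"]: -1})
    x = da["X_cartesian"].broadcast_like(da).transpose(*da.dims).values.ravel()
    y = da["Y_cartesian"].broadcast_like(da).transpose(*da.dims).values.ravel()
    z = da["Z_cartesian"].broadcast_like(da).transpose(*da.dims).values.ravel()
    ax = plt.figure().add_subplot(projection="3d")
    ax.scatter(x, y, z, c=da.values.ravel(), s=1)
    plt.show()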
|