diff --git a/climada/hazard/centroids/centr.py b/climada/hazard/centroids/centr.py
index b02f7569d..df57fbdc3 100644
--- a/climada/hazard/centroids/centr.py
+++ b/climada/hazard/centroids/centr.py
@@ -138,14 +138,14 @@ def geometry(self):
     @property
     def on_land(self):
         """ Get the on_land property """
-        if self.gdf.on_land.isna().all():
+        if self.gdf['on_land'].isna().all():
             return None
         return self.gdf['on_land'].values
 
     @property
     def region_id(self):
         """ Get the assigned region_id """
-        if self.gdf.region_id.isna().all():
+        if self.gdf['region_id'].isna().all():
             return None
         return self.gdf['region_id'].values
@@ -479,7 +479,7 @@ def select_mask(self, sel_cen=None, reg_id=None, extent=None):
                 (self.lat >= lat_min) & (self.lat <= lat_max)
             )
         return sel_cen
-    
+
     def plot(self, *, axis=None, figsize=(9, 13), **kwargs):
         """Plot centroids geodataframe using geopandas and cartopy
         plotting functions.
@@ -494,7 +494,7 @@ def plot(self, *, axis=None, figsize=(9, 13), **kwargs):
             positional arguments for geopandas.GeoDataFrame.plot
         kwargs : optional
             keyword arguments for geopandas.GeoDataFrame.plot
-        
+
         Returns
         -------
         ax : cartopy.mpl.geoaxes.GeoAxes instance
diff --git a/climada/hazard/isimip_data.py b/climada/hazard/isimip_data.py
index 6235bfda5..8630d59d7 100644
--- a/climada/hazard/isimip_data.py
+++ b/climada/hazard/isimip_data.py
@@ -56,6 +56,6 @@ def _read_one_nc(file_name, bbox=None, years=None):
     if not years:
         return data.sel(lat=slice(bbox[3], bbox[1]), lon=slice(bbox[0], bbox[2]))
 
-    time_id = years - int(data.time.units[12:16])
+    time_id = years - int(data['time'].units[12:16])
     return data.sel(lat=slice(bbox[3], bbox[1]), lon=slice(bbox[0], bbox[2]),
                     time=slice(time_id[0], time_id[1]))
diff --git a/climada/hazard/storm_europe.py b/climada/hazard/storm_europe.py
index a0c5b7771..87c48a59f 100644
--- a/climada/hazard/storm_europe.py
+++ b/climada/hazard/storm_europe.py
@@ -334,18 +334,18 @@ def from_cosmoe_file(cls, fp_file, run_datetime, event_date=None,
             .stack(intensity=('y_1', 'x_1'))
         considered_dates = stacked['date'].values
         stacked = stacked.stack(date_ensemble=('date', 'epsd_1'))
-        stacked = stacked.where(stacked.VMAX_10M > intensity_thres)
+        stacked = stacked.where(stacked['VMAX_10M'] > intensity_thres)
         stacked = stacked.fillna(0)
 
         # fill in values from netCDF
-        intensity = sparse.csr_matrix(stacked.VMAX_10M.T)
-        event_id = np.arange(stacked.date_ensemble.size) + 1
+        intensity = sparse.csr_matrix(stacked['VMAX_10M'].T)
+        event_id = np.arange(stacked['date_ensemble'].size) + 1
         date = np.repeat(
             np.array(datetime64_to_ordinal(considered_dates)),
-            np.unique(ncdf.epsd_1).size
+            np.unique(ncdf['epsd_1']).size
         )
         orig = np.full_like(event_id, False)
-        orig[(stacked.epsd_1 == 0).values] = True
+        orig[(stacked['epsd_1'] == 0).values] = True
 
         if description is None:
             description = (model_name + ' weather forecast windfield ' +
@@ -362,10 +362,10 @@ def from_cosmoe_file(cls, fp_file, run_datetime, event_date=None,
             date=date,
             event_name=[date_i + '_ens' + str(ens_i)
                         for date_i, ens_i
-                        in zip(date_to_str(date), stacked.epsd_1.values + 1)],
+                        in zip(date_to_str(date), stacked['epsd_1'].values + 1)],
             frequency=np.divide(
                 np.ones_like(event_id),
-                np.unique(ncdf.epsd_1).size),
+                np.unique(ncdf['epsd_1']).size),
         )
 
         # close netcdf file
@@ -477,30 +477,30 @@ def from_icon_grib(cls, run_datetime, event_date=None, model_name='icon-eu-eps',
         stacked = stacked.where(stacked > intensity_thres)
         stacked = stacked.fillna(0)
 
-        event_id = np.arange(stacked.date_ensemble.size) + 1
+        event_id = np.arange(stacked['date_ensemble'].size) + 1
         date = np.repeat(
             np.array(datetime64_to_ordinal(considered_dates)),
-            np.unique(stacked.number).size
+            np.unique(stacked['number']).size
         )
         orig = np.full_like(event_id, False)
-        orig[(stacked.number == 1).values] = True
+        orig[(stacked['number'] == 1).values] = True
         if description is None:
             description = ('icon weather forecast windfield for run started at ' +
                            run_datetime.strftime('%Y%m%d%H'))
 
         # Create Hazard
         haz = cls(
-            intensity=sparse.csr_matrix(stacked.gust.T),
+            intensity=sparse.csr_matrix(stacked['gust'].T),
             centroids=cls._centroids_from_nc(nc_centroids_file),
             event_id=event_id,
             date=date,
             orig=orig,
             event_name=[date_i + '_ens' + str(ens_i)
                         for date_i, ens_i
-                        in zip(date_to_str(date), stacked.number.values)],
+                        in zip(date_to_str(date), stacked['number'].values)],
             frequency=np.divide(
                 np.ones_like(event_id),
-                np.unique(stacked.number).size),
+                np.unique(stacked['number']).size),
         )
         haz.check()
@@ -527,25 +527,25 @@ def _centroids_from_nc(file_name):
         ncdf = xr.open_dataset(file_name)
         create_meshgrid = True
         if hasattr(ncdf, 'latitude'):
-            lats = ncdf.latitude.data
-            lons = ncdf.longitude.data
+            lats = ncdf['latitude'].data
+            lons = ncdf['longitude'].data
         elif hasattr(ncdf, 'lat'):
-            lats = ncdf.lat.data
-            lons = ncdf.lon.data
+            lats = ncdf['lat'].data
+            lons = ncdf['lon'].data
         elif hasattr(ncdf, 'lat_1'):
-            if len(ncdf.lon_1.shape)>1 & \
-                (ncdf.lon_1.shape == ncdf.lat_1.shape) \
+            if len(ncdf['lon_1'].shape)>1 & \
+                (ncdf['lon_1'].shape == ncdf['lat_1'].shape) \
                 :
-                lats = ncdf.lat_1.data.flatten()
-                lons = ncdf.lon_1.data.flatten()
+                lats = ncdf['lat_1'].data.flatten()
+                lons = ncdf['lon_1'].data.flatten()
                 create_meshgrid = False
             else:
-                lats = ncdf.lat_1.data
-                lons = ncdf.lon_1.data
+                lats = ncdf['lat_1'].data
+                lons = ncdf['lon_1'].data
         elif hasattr(ncdf, 'clat'):
-            lats = ncdf.clat.data
-            lons = ncdf.clon.data
-            if ncdf.clat.attrs['units']=='radian':
+            lats = ncdf['clat'].data
+            lons = ncdf['clon'].data
+            if ncdf['clat'].attrs['units']=='radian':
                 lats = np.rad2deg(lats)
                 lons = np.rad2deg(lons)
             create_meshgrid = False
@@ -713,16 +713,16 @@ def plot_ssi(self, full_area=False):
             'orig': self.orig,
         })
         ssi_freq = ssi_freq.sort_values('ssi', ascending=False)
-        ssi_freq['freq_cum'] = np.cumsum(ssi_freq.freq)
+        ssi_freq['freq_cum'] = np.cumsum(ssi_freq['freq'])
 
-        ssi_hist = ssi_freq.loc[ssi_freq.orig].copy()
-        ssi_hist.freq = ssi_hist.freq * self.orig.size / self.orig.sum()
-        ssi_hist['freq_cum'] = np.cumsum(ssi_hist.freq)
+        ssi_hist = ssi_freq.loc[ssi_freq['orig']].copy()
+        ssi_hist['freq'] = ssi_hist['freq'] * self.orig.size / self.orig.sum()
+        ssi_hist['freq_cum'] = np.cumsum(ssi_hist['freq'])
 
         # plotting
         fig, axs = plt.subplots()
-        axs.plot(ssi_freq.freq_cum, ssi_freq.ssi, label='All Events')
-        axs.scatter(ssi_hist.freq_cum, ssi_hist.ssi,
+        axs.plot(ssi_freq['freq_cum'], ssi_freq['ssi'], label='All Events')
+        axs.scatter(ssi_hist['freq_cum'], ssi_hist['ssi'],
                     color='red', label='Historic Events')
         axs.legend()
         axs.set_xlabel('Exceedance Frequency [1/a]')
diff --git a/climada/hazard/tc_clim_change.py b/climada/hazard/tc_clim_change.py
index 21b40a0b0..5230ac247 100644
--- a/climada/hazard/tc_clim_change.py
+++ b/climada/hazard/tc_clim_change.py
@@ -159,7 +159,7 @@ def calc_scale_knutson(ref_year=2050, rcp_scenario=45):
     rad_force = pd.read_excel(TOT_RADIATIVE_FORCE)
     years = np.array([year for year in rad_force.columns if isinstance(year, int)])
     rad_rcp = np.array([int(float(sce[sce.index('.') - 1:sce.index('.') + 2]) * 10)
-                        for sce in rad_force.Scenario if isinstance(sce, str)])
+                        for sce in rad_force['Scenario'] if isinstance(sce, str)])
 
     # mean values for Knutson values
     rf_vals = np.argwhere(rad_rcp == rcp_knu).reshape(-1)[0]
diff --git a/climada/hazard/tc_tracks.py b/climada/hazard/tc_tracks.py
index 7023fe329..8ab5e5801 100644
--- a/climada/hazard/tc_tracks.py
+++ b/climada/hazard/tc_tracks.py
@@ -518,7 +518,7 @@ def from_ibtracs_netcdf(cls, provider=None, rescale_windspeeds=True, storm_id=No
             return cls()
         ibtracs_ds = ibtracs_ds.sel(storm=match)
-        ibtracs_ds['valid_t'] = ibtracs_ds.time.notnull()
+        ibtracs_ds['valid_t'] = ibtracs_ds['time'].notnull()
 
         if rescale_windspeeds:
             for agency in IBTRACS_AGENCIES:
@@ -585,13 +585,13 @@ def from_ibtracs_netcdf(cls, provider=None, rescale_windspeeds=True, storm_id=No
 
         if estimate_missing:
             ibtracs_ds['pres'][:] = _estimate_pressure(
-                ibtracs_ds.pres, ibtracs_ds.lat, ibtracs_ds.lon, ibtracs_ds.wind)
+                ibtracs_ds['pres'], ibtracs_ds['lat'], ibtracs_ds['lon'], ibtracs_ds['wind'])
             ibtracs_ds['wind'][:] = _estimate_vmax(
-                ibtracs_ds.wind, ibtracs_ds.lat, ibtracs_ds.lon, ibtracs_ds.pres)
+                ibtracs_ds['wind'], ibtracs_ds['lat'], ibtracs_ds['lon'], ibtracs_ds['pres'])
 
-        ibtracs_ds['valid_t'] &= (ibtracs_ds.lat.notnull() & ibtracs_ds.lon.notnull()
-                                  & ibtracs_ds.wind.notnull() & ibtracs_ds.pres.notnull())
-        valid_storms_mask = ibtracs_ds.valid_t.any(dim="date_time")
+        ibtracs_ds['valid_t'] &= (ibtracs_ds['lat'].notnull() & ibtracs_ds['lon'].notnull()
+                                  & ibtracs_ds['wind'].notnull() & ibtracs_ds['pres'].notnull())
+        valid_storms_mask = ibtracs_ds['valid_t'].any(dim="date_time")
         invalid_storms_idx = np.nonzero(~valid_storms_mask.data)[0]
         if invalid_storms_idx.size > 0:
             invalid_sids = list(ibtracs_ds.sid.sel(storm=invalid_storms_idx).astype(str).data)
@@ -601,7 +601,7 @@ def from_ibtracs_netcdf(cls, provider=None, rescale_windspeeds=True, storm_id=No
             ibtracs_ds = ibtracs_ds.sel(storm=valid_storms_mask)
 
         if discard_single_points:
-            valid_storms_mask = ibtracs_ds.valid_t.sum(dim="date_time") > 1
+            valid_storms_mask = ibtracs_ds['valid_t'].sum(dim="date_time") > 1
             invalid_storms_idx = np.nonzero(~valid_storms_mask.data)[0]
             if invalid_storms_idx.size > 0:
                 invalid_sids = list(ibtracs_ds.sid.sel(storm=invalid_storms_idx).astype(str).data)
@@ -617,7 +617,7 @@ def from_ibtracs_netcdf(cls, provider=None, rescale_windspeeds=True, storm_id=No
                         'requirements.')
             return cls()
 
-        max_wind = ibtracs_ds.wind.max(dim="date_time").data.ravel()
+        max_wind = ibtracs_ds['wind'].max(dim="date_time").data.ravel()
         category_test = (max_wind[:, None] < np.array(SAFFIR_SIM_CAT)[None])
         category = np.argmax(category_test, axis=1) - 1
         basin_map = {b.encode("utf-8"): v for b, v in BASIN_ENV_PRESSURE.items()}
@@ -629,7 +629,7 @@ def from_ibtracs_netcdf(cls, provider=None, rescale_windspeeds=True, storm_id=No
 
         last_perc = 0
         all_tracks = []
-        for i_track, t_msk in enumerate(ibtracs_ds.valid_t.data):
+        for i_track, t_msk in enumerate(ibtracs_ds['valid_t'].data):
             perc = 100 * len(all_tracks) / ibtracs_ds.sid.size
             if perc - last_perc >= 10:
                 LOGGER.info("Progress: %d%%", perc)
@@ -647,42 +647,42 @@ def from_ibtracs_netcdf(cls, provider=None, rescale_windspeeds=True, storm_id=No
 
             # A track that crosses the antimeridian in IBTrACS might be truncated by `t_msk` in
             # such a way that the remaining part is not crossing the antimeridian:
-            if (track_ds.lon.values > 180).all():
+            if (track_ds['lon'].values > 180).all():
                 track_ds['lon'] -= 360
 
             # set time_step in hours
-            track_ds['time_step'] = xr.ones_like(track_ds.time, dtype=float)
-            if track_ds.time.size > 1:
-                track_ds.time_step.values[1:] = (track_ds.time.diff(dim="date_time")
+            track_ds['time_step'] = xr.ones_like(track_ds['time'], dtype=float)
+            if track_ds['time'].size > 1:
+                track_ds['time_step'].values[1:] = (track_ds['time'].diff(dim="date_time")
                                                     / np.timedelta64(1, 'h'))
-                track_ds.time_step.values[0] = track_ds.time_step[1]
+                track_ds['time_step'].values[0] = track_ds['time_step'][1]
 
             with warnings.catch_warnings():
                 # See https://github.com/pydata/xarray/issues/4167
                 warnings.simplefilter(action="ignore", category=FutureWarning)
-                track_ds['rmw'] = track_ds.rmw \
+                track_ds['rmw'] = track_ds['rmw'] \
                     .ffill(dim='date_time', limit=1) \
                     .bfill(dim='date_time', limit=1) \
                     .fillna(0)
-                track_ds['roci'] = track_ds.roci \
+                track_ds['roci'] = track_ds['roci'] \
                     .ffill(dim='date_time', limit=1) \
                     .bfill(dim='date_time', limit=1) \
                     .fillna(0)
-                track_ds['poci'] = track_ds.poci \
+                track_ds['poci'] = track_ds['poci'] \
                     .ffill(dim='date_time', limit=4) \
                     .bfill(dim='date_time', limit=4)
                 # this is the most time consuming line in the processing:
-                track_ds['poci'] = track_ds.poci.fillna(tr_basin_penv)
+                track_ds['poci'] = track_ds['poci'].fillna(tr_basin_penv)
 
             if estimate_missing:
-                track_ds['rmw'][:] = estimate_rmw(track_ds.rmw.values, track_ds.pres.values)
-                track_ds['roci'][:] = estimate_roci(track_ds.roci.values, track_ds.pres.values)
-                track_ds['roci'][:] = np.fmax(track_ds.rmw.values, track_ds.roci.values)
+                track_ds['rmw'][:] = estimate_rmw(track_ds['rmw'].values, track_ds['pres'].values)
+                track_ds['roci'][:] = estimate_roci(track_ds['roci'].values, track_ds['pres'].values)
+                track_ds['roci'][:] = np.fmax(track_ds['rmw'].values, track_ds['roci'].values)
                 # ensure environmental pressure >= central pressure
                 # this is the second most time consuming line in the processing:
-                track_ds['poci'][:] = np.fmax(track_ds.poci, track_ds.pres)
+                track_ds['poci'][:] = np.fmax(track_ds['poci'], track_ds['pres'])
 
             provider_str = f"ibtracs_{provider[0]}"
             if len(provider) > 1:
@@ -691,16 +691,16 @@ def from_ibtracs_netcdf(cls, provider=None, rescale_windspeeds=True, storm_id=No
                                    for v in phys_vars)
             data_vars = {
-                'radius_max_wind': ('time', track_ds.rmw.data),
-                'radius_oci': ('time', track_ds.roci.data),
-                'max_sustained_wind': ('time', track_ds.wind.data),
-                'central_pressure': ('time', track_ds.pres.data),
-                'environmental_pressure': ('time', track_ds.poci.data),
+                'radius_max_wind': ('time', track_ds['rmw'].data),
+                'radius_oci': ('time', track_ds['roci'].data),
+                'max_sustained_wind': ('time', track_ds['wind'].data),
+                'central_pressure': ('time', track_ds['pres'].data),
+                'environmental_pressure': ('time', track_ds['poci'].data),
             }
             coords = {
-                'time': ('time', track_ds.time.dt.round('s').data),
-                'lat': ('time', track_ds.lat.data),
-                'lon': ('time', track_ds.lon.data),
+                'time': ('time', track_ds['time'].dt.round('s').data),
+                'lat': ('time', track_ds['lat'].data),
+                'lon': ('time', track_ds['lon'].data),
             }
             attrs = {
                 'max_sustained_wind_unit': 'kn',
@@ -840,24 +840,24 @@ def from_simulations_chaz(cls, file_names, year_range=None, ensemble_nums=None):
         for path in get_file_names(file_names):
            LOGGER.info('Reading %s.', path)
            chaz_ds = xr.open_dataset(path)
-            chaz_ds.time.attrs["units"] = "days since 1950-1-1"
-            chaz_ds.time.attrs["missing_value"] = -54786.0
+            chaz_ds['time'].attrs["units"] = "days since 1950-1-1"
+            chaz_ds['time'].attrs["missing_value"] = -54786.0
            chaz_ds = xr.decode_cf(chaz_ds)
-            chaz_ds['id_no'] = chaz_ds.stormID * 1000 + chaz_ds.ensembleNum
+            chaz_ds['id_no'] = chaz_ds['stormID'] * 1000 + chaz_ds['ensembleNum']
             for var in ['time', 'longitude', 'latitude']:
-                chaz_ds[var] = chaz_ds[var].expand_dims(ensembleNum=chaz_ds.ensembleNum)
+                chaz_ds[var] = chaz_ds[var].expand_dims(ensembleNum=chaz_ds['ensembleNum'])
             chaz_ds = chaz_ds.stack(id=("ensembleNum", "stormID"))
-            years_uniq = chaz_ds.time.dt.year.data
+            years_uniq = chaz_ds['time'].dt.year.data
             years_uniq = np.unique(years_uniq[~np.isnan(years_uniq)])
             LOGGER.info("File contains %s tracks (at most %s nodes each), "
                         "representing %s years (%d-%d).",
-                        chaz_ds.id_no.size, chaz_ds.lifelength.size,
+                        chaz_ds['id_no'].size, chaz_ds['lifelength'].size,
                         years_uniq.size, years_uniq[0], years_uniq[-1])
 
             # filter by year range if given
             if year_range:
-                match = ((chaz_ds.time.dt.year >= year_range[0])
-                         & (chaz_ds.time.dt.year <= year_range[1])).sel(lifelength=0)
+                match = ((chaz_ds['time'].dt.year >= year_range[0])
+                         & (chaz_ds['time'].dt.year <= year_range[1])).sel(lifelength=0)
                 if np.count_nonzero(match) == 0:
                     LOGGER.info('No tracks in time range (%s, %s).', *year_range)
                     continue
@@ -865,15 +865,15 @@ def from_simulations_chaz(cls, file_names, year_range=None, ensemble_nums=None):
 
             # filter by ensembleNum if given
             if ensemble_nums is not None:
-                match = np.isin(chaz_ds.ensembleNum.values, ensemble_nums)
+                match = np.isin(chaz_ds['ensembleNum'].values, ensemble_nums)
                 if np.count_nonzero(match) == 0:
                     LOGGER.info('No tracks with specified ensemble numbers.')
                     continue
                 chaz_ds = chaz_ds.sel(id=match)
 
             # remove invalid tracks from selection
-            chaz_ds['valid_t'] = chaz_ds.time.notnull() & chaz_ds.Mwspd.notnull()
-            valid_st = chaz_ds.valid_t.any(dim="lifelength")
+            chaz_ds['valid_t'] = chaz_ds['time'].notnull() & chaz_ds['Mwspd'].notnull()
+            valid_st = chaz_ds['valid_t'].any(dim="lifelength")
             invalid_st = np.nonzero(~valid_st.data)[0]
             if invalid_st.size > 0:
                 LOGGER.info('No valid Mwspd values found for %d out of %d storm tracks.',
@@ -881,56 +881,56 @@ def from_simulations_chaz(cls, file_names, year_range=None, ensemble_nums=None):
                 chaz_ds = chaz_ds.sel(id=valid_st)
 
             # estimate central pressure from location and max wind
-            chaz_ds['pres'] = xr.full_like(chaz_ds.Mwspd, -1, dtype=float)
+            chaz_ds['pres'] = xr.full_like(chaz_ds['Mwspd'], -1, dtype=float)
             chaz_ds['pres'][:] = _estimate_pressure(
-                chaz_ds.pres, chaz_ds.latitude, chaz_ds.longitude, chaz_ds.Mwspd)
+                chaz_ds['pres'], chaz_ds['latitude'], chaz_ds['longitude'], chaz_ds['Mwspd'])
 
             # compute time stepsizes
-            chaz_ds['time_step'] = xr.zeros_like(chaz_ds.time, dtype=float)
-            chaz_ds['time_step'][1:, :] = (chaz_ds.time.diff(dim="lifelength")
+            chaz_ds['time_step'] = xr.zeros_like(chaz_ds['time'], dtype=float)
+            chaz_ds['time_step'][1:, :] = (chaz_ds['time'].diff(dim="lifelength")
                                            / np.timedelta64(1, 'h'))
-            chaz_ds['time_step'][0, :] = chaz_ds.time_step[1, :]
+            chaz_ds['time_step'][0, :] = chaz_ds['time_step'][1, :]
 
             # determine Saffir-Simpson category
-            max_wind = chaz_ds.Mwspd.max(dim="lifelength").data.ravel()
+            max_wind = chaz_ds['Mwspd'].max(dim="lifelength").data.ravel()
             category_test = (max_wind[:, None] < np.array(SAFFIR_SIM_CAT)[None])
             chaz_ds['category'] = ("id", np.argmax(category_test, axis=1) - 1)
 
             fname = Path(path).name
-            chaz_ds.time[:] = chaz_ds.time.dt.round('s').data
-            chaz_ds['radius_max_wind'] = xr.full_like(chaz_ds.pres, np.nan)
-            chaz_ds['environmental_pressure'] = xr.full_like(chaz_ds.pres, DEF_ENV_PRESSURE)
+            chaz_ds['time'][:] = chaz_ds['time'].dt.round('s').data
+            chaz_ds['radius_max_wind'] = xr.full_like(chaz_ds['pres'], np.nan)
+            chaz_ds['environmental_pressure'] = xr.full_like(chaz_ds['pres'], DEF_ENV_PRESSURE)
             chaz_ds["track_name"] = ("id", [f"{fname}-{track_id.item()[1]}-{track_id.item()[0]}"
-                                            for track_id in chaz_ds.id])
+                                            for track_id in chaz_ds['id']])
 
             # add tracks one by one
             last_perc = 0
-            for cnt, i_track in enumerate(chaz_ds.id_no):
-                perc = 100 * cnt / chaz_ds.id_no.size
+            for cnt, i_track in enumerate(chaz_ds['id_no']):
+                perc = 100 * cnt / chaz_ds['id_no'].size
                 if perc - last_perc >= 10:
                     LOGGER.info("Progress: %d%%", perc)
                     last_perc = perc
-                track_ds = chaz_ds.sel(id=i_track.id.item())
-                track_ds = track_ds.sel(lifelength=track_ds.valid_t.data)
+                track_ds = chaz_ds.sel(id=i_track['id'].item())
+                track_ds = track_ds.sel(lifelength=track_ds['valid_t'].data)
                 data.append(xr.Dataset({
-                    'time_step': ('time', track_ds.time_step.values),
-                    'max_sustained_wind': ('time', track_ds.Mwspd.values),
-                    'central_pressure': ('time', track_ds.pres.values),
-                    'radius_max_wind': ('time', track_ds.radius_max_wind.values),
-                    'environmental_pressure': ('time', track_ds.environmental_pressure.values),
-                    'basin': ('time', np.full(track_ds.time.size, "GB", dtype="<U2")),
+                    'time_step': ('time', track_ds['time_step'].values),
+                    'max_sustained_wind': ('time', track_ds['Mwspd'].values),
+                    'central_pressure': ('time', track_ds['pres'].values),
+                    'radius_max_wind': ('time', track_ds['radius_max_wind'].values),
+                    'environmental_pressure': ('time', track_ds['environmental_pressure'].values),
+                    'basin': ('time', np.full(track_ds['time'].size, "GB", dtype="<U2")),
[...]
-            LineString(np.c_[track.lon, track.lat]) if track.lon.size > 1
-            else Point(track.lon.data, track.lat.data)
+            LineString(np.c_[track['lon'], track['lat']]) if track['lon'].size > 1
+            else Point(track['lon'].data, track['lat'].data)
             for track in self.data
         ])
         gdf.crs = DEF_CRS
@@ -1529,15 +1529,15 @@ def _one_interp_data(track, time_step_h, land_geom=None):
     """
     if time_step_h is None:
         return track
-    if track.time.size < 2:
+    if track['time'].size < 2:
         LOGGER.warning('Track interpolation not done. '
                        'Not enough elements for %s', track.name)
         track_int = track
     else:
-        method = ['linear', 'quadratic', 'cubic'][min(2, track.time.size - 2)]
+        method = ['linear', 'quadratic', 'cubic'][min(2, track['time'].size - 2)]
 
         # handle change of sign in longitude
-        lon = u_coord.lon_normalize(track.lon.copy(), center=0)
+        lon = u_coord.lon_normalize(track['lon'].copy(), center=0)
         if (lon < -170).any() and (lon > 170).any():
             # crosses 180 degrees east/west -> use positive degrees east
             lon[lon < 0] += 360
@@ -1552,14 +1552,14 @@ def _one_interp_data(track, time_step_h, land_geom=None):
         lon_int = lon.resample(time=time_step).interpolate(method)
         lon_int[lon_int > 180] -= 360
         track_int.coords['lon'] = lon_int
-        track_int.coords['lat'] = track.lat.resample(time=time_step)\
+        track_int.coords['lat'] = track['lat'].resample(time=time_step)\
                                        .interpolate(method)
         track_int.attrs['category'] = set_category(
-            track_int.max_sustained_wind.values,
+            track_int['max_sustained_wind'].values,
             track_int.max_sustained_wind_unit)
         # restrict to time steps within original bounds
         track_int = track_int.sel(
-            time=(track.time[0] <= track_int.time) & (track_int.time <= track.time[-1]))
+            time=(track['time'][0] <= track_int['time']) & (track_int['time'] <= track['time'][-1]))
 
     if land_geom:
         track_land_params(track_int, land_geom)
@@ -1703,8 +1703,8 @@ def _read_one_gettelman(nc_data, i_track):
     # construct xarray
     tr_ds = xr.Dataset.from_dataframe(tr_df.set_index('time'))
-    tr_ds.coords['lat'] = ('time', tr_ds.lat.values)
-    tr_ds.coords['lon'] = ('time', tr_ds.lon.values)
+    tr_ds.coords['lat'] = ('time', tr_ds['lat'].values)
+    tr_ds.coords['lon'] = ('time', tr_ds['lon'].values)
     tr_ds['basin'] = tr_ds['basin'].astype('<U2')
[...]
         if lf_point > 0:
             # Assume the landfall started between this and the previous point
-            orig_lf[i_lf][0] = track.lat[lf_point - 1] + \
-                (track.lat[lf_point] - track.lat[lf_point - 1]) / 2
-            orig_lf[i_lf][1] = track.lon[lf_point - 1] + \
-                (track.lon[lf_point] - track.lon[lf_point - 1]) / 2
+            orig_lf[i_lf][0] = track['lat'][lf_point - 1] + \
+                (track['lat'][lf_point] - track['lat'][lf_point - 1]) / 2
+            orig_lf[i_lf][1] = track['lon'][lf_point - 1] + \
+                (track['lon'][lf_point] - track['lon'][lf_point - 1]) / 2
         else:
             # track starts over land, assume first 'landfall' starts here
-            orig_lf[i_lf][0] = track.lat[lf_point]
-            orig_lf[i_lf][1] = track.lon[lf_point]
+            orig_lf[i_lf][0] = track['lat'][lf_point]
+            orig_lf[i_lf][1] = track['lon'][lf_point]
 
     dist = DistanceMetric.get_metric('haversine')
-    nodes1 = np.radians(np.array([track.lat.values[1:],
-                                  track.lon.values[1:]]).transpose())
-    nodes0 = np.radians(np.array([track.lat.values[:-1],
-                                  track.lon.values[:-1]]).transpose())
+    nodes1 = np.radians(np.array([track['lat'].values[1:],
+                                  track['lon'].values[1:]]).transpose())
+    nodes0 = np.radians(np.array([track['lat'].values[:-1],
+                                  track['lon'].values[:-1]]).transpose())
     dist_since_lf[1:] = dist.pairwise(nodes1, nodes0).diagonal()
 
-    dist_since_lf[~track.on_land.values] = 0.0
-    nodes1 = np.array([track.lat.values[sea_land_idx],
-                       track.lon.values[sea_land_idx]]).transpose() / 180 * np.pi
+    dist_since_lf[~track['on_land'].values] = 0.0
+    nodes1 = np.array([track['lat'].values[sea_land_idx],
+                       track['lon'].values[sea_land_idx]]).transpose() / 180 * np.pi
     dist_since_lf[sea_land_idx] = \
         dist.pairwise(nodes1, orig_lf / 180 * np.pi).diagonal()
 
     for sea_land, land_sea in zip(sea_land_idx, land_sea_idx):
@@ -1979,7 +1979,7 @@ def _dist_since_lf(track):
             np.cumsum(dist_since_lf[sea_land:land_sea])
 
     dist_since_lf *= EARTH_RADIUS_KM
-    dist_since_lf[~track.on_land.values] = np.nan
+    dist_since_lf[~track['on_land'].values] = np.nan
 
     return dist_since_lf
 
@@ -2005,13 +2005,13 @@ def _get_landfall_idx(track, include_starting_landfall=False):
         ends over land, the last value is set to track.time.size.
     """
     # Index in land that comes from previous sea index
-    sea_land_idx = np.where(np.diff(track.on_land.astype(int)) == 1)[0] + 1
+    sea_land_idx = np.where(np.diff(track['on_land'].astype(int)) == 1)[0] + 1
     # Index in sea that comes from previous land index
-    land_sea_idx = np.where(np.diff(track.on_land.astype(int)) == -1)[0] + 1
-    if track.on_land[-1]:
+    land_sea_idx = np.where(np.diff(track['on_land'].astype(int)) == -1)[0] + 1
+    if track['on_land'][-1]:
         # track ends over land: add last track point as the end of that landfall
-        land_sea_idx = np.append(land_sea_idx, track.time.size)
-    if track.on_land[0]:
+        land_sea_idx = np.append(land_sea_idx, track['time'].size)
+    if track['on_land'][0]:
         # track starts over land: remove first land-to-sea transition (not a landfall)?
         if include_starting_landfall:
             sea_land_idx = np.append(0, sea_land_idx)
@@ -2294,9 +2294,9 @@ def ibtracs_track_agency(ds_sel):
     agency_map[b''] = agency_map[b'wmo']
     agency_fun = lambda x: agency_map[x]
     if "track_agency" not in ds_sel.data_vars.keys():
-        ds_sel['track_agency'] = ds_sel.wmo_agency.where(ds_sel.wmo_agency != b'',
-                                                         ds_sel.usa_agency)
-    track_agency_ix = xr.apply_ufunc(agency_fun, ds_sel.track_agency, vectorize=True)
+        ds_sel['track_agency'] = ds_sel['wmo_agency'].where(ds_sel['wmo_agency'] != b'',
+                                                            ds_sel['usa_agency'])
+    track_agency_ix = xr.apply_ufunc(agency_fun, ds_sel['track_agency'], vectorize=True)
     return agency_pref, track_agency_ix
 
 def ibtracs_add_official_variable(ibtracs_ds, tc_var, add_3h=False):
@@ -2319,7 +2319,7 @@
     """
     if "nan_var" not in ibtracs_ds.data_vars.keys():
         # add an array full of NaN as a fallback value in the procedure
-        ibtracs_ds['nan_var'] = xr.full_like(ibtracs_ds.lat, np.nan)
+        ibtracs_ds['nan_var'] = xr.full_like(ibtracs_ds['lat'], np.nan)
 
     # determine which of the official agencies report this variable at all
     available_agencies = [a for a in IBTRACS_AGENCIES
@@ -2337,7 +2337,7 @@
     # read from officially responsible agencies that report this variable, but only
     # at official reporting times (usually 6-hourly)
     official_agency_ix = xr.apply_ufunc(
-        lambda x: agency_map[x], ibtracs_ds.wmo_agency, vectorize=True)
+        lambda x: agency_map[x], ibtracs_ds['wmo_agency'], vectorize=True)
     available_cols = ['nan_var'] + [f'{a}_{tc_var}' for a in available_agencies]
     all_vals = ibtracs_ds[available_cols].to_array(dim='agency')
     ibtracs_ds[f'official_{tc_var}'] = all_vals.isel(agency=official_agency_ix)
diff --git a/climada/hazard/tc_tracks_synth.py b/climada/hazard/tc_tracks_synth.py
index 7027a0c66..3ff4290a9 100644
--- a/climada/hazard/tc_tracks_synth.py
+++ b/climada/hazard/tc_tracks_synth.py
@@ -181,15 +181,15 @@ def calc_perturbed_trajectories(tracks,
     # hence sum is nb_synth_tracks * (2 + 2*(size-1)) = nb_synth_tracks * 2 * size
     # https://stats.stackexchange.com/questions/48086/algorithm-to-produce-autocorrelated-uniformly-distributed-number
     if autocorr_ddirection == 0 and autocorr_dspeed == 0:
-        random_vec = [np.random.uniform(size=nb_synth_tracks * (2 * track.time.size))
+        random_vec = [np.random.uniform(size=nb_synth_tracks * (2 * track['time'].size))
                       for track in tracks.data]
     else:
         random_vec = [np.concatenate((np.random.uniform(size=nb_synth_tracks * 2),
-                                      _random_uniform_ac(nb_synth_tracks * (track.time.size - 1),
+                                      _random_uniform_ac(nb_synth_tracks * (track['time'].size - 1),
                                                          autocorr_ddirection, time_step_h),
-                                      _random_uniform_ac(nb_synth_tracks * (track.time.size - 1),
+                                      _random_uniform_ac(nb_synth_tracks * (track['time'].size - 1),
                                                          autocorr_dspeed, time_step_h)))
-                      if track.time.size > 1 else np.random.uniform(size=nb_synth_tracks * 2)
+                      if track['time'].size > 1 else np.random.uniform(size=nb_synth_tracks * 2)
                       for track in tracks.data]
 
     if pool:
@@ -280,7 +280,7 @@ def _one_rnd_walk(track, nb_synth_tracks, max_shift_ini, max_dspeed_rel, max_ddi
         latitudes with a wind speed up to TC category 1.
""" ens_track = list() - n_dat = track.time.size + n_dat = track['time'].size n_seg = n_dat - 1 xy_ini = max_shift_ini * (2 * rnd_vec[:2 * nb_synth_tracks].reshape((2, nb_synth_tracks)) - 1) [dt] = np.unique(track['time_step']) @@ -293,32 +293,32 @@ def _one_rnd_walk(track, nb_synth_tracks, max_shift_ini, max_dspeed_rel, max_ddi # select angular perturbation for that synthetic track i_start_ang = 2 * nb_synth_tracks + i_ens * n_seg - i_end_ang = i_start_ang + track.time.size - 1 + i_end_ang = i_start_ang + track['time'].size - 1 # scale by maximum perturbation and time step in hour (temporal-resolution independent) ang_pert = dt * np.degrees(max_ddirection * (2 * rnd_vec[i_start_ang:i_end_ang] - 1)) ang_pert_cum = np.cumsum(ang_pert) # select translational speed perturbation for that synthetic track i_start_trans = 2 * nb_synth_tracks + nb_synth_tracks * n_seg + i_ens * n_seg - i_end_trans = i_start_trans + track.time.size - 1 + i_end_trans = i_start_trans + track['time'].size - 1 # scale by maximum perturbation and time step in hour (temporal-resolution independent) trans_pert = 1 + max_dspeed_rel * (2 * rnd_vec[i_start_trans:i_end_trans] - 1) # get bearings and angular distance for the original track - bearings = _get_bearing_angle(i_track.lon.values, i_track.lat.values) - angular_dist = climada.util.coordinates.dist_approx(i_track.lat.values[:-1, None], - i_track.lon.values[:-1, None], - i_track.lat.values[1:, None], - i_track.lon.values[1:, None], + bearings = _get_bearing_angle(i_track['lon'].values, i_track['lat'].values) + angular_dist = climada.util.coordinates.dist_approx(i_track['lat'].values[:-1, None], + i_track['lon'].values[:-1, None], + i_track['lat'].values[1:, None], + i_track['lon'].values[1:, None], method="geosphere", units="degree")[:, 0, 0] # apply perturbation to lon / lat - new_lon = np.zeros_like(i_track.lon.values) - new_lat = np.zeros_like(i_track.lat.values) - new_lon[0] = i_track.lon.values[0] + xy_ini[0, i_ens] - new_lat[0] = i_track.lat.values[0] + xy_ini[1, i_ens] - last_idx = i_track.time.size + new_lon = np.zeros_like(i_track['lon'].values) + new_lat = np.zeros_like(i_track['lat'].values) + new_lon[0] = i_track['lon'].values[0] + xy_ini[0, i_ens] + new_lat[0] = i_track['lat'].values[0] + xy_ini[1, i_ens] + last_idx = i_track['time'].size for i in range(0, len(new_lon) - 1): new_lon[i + 1], new_lat[i + 1] = \ _get_destination_points(new_lon[i], new_lat[i], @@ -330,7 +330,7 @@ def _one_rnd_walk(track, nb_synth_tracks, max_shift_ini, max_dspeed_rel, max_ddi if i+2 < last_idx and (new_lat[i + 1] > 70 or new_lat[i + 1] < -70): last_idx = i + 2 # end the track here - max_wind_end = i_track.max_sustained_wind.values[last_idx] + max_wind_end = i_track['max_sustained_wind'].values[last_idx] ss_scale_end = climada.hazard.tc_tracks.set_category(max_wind_end, i_track.max_sustained_wind_unit) # TC category at ending point should not be higher than 1 @@ -344,8 +344,8 @@ def _one_rnd_walk(track, nb_synth_tracks, max_shift_ini, max_dspeed_rel, max_ddi # make sure longitude values are within (-180, 180) climada.util.coordinates.lon_normalize(new_lon, center=0.0) - i_track.lon.values = new_lon - i_track.lat.values = new_lat + i_track['lon'].values = new_lon + i_track['lat'].values = new_lat i_track.attrs['orig_event_flag'] = False i_track.attrs['name'] = f"{i_track.attrs['name']}_gen{i_ens + 1}" i_track.attrs['sid'] = f"{i_track.attrs['sid']}_gen{i_ens + 1}" @@ -642,8 +642,8 @@ def _apply_land_decay(tracks, v_rel, p_rel, land_geom, s_rel=True, if check_plot: orig_wind, 
         for track in sy_tracks:
-            orig_wind.append(np.copy(track.max_sustained_wind.values))
-            orig_pres.append(np.copy(track.central_pressure.values))
+            orig_wind.append(np.copy(track['max_sustained_wind'].values))
+            orig_pres.append(np.copy(track['central_pressure'].values))
 
     if pool:
         chunksize = max(min(len(tracks) // pool.ncpus, 1000), 1)
@@ -696,18 +696,18 @@ def _decay_values(track, land_geom, s_rel):
     sea_land_idx, land_sea_idx = climada.hazard.tc_tracks._get_landfall_idx(track)
     if sea_land_idx.size:
         for sea_land, land_sea in zip(sea_land_idx, land_sea_idx):
-            v_landfall = track.max_sustained_wind[sea_land - 1].values
+            v_landfall = track['max_sustained_wind'][sea_land - 1].values
             ss_scale = climada.hazard.tc_tracks.set_category(v_landfall,
                                                              track.max_sustained_wind_unit)
-            v_land = track.max_sustained_wind[sea_land - 1:land_sea].values
+            v_land = track['max_sustained_wind'][sea_land - 1:land_sea].values
             if v_land[0] > 0:
                 v_land = (v_land[1:] / v_land[0]).tolist()
             else:
                 v_land = v_land[1:].tolist()
 
-            p_landfall = float(track.central_pressure[sea_land - 1].values)
-            p_land = track.central_pressure[sea_land - 1:land_sea].values
+            p_landfall = float(track['central_pressure'][sea_land - 1].values)
+            p_land = track['central_pressure'][sea_land - 1:land_sea].values
             p_land = (p_land[1:] / p_land[0]).tolist()
 
             p_land_s = _calc_decay_ps_value(
@@ -719,12 +719,12 @@ def _decay_values(track, land_geom, s_rel):
                 p_lf[ss_scale] = (array.array('f', p_land_s),
                                   array.array('f', p_land))
                 x_val[ss_scale] = array.array('f',
-                                              track.dist_since_lf[sea_land:land_sea])
+                                              track['dist_since_lf'][sea_land:land_sea])
             else:
                 v_lf[ss_scale].extend(v_land)
                 p_lf[ss_scale][0].extend(p_land_s)
                 p_lf[ss_scale][1].extend(p_land)
-                x_val[ss_scale].extend(track.dist_since_lf[sea_land:land_sea])
+                x_val[ss_scale].extend(track['dist_since_lf'][sea_land:land_sea])
 
     return v_lf, p_lf, x_val
 
@@ -870,8 +870,8 @@ def _apply_decay_coeffs(track, v_rel, p_rel, land_geom, s_rel):
         return track
     for idx, (sea_land, land_sea) \
             in enumerate(zip(sea_land_idx, land_sea_idx)):
-        v_landfall = track.max_sustained_wind[sea_land - 1].values
-        p_landfall = float(track.central_pressure[sea_land - 1].values)
+        v_landfall = track['max_sustained_wind'][sea_land - 1].values
+        p_landfall = float(track['central_pressure'][sea_land - 1].values)
         ss_scale = climada.hazard.tc_tracks.set_category(v_landfall,
                                                          track.max_sustained_wind_unit)
         if land_sea - sea_land == 1:
@@ -880,10 +880,10 @@ def _apply_decay_coeffs(track, v_rel, p_rel, land_geom, s_rel):
         if S <= 1:
             # central_pressure at start of landfall > env_pres after landfall:
             # set central_pressure to environmental pressure during whole lf
-            track.central_pressure[sea_land:land_sea] = track.environmental_pressure[sea_land:land_sea]
+            track['central_pressure'][sea_land:land_sea] = track['environmental_pressure'][sea_land:land_sea]
         else:
             p_decay = _decay_p_function(S, p_rel[ss_scale][1],
-                                        track.dist_since_lf[sea_land:land_sea].values)
+                                        track['dist_since_lf'][sea_land:land_sea].values)
             # dont apply decay if it would decrease central pressure
             if np.any(p_decay < 1):
                 LOGGER.info('Landfall decay would decrease pressure for '
                             'track id %s. This is '
                             'unphysical and therefore landfall decay is not '
                             'applied in this case.',
                             track.sid)
-                p_decay[p_decay < 1] = (track.central_pressure[sea_land:land_sea][p_decay < 1]
+                p_decay[p_decay < 1] = (track['central_pressure'][sea_land:land_sea][p_decay < 1]
                                         / p_landfall)
-            track.central_pressure[sea_land:land_sea] = p_landfall * p_decay
+            track['central_pressure'][sea_land:land_sea] = p_landfall * p_decay
 
         v_decay = _decay_v_function(v_rel[ss_scale],
-                                    track.dist_since_lf[sea_land:land_sea].values)
+                                    track['dist_since_lf'][sea_land:land_sea].values)
         # dont apply decay if it would increase wind speeds
         if np.any(v_decay > 1):
             # should not happen unless v_rel is negative
             LOGGER.info('Landfall decay would increase wind speeds for '
                         'track id %s. This behavious in unphysical and '
                         'therefore landfall decay is not applied in this '
                         'case.',
-                        track.sid)
+                        track.attrs['sid'])
-            v_decay[v_decay > 1] = (track.max_sustained_wind[sea_land:land_sea][v_decay > 1]
+            v_decay[v_decay > 1] = (track['max_sustained_wind'][sea_land:land_sea][v_decay > 1]
                                     / v_landfall)
         track.max_sustained_wind[sea_land:land_sea] = v_landfall * v_decay
 
         # correct values of sea after a landfall (until next landfall, if any)
-        if land_sea < track.time.size:
+        if land_sea < track['time'].size:
             if idx + 1 < sea_land_idx.size:
                 # if there is a next landfall, correct until last point before
                 # reaching land again
@@ -921,23 +921,23 @@ def _apply_decay_coeffs(track, v_rel, p_rel, land_geom, s_rel):
                 # the track
                 end_cor = track.time.size
             rndn = 0.1 * float(np.abs(np.random.normal(size=1) * 5) + 6)
-            r_diff = track.central_pressure[land_sea].values - \
-                track.central_pressure[land_sea - 1].values + rndn
-            track.central_pressure[land_sea:end_cor] += - r_diff
+            r_diff = track['central_pressure'][land_sea].values - \
+                track['central_pressure'][land_sea - 1].values + rndn
+            track['central_pressure'][land_sea:end_cor] += - r_diff
             rndn = rndn * 10  # mean value 10
-            r_diff = track.max_sustained_wind[land_sea].values - \
-                track.max_sustained_wind[land_sea - 1].values - rndn
-            track.max_sustained_wind[land_sea:end_cor] += - r_diff
+            r_diff = track['max_sustained_wind'][land_sea].values - \
+                track['max_sustained_wind'][land_sea - 1].values - rndn
+            track['max_sustained_wind'][land_sea:end_cor] += - r_diff
 
     # correct limits
     warnings.filterwarnings('ignore')
-    cor_p = track.central_pressure.values > track.environmental_pressure.values
-    track.central_pressure[cor_p] = track.environmental_pressure[cor_p]
-    track.max_sustained_wind[track.max_sustained_wind < 0] = 0
+    cor_p = track['central_pressure'].values > track['environmental_pressure'].values
+    track['central_pressure'][cor_p] = track['environmental_pressure'][cor_p]
+    track['max_sustained_wind'][track['max_sustained_wind'] < 0] = 0
     track.attrs['category'] = climada.hazard.tc_tracks.set_category(
-        track.max_sustained_wind.values, track.max_sustained_wind_unit)
+        track['max_sustained_wind'].values, track.max_sustained_wind_unit)
 
     return track
 
@@ -973,9 +973,9 @@ def _check_apply_decay_plot(all_tracks, syn_orig_wind, syn_orig_pres):
 
 def _calc_decay_ps_value(track, p_landfall, pos, s_rel):
     if s_rel:
-        p_land_s = track.environmental_pressure[pos].values
+        p_land_s = track['environmental_pressure'][pos].values
     else:
-        p_land_s = track.central_pressure[pos].values
+        p_land_s = track['central_pressure'][pos].values
     return float(p_land_s / p_landfall)
 
@@ -1041,34 +1041,34 @@ def _check_apply_decay_syn_plot(sy_tracks, syn_orig_wind,
         sea_land_idx, land_sea_idx = climada.hazard.tc_tracks._get_landfall_idx(track)
         if sea_land_idx.size:
             for sea_land, land_sea in zip(sea_land_idx, land_sea_idx):
-                v_lf = track.max_sustained_wind[sea_land - 1].values
-                p_lf = track.central_pressure[sea_land - 1].values
+                v_lf = track['max_sustained_wind'][sea_land - 1].values
+                p_lf = track['central_pressure'][sea_land - 1].values
 
                 scale_thresholds = climada.hazard.tc_tracks.SAFFIR_SIM_CAT
                 ss_scale_idx = np.where(v_lf < scale_thresholds)[0][0]
-                on_land = np.arange(track.time.size)[sea_land:land_sea]
+                on_land = np.arange(track['time'].size)[sea_land:land_sea]
 
-                graph_v_a.plot(on_land, track.max_sustained_wind[on_land],
+                graph_v_a.plot(on_land, track['max_sustained_wind'][on_land],
                                'o', c=climada.hazard.tc_tracks.CAT_COLORS[ss_scale_idx])
                 graph_v_b.plot(on_land, orig_wind[on_land],
                                'o', c=climada.hazard.tc_tracks.CAT_COLORS[ss_scale_idx])
-                graph_p_a.plot(on_land, track.central_pressure[on_land],
+                graph_p_a.plot(on_land, track['central_pressure'][on_land],
                                'o', c=climada.hazard.tc_tracks.CAT_COLORS[ss_scale_idx])
                 graph_p_b.plot(on_land, orig_pres[on_land],
                                'o', c=climada.hazard.tc_tracks.CAT_COLORS[ss_scale_idx])
-                graph_pd_a.plot(track.dist_since_lf[on_land],
-                                track.central_pressure[on_land] / p_lf,
+                graph_pd_a.plot(track['dist_since_lf'][on_land],
+                                track['central_pressure'][on_land] / p_lf,
                                 'o', c=climada.hazard.tc_tracks.CAT_COLORS[ss_scale_idx])
-                graph_ped_a.plot(track.dist_since_lf[on_land],
-                                 track.environmental_pressure[on_land] -
-                                 track.central_pressure[on_land],
+                graph_ped_a.plot(track['dist_since_lf'][on_land],
+                                 track['environmental_pressure'][on_land] -
+                                 track['central_pressure'][on_land],
                                  'o', c=climada.hazard.tc_tracks.CAT_COLORS[ss_scale_idx])
 
-            on_sea = np.arange(track.time.size)[~track.on_land]
-            graph_v_a.plot(on_sea, track.max_sustained_wind[on_sea],
+            on_sea = np.arange(track['time'].size)[~track['on_land']]
+            graph_v_a.plot(on_sea, track['max_sustained_wind'][on_sea],
                            'o', c='k', markersize=5)
             graph_v_b.plot(on_sea, orig_wind[on_sea],
                            'o', c='k', markersize=5)
-            graph_p_a.plot(on_sea, track.central_pressure[on_sea],
+            graph_p_a.plot(on_sea, track['central_pressure'][on_sea],
                            'o', c='k', markersize=5)
             graph_p_b.plot(on_sea, orig_pres[on_sea],
                            'o', c='k', markersize=5)
@@ -1104,27 +1104,27 @@ def _check_apply_decay_hist_plot(hist_tracks):
         if sea_land_idx.size:
             for sea_land, land_sea in zip(sea_land_idx, land_sea_idx):
                 scale_thresholds = climada.hazard.tc_tracks.SAFFIR_SIM_CAT
-                ss_scale_idx = np.where(track.max_sustained_wind[sea_land - 1].values
+                ss_scale_idx = np.where(track['max_sustained_wind'][sea_land - 1].values
                                         < scale_thresholds)[0][0]
-                on_land = np.arange(track.time.size)[sea_land:land_sea]
+                on_land = np.arange(track['time'].size)[sea_land:land_sea]
 
-                graph_hv.add_curve(on_land, track.max_sustained_wind[on_land],
+                graph_hv.add_curve(on_land, track['max_sustained_wind'][on_land],
                                    'o', c=climada.hazard.tc_tracks.CAT_COLORS[ss_scale_idx])
-                graph_hp.add_curve(on_land, track.central_pressure[on_land],
+                graph_hp.add_curve(on_land, track['central_pressure'][on_land],
                                    'o', c=climada.hazard.tc_tracks.CAT_COLORS[ss_scale_idx])
-                graph_hpd_a.plot(track.dist_since_lf[on_land],
-                                 track.central_pressure[on_land]
-                                 / track.central_pressure[sea_land - 1].values,
+                graph_hpd_a.plot(track['dist_since_lf'][on_land],
+                                 track['central_pressure'][on_land]
+                                 / track['central_pressure'][sea_land - 1].values,
                                  'o', c=climada.hazard.tc_tracks.CAT_COLORS[ss_scale_idx])
-                graph_hped_a.plot(track.dist_since_lf[on_land],
-                                  track.environmental_pressure[on_land] -
-                                  track.central_pressure[on_land],
+                graph_hped_a.plot(track['dist_since_lf'][on_land],
+                                  track['environmental_pressure'][on_land] -
+                                  track['central_pressure'][on_land],
                                   'o', c=climada.hazard.tc_tracks.CAT_COLORS[ss_scale_idx])
 
-            on_sea = np.arange(track.time.size)[~track.on_land]
-            graph_hp.plot(on_sea, track.central_pressure[on_sea],
+            on_sea = np.arange(track['time'].size)[~track['on_land']]
+            graph_hp.plot(on_sea, track['central_pressure'][on_sea],
                           'o', c='k', markersize=5)
-            graph_hv.plot(on_sea, track.max_sustained_wind[on_sea],
+            graph_hv.plot(on_sea, track['max_sustained_wind'][on_sea],
                           'o', c='k', markersize=5)
 
     return graph_hv, graph_hp, graph_hpd_a, graph_hped_a
diff --git a/climada/hazard/trop_cyclone/trop_cyclone.py b/climada/hazard/trop_cyclone/trop_cyclone.py
index 6fc8347e9..d11aec4f4 100644
--- a/climada/hazard/trop_cyclone/trop_cyclone.py
+++ b/climada/hazard/trop_cyclone/trop_cyclone.py
@@ -290,7 +290,7 @@ def from_tracks(
         if 'dist_coast' not in centroids.gdf.columns:
             dist_coast = centroids.get_dist_coast()
         else:
-            dist_coast = centroids.gdf.dist_coast.values
+            dist_coast = centroids.gdf['dist_coast'].values
         [idx_centr_filter] = (
             (dist_coast <= max_dist_inland_km * 1000)
             & (np.abs(centroids.lat) <= max_latitude)
@@ -463,23 +463,23 @@ def video_intensity(
         if not track:
             raise ValueError(f'{track_name} not found in track data.')
         idx_plt = np.argwhere(
-            (track.lon.values < centroids.total_bounds[2] + 1)
-            & (centroids.total_bounds[0] - 1 < track.lon.values)
-            & (track.lat.values < centroids.total_bounds[3] + 1)
-            & (centroids.total_bounds[1] - 1 < track.lat.values)
+            (track['lon'].values < centroids.total_bounds[2] + 1)
+            & (centroids.total_bounds[0] - 1 < track['lon'].values)
+            & (track['lat'].values < centroids.total_bounds[3] + 1)
+            & (centroids.total_bounds[1] - 1 < track['lat'].values)
         ).reshape(-1)
 
         tc_list = []
         tr_coord = {'lat': [], 'lon': []}
         for node in range(idx_plt.size - 2):
             tr_piece = track.sel(
-                time=slice(track.time.values[idx_plt[node]],
-                           track.time.values[idx_plt[node + 2]]))
+                time=slice(track['time'].values[idx_plt[node]],
+                           track['time'].values[idx_plt[node + 2]]))
             tr_piece.attrs['n_nodes'] = 2  # plot only one node
             tr_sel = TCTracks()
             tr_sel.append(tr_piece)
-            tr_coord['lat'].append(tr_sel.data[0].lat.values[:-1])
-            tr_coord['lon'].append(tr_sel.data[0].lon.values[:-1])
+            tr_coord['lat'].append(tr_sel.data[0]['lat'].values[:-1])
+            tr_coord['lon'].append(tr_sel.data[0]['lon'].values[:-1])
 
             tc_tmp = cls.from_tracks(tr_sel, centroids=centroids)
             tc_tmp.event_name = [
@@ -525,8 +525,8 @@ def frequency_from_tracks(self, tracks: List):
         """
         if not tracks:
             return
-        year_max = np.amax([t.time.dt.year.values.max() for t in tracks])
-        year_min = np.amin([t.time.dt.year.values.min() for t in tracks])
+        year_max = np.amax([t['time'].dt.year.values.max() for t in tracks])
+        year_min = np.amin([t['time'].dt.year.values.min() for t in tracks])
         year_delta = year_max - year_min + 1
         num_orig = np.count_nonzero(self.orig)
         ens_size = (self.event_id.size / num_orig) if num_orig > 0 else 1
@@ -614,16 +614,16 @@ def from_single_track(
         new_haz.fraction = sparse.csr_matrix(new_haz.intensity.shape)
         # store first day of track as date
         new_haz.date = np.array([
-            dt.datetime(track.time.dt.year.values[0],
-                        track.time.dt.month.values[0],
-                        track.time.dt.day.values[0]).toordinal()
+            dt.datetime(track['time'].dt.year.values[0],
+                        track['time'].dt.month.values[0],
+                        track['time'].dt.day.values[0]).toordinal()
         ])
         new_haz.orig = np.array([track.orig_event_flag])
         new_haz.category = np.array([track.category])
         # users that pickle TCTracks objects might still have data with the legacy basin attribute,
         # so we have to deal with it here
-        new_haz.basin = [track.basin if isinstance(track.basin, str)
-                         else str(track.basin.values[0])]
+        new_haz.basin = [track.basin if isinstance(track.basin, str)
+                         else str(track['basin'].values[0])]
 
         return new_haz
 
     def _apply_knutson_criterion(
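Background on the pattern this patch applies throughout (an illustrative sketch, not part of the diff; the dataset, variable names, and attribute value below are hypothetical): xarray resolves attribute access `ds.name` against variables, coordinates, methods, and attrs, so a data variable can be shadowed by the Dataset API, while `ds['name']` always addresses the data variable. The one thing getitem does not cover is metadata in `.attrs`, which is why spots like `track.sid` and the legacy `track.basin` check above must stay attribute- or `attrs`-based.

    import numpy as np
    import xarray as xr

    # hypothetical dataset mimicking a TC track
    ds = xr.Dataset(
        {"wind": ("time", np.array([10.0, 25.0, 40.0]))},
        coords={"time": np.arange(3)},
        attrs={"sid": "1980214N11330"},  # hypothetical storm id in attrs
    )

    assert ds["wind"].equals(ds.wind)   # both spellings reach the variable

    ds["mean"] = ("time", np.zeros(3))  # a variable named like a Dataset method
    print(type(ds.mean))                # bound method Dataset.mean, not the data
    print(ds["mean"].values)            # [0. 0. 0.] -- getitem is unambiguous

    print(ds.sid)                       # resolved from ds.attrs
    # ds["sid"] would raise KeyError: attrs are not variables, hence the
    # track.attrs['sid'] fix in tc_tracks_synth above.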