7 | 7 | import os |
8 | 8 |
9 | 9 | location = "Cape_Hatteras" |
10 | | -path = f"/storage/shared/oceanparcels/output_data/data_Claudio/NEMO_Ensemble/" |
| 10 | +path = "/Volumes/Claudio SSD/Ensemble_article_data/" |
11 | 11 |
12 | | -Latitude_limit = 53 |
13 | | -Longitude_limit = None |
| 12 | +Latitude_limit = None |
| 13 | +Longitude_limit = -40 |
14 | 14 |
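Note that the two limits act as a mutually exclusive switch: whichever one is not None selects whether crossings are detected at a latitude section (the old 53N default) or a longitude section (the new 40W default). A minimal guard, not present in the original script, would make that invariant explicit:

# Hypothetical sanity check (not in the script): exactly one of the
# two section limits should be set at a time.
assert (Latitude_limit is None) != (Longitude_limit is None), \
    "Set exactly one of Latitude_limit / Longitude_limit"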
15 | 15 | # %% Spatial analysis |
16 | 16 | distributions = {} |
17 | 17 |
18 | 18 | total_members = 50 |
19 | 19 |
20 | | -for delta_r in [0.1, 1., 2.]: |
| 20 | +for K_h in [10]: |
21 | 21 | for member in tqdm(range(1, total_members + 1)): |
22 | | - print(f"Member: {member:03d}, Delta_r: {delta_r}") |
23 | | - file_path = path + f"{location}/spatial_long/dr_{delta_r*100:03.0f}/{location}_dr{delta_r*100:03.0f}_m{member:03d}.zarr" |
| 22 | + print(f"Member: {member:03d}, K_h: {K_h}") |
| 23 | + file_path = path + f"simulations/diff_Kh_10/{location}_diff_Kh_{K_h:01d}_m{member:03d}.nc" |
24 | 24 |
25 | | - pset = xr.open_zarr(file_path) |
| 25 | + pset = xr.load_dataset(file_path) |
26 | 26 | N_particles = len(pset.trajectory) |
27 | 27 |
28 | 28 | if Latitude_limit is not None: |
53 | 53 |
54 | 54 | # SAVE DISTRIBUTIONS in a pickle file |
55 | 55 | if Latitude_limit is not None: |
56 | | - save_path = path + f"analysis/connectivity/dr_{delta_r*100:03.0f}_{Latitude_limit}N/Distributions_dr{delta_r*100:03.0f}_m{member:03d}.pkl" |
| 56 | + save_path = path + f"analysis/connectivity/Kh_{K_h:01d}_{Latitude_limit}N/Distributions_Kh_{K_h:01d}_m{member:03d}.pkl" |
57 | 57 | elif Longitude_limit is not None: |
58 | | - save_path = path + f"analysis/connectivity/dr_{delta_r*100:03.0f}_{abs(Longitude_limit)}W/Distributions_dr{delta_r*100:03.0f}_m{member:03d}.pkl" |
| 58 | + save_path = path + f"analysis/connectivity/Kh_{K_h:01d}_{abs(Longitude_limit)}W/Distributions_Kh_{K_h:01d}_m{member:03d}.pkl" |
59 | 59 |
60 | | - # save_path = f"/storage/shared/oceanparcels/output_data/data_Claudio/NEMO_Ensemble/analysis/connectivity/dr_{delta_r*100:03.0f}/Distributions_dr{delta_r*100:03.0f}_m{member:03d}.pkl" |
61 | 60 | with open(save_path, "wb") as f: |
62 | 61 | pickle.dump(distributions, f) |
63 | 62 |
64 | 63 | else: |
65 | 64 | print(f"--EMPTY--") |
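Two things change inside the loop above: the trajectories now come from NetCDF output of a K_h = 10 diffusion run instead of zarr stores, and they are read eagerly. A rough contrast of the two xarray entry points, with paths that are only illustrative of the two naming schemes:

# Old: lazy zarr store; variables stay on disk until .load()/.values.
# pset = xr.open_zarr(path + "Cape_Hatteras/spatial_long/dr_100/Cape_Hatteras_dr100_m001.zarr")

# New: eager NetCDF read; the whole file is pulled into memory, so the
# later per-variable .load() calls inside the loop become no-ops.
pset = xr.load_dataset(path + "simulations/diff_Kh_10/Cape_Hatteras_diff_Kh_10_m001.nc")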
66 | | - |
67 | | -# %% Temporal analysis |
68 | | - |
69 | | -distributions = {} |
70 | | -total_members = 50 |
71 | | - |
72 | | -for week in [4, 12, 20]: |
73 | | - for member in tqdm(range(1, total_members + 1)): |
74 | | - print(f"Member: {member:03d}, Week: {week}") |
75 | | - file_path = path + f"{location}/temporal_long/W_{week:01d}/{location}_W{week:01d}_m{member:03d}.zarr" |
76 | | - |
77 | | - pset = xr.open_zarr(file_path) |
78 | | - N_particles = len(pset.trajectory) |
79 | | - |
80 | | - if Latitude_limit is not None: |
81 | | - lats = pset.lat.load().values |
82 | | - p_index, t_index = np.where(lats[:, :] > Latitude_limit) |
83 | | - elif Longitude_limit is not None: |
84 | | - lons = pset.lon.load().values |
85 | | - p_index, t_index = np.where(lons[:, :] > Longitude_limit) |
86 | | - |
87 | | - subpolar_traj = np.unique(p_index) |
88 | | - drift_time = [] |
89 | | - |
90 | | - if len(subpolar_traj) > 0: |
91 | | - for i in subpolar_traj: |
92 | | - idx_t = np.where(p_index == i)[0][0] |
93 | | - drift_time.append(t_index[idx_t]) |
94 | | - |
95 | | - drift_time = np.array(drift_time) |
96 | | - |
97 | | - depths = pset.z.load().values |
98 | | - depths = depths[subpolar_traj, drift_time] |
99 | | - |
100 | | - distributions["member"] = member |
101 | | - distributions["drift_time"] = drift_time |
102 | | - distributions["depths"] = depths |
103 | | - distributions["trajectory"] = np.unique(p_index) |
104 | | - |
105 | | - # SAVE DISTRIBUTIONS in a pickle file |
106 | | - if Latitude_limit is not None: |
107 | | - save_path = path + f"analysis/connectivity/W_{week:02d}_{Latitude_limit}N/Distributions_W{week:02d}_m{member:03d}.pkl" |
108 | | - elif Longitude_limit is not None: |
109 | | - save_path = path + f"analysis/connectivity/W_{week:02d}_{abs(Longitude_limit)}W/Distributions_W{week:02d}_m{member:03d}.pkl" |
110 | | - |
111 | | - with open(save_path, "wb") as f: |
112 | | - pickle.dump(distributions, f) |
113 | | - |
114 | | - else: |
115 | | - print(f"--EMPTY--") |
116 | | - |
117 | 65 |
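The temporal block removed above used the same crossing logic the spatial loop keeps (its body is collapsed between diff lines 28 and 53): np.where over the (particle, obs) position matrix returns every sample past the limit, and the first hit per particle gives its drift time and the depth at which it crossed. A condensed sketch under the new longitude default, with names slightly simplified:

lons = pset.lon.values                       # shape (particles, obs)
p_index, t_index = np.where(lons > Longitude_limit)
crossed = np.unique(p_index)                 # particles that ever pass 40W
# np.where scans row-major, so the first match per particle is its
# earliest crossing; that observation index is the drift time.
# (The script also guards for the len(crossed) == 0 case.)
drift_time = np.array([t_index[p_index == i][0] for i in crossed])
depths = pset.z.values[crossed, drift_time]  # depth at first crossing
distributions = {"member": member, "drift_time": drift_time,
                 "depths": depths, "trajectory": crossed}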
118 | 66 | #%% Build the Pandas Dataframes from the pickle files |
119 | 67 | # ____________________Spatial__________________________ |
132 | 80 | median_depth = np.zeros(N_members) |
133 | 81 | std_depth = np.zeros(N_members) |
134 | 82 |
135 | | -for delta_r in [0.1, 1., 2.]: |
| 83 | +for K_h in [10]: |
136 | 84 | for member in range(1, N_members+1): |
137 | 85 |
138 | 86 | if Latitude_limit is not None: |
139 | | - pkl_path = path + f"analysis/connectivity/dr_{delta_r*100:03.0f}_{Latitude_limit}N/Distributions_dr{delta_r*100:03.0f}_m{member:03d}.pkl" |
| 87 | + pkl_path = path + f"analysis/connectivity/Kh_{K_h:01d}_{Latitude_limit}N/Distributions_Kh_{K_h:01d}_m{member:03d}.pkl" |
140 | 88 | elif Longitude_limit is not None: |
141 | | - pkl_path = path + f"analysis/connectivity/dr_{delta_r*100:03.0f}_{abs(Longitude_limit)}W/Distributions_dr{delta_r*100:03.0f}_m{member:03d}.pkl" |
| 89 | + pkl_path = path + f"analysis/connectivity/Kh_{K_h:01d}_{abs(Longitude_limit)}W/Distributions_Kh_{K_h:01d}_m{member:03d}.pkl" |
142 | 90 |
143 | 91 |
144 | 92 | if os.path.exists(pkl_path): |
184 | 132 | stats_df = pd.DataFrame(stats) |
185 | 133 |
186 | 134 | if Latitude_limit is not None: |
187 | | - save_csv_path = path + f"analysis/connectivity/Stats/Stats_dr{delta_r*100:03.0f}_{Latitude_limit}N.csv" |
| 135 | + save_csv_path = path + f"analysis/connectivity/Stats/Stats_Kh_{K_h:01d}_{Latitude_limit}N.csv" |
188 | 136 | elif Longitude_limit is not None: |
189 | | - save_csv_path = path + f"analysis/connectivity/Stats/Stats_dr{delta_r*100:03.0f}_{abs(Longitude_limit)}W.csv" |
190 | | - |
191 | | - stats_df.to_csv(save_csv_path) |
192 | | - print(f"Saved {save_csv_path}") |
193 | | - |
194 | | -#%% ___________________Temporal__________________________ |
195 | | -N_members = 50 |
196 | | - |
197 | | -stats = {} |
198 | | - |
199 | | -n_members = np.arange(1, N_members + 1) |
200 | | - |
201 | | -for week in [12, 20]: |
202 | | - counts = np.zeros(N_members) |
203 | | - median_time = np.zeros(N_members) |
204 | | - mean_time = np.zeros(N_members) |
205 | | - min_time = np.zeros(N_members) |
206 | | - std_time = np.zeros(N_members) |
| 137 | + save_csv_path = path + f"analysis/connectivity/Stats/Stats_Kh_{K_h:01d}_{abs(Longitude_limit)}W.csv" |
207 | 138 |
208 | | - mean_depth = np.zeros(N_members) |
209 | | - median_depth = np.zeros(N_members) |
210 | | - std_depth = np.zeros(N_members) |
211 | | - |
212 | | - |
213 | | - for member in range(1, N_members + 1): |
214 | | - |
215 | | - if Latitude_limit is not None: |
216 | | - pkl_path = path + f"analysis/connectivity/W_{week:02d}_{Latitude_limit}N/Distributions_W{week:02d}_m{member:03d}.pkl" |
217 | | - elif Longitude_limit is not None: |
218 | | - pkl_path = path + f"analysis/connectivity/W_{week:02d}_{abs(Longitude_limit)}W/Distributions_W{week:02d}_m{member:03d}.pkl" |
219 | | - |
220 | | - |
221 | | - if os.path.exists(pkl_path): |
222 | | - with open(pkl_path, "rb") as f: |
223 | | - distributions = pickle.load(f) |
224 | | - |
225 | | - drift_time = distributions["drift_time"] |
226 | | - print(f"Member {member} week {week}. Number of particles: {len(drift_time)}") |
227 | | - depths = distributions["depths"] |
228 | | - trajectory = distributions["trajectory"] |
229 | | - |
230 | | - |
231 | | - median_time[member - 1] = np.median(drift_time) |
232 | | - mean_time[member - 1] = np.mean(drift_time) |
233 | | - min_time[member - 1] = np.min(drift_time) |
234 | | - std_time[member - 1] = np.std(drift_time) |
235 | | - counts[member - 1] = len(drift_time) |
236 | | - |
237 | | - mean_depth[member - 1] = np.mean(depths) |
238 | | - median_depth[member - 1] = np.median(depths) |
239 | | - std_depth[member - 1] = np.std(depths) |
240 | | - else: |
241 | | - print(f"File {pkl_path} does not exist. Skipping member {member}.") |
242 | | - |
243 | | - median_time[member - 1] = np.nan |
244 | | - mean_time[member - 1] = np.nan |
245 | | - min_time[member - 1] = np.nan |
246 | | - std_time[member - 1] = np.nan |
247 | | - counts[member - 1] = 0 |
248 | | - |
249 | | - mean_depth[member - 1] = np.nan |
250 | | - median_depth[member - 1] = np.nan |
251 | | - std_depth[member - 1] = np.nan |
252 | | - |
253 | | - stats["subset"] = n_members |
254 | | - stats["counts"] = counts |
255 | | - stats["median_time"] = median_time |
256 | | - stats["mean_time"] = mean_time |
257 | | - stats["min_time"] = min_time |
258 | | - stats["std_time"] = std_time |
259 | | - stats["mean_depth"] = mean_depth |
260 | | - stats["median_depth"] = median_depth |
261 | | - stats["std_depth"] = std_depth |
262 | | - |
263 | | - stats_df = pd.DataFrame(stats) |
264 | | - |
265 | | - if Latitude_limit is not None: |
266 | | - save_csv_path = path + f"analysis/connectivity/Stats/Stats_W{week:02d}_{Latitude_limit}N.csv" |
267 | | - elif Longitude_limit is not None: |
268 | | - save_csv_path = path + f"analysis/connectivity/Stats/Stats_W{week:02d}_{abs(Longitude_limit)}W.csv" |
269 | | - |
270 | 139 | stats_df.to_csv(save_csv_path) |
271 | 140 | print(f"Saved {save_csv_path}") |
272 | 141 |
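With the new defaults (K_h = 10, Longitude_limit = -40) the format specifiers resolve to "Kh_10" and "40", so the CSV lands at analysis/connectivity/Stats/Stats_Kh_10_40W.csv. An illustrative read-back, assuming the spatial table shares the column layout of the removed temporal block (counts, median_time, ..., std_depth):

# Hypothetical check of one saved stats table; index_col=0 recovers the
# default integer index written by to_csv above.
stats_df = pd.read_csv(path + "analysis/connectivity/Stats/Stats_Kh_10_40W.csv",
                       index_col=0)
print(stats_df[["counts", "median_time", "mean_depth"]].describe())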