@@ -197,6 +197,186 @@ def test_read_sp3_correct_svs_read_when_ev_ep_present(self, mock_file):
    # TODO Add test(s) for correctly reading header fundamentals (ACC, ORB_TYPE, etc.)
    # TODO add tests for correctly reading the actual content of the SP3 in addition to the header.

+    @staticmethod
+    def get_example_dataframe(template_name: str = "normal", include_simple_header: bool = True) -> pd.DataFrame:
+
+        dataframe_templates = {
+            # "normal": {  # TODO fill in
+            #     "data_vals": [],
+            #     "index_vals": [],
+            # },
+            "dupe_epoch_offline_sat_empty_epoch": {
+                "data_vals": [
+                    # Epoch 1 ---------------------------------
+                    # EST, X       EST, Y          EST, Z
+                    [4510.358405, -23377.282442, -11792.723580],  # --- < G01
+                    [4510.358405, -23377.282442, -11792.723580],  # --- < G02
+                    [0.000000, 0.000000, 0.000000],  # ---------------- < G03 (offline)
+                    # Epoch 2 ---------------------------------
+                    [4510.358405, -23377.282442, -11792.723580],  # --- < G01
+                    [4510.358405, -23377.282442, -11792.723580],  # --- < G02
+                    [0.000000, 0.000000, 0.000000],  # ---------------- < G03 (offline)
+                    # Epoch 3 --------------------------------- Effectively missing epoch, to test trimming.
+                    [np.nan, np.nan, np.nan],
+                    [np.nan, np.nan, np.nan],
+                    [np.nan, np.nan, np.nan],
+                ],
+                "index_vals": [[774619200, 774619200, 774619201], ["G01", "G02", "G03"]],
+            },
+            "offline_sat_nan": {
+                "data_vals": [
+                    # Epoch 1 ---------------------------------
+                    # EST, X       EST, Y          EST, Z
+                    [4510.358405, -23377.282442, -11792.723580],  # --- < G01
+                    [4510.358405, -23377.282442, -11792.723580],  # --- < G02
+                    [np.nan, np.nan, np.nan],  # ---------------- < G03 (offline)
+                    # Epoch 2 ---------------------------------
+                    [4510.358405, -23377.282442, -11792.723580],  # --- < G01
+                    [4510.358405, -23377.282442, -11792.723580],  # --- < G02
+                    [np.nan, np.nan, np.nan],  # ---------------- < G03 (offline)
+                    # Epoch 3 ---------------------------------
+                    [4510.358405, -23377.282442, -11792.723580],
+                    [4510.358405, -23377.282442, -11792.723580],
+                    [np.nan, np.nan, np.nan],
+                ],
+                "index_vals": [[774619200, 774619200, 774619201], ["G01", "G02", "G03"]],
+            },
+            "offline_sat_zero": {
+                "data_vals": [
+                    # Epoch 1 ---------------------------------
+                    # EST, X       EST, Y          EST, Z
+                    [4510.358405, -23377.282442, -11792.723580],  # --- < G01
+                    [4510.358405, -23377.282442, -11792.723580],  # --- < G02
+                    [0.000000, 0.000000, 0.000000],  # ---------------- < G03 (offline)
+                    # Epoch 2 ---------------------------------
+                    [4510.358405, -23377.282442, -11792.723580],  # --- < G01
+                    [4510.358405, -23377.282442, -11792.723580],  # --- < G02
+                    [0.000000, 0.000000, 0.000000],  # ---------------- < G03 (offline)
+                    # Epoch 3 ---------------------------------
+                    [4510.358405, -23377.282442, -11792.723580],
+                    [4510.358405, -23377.282442, -11792.723580],
+                    [0.000000, 0.000000, 0.000000],
+                ],
+                "index_vals": [[774619200, 774619200, 774619201], ["G01", "G02", "G03"]],
+            },
+        }
+
+        if template_name not in dataframe_templates:
+            raise ValueError(f"Unsupported template name: {template_name}")
+
+        # Worked example for defining a MultiIndex
+        # # Build a MultiIndex of J2000 then PRN values
+        # # ----------------------------- Epochs: ---------- | PRNs within each of those Epochs:
+        # # ------------------ Epoch 1 -- Epoch 2 -- Epoch 3 - PRN 1  PRN 2  PRN 3
+        # index_elements = [[774619200, 774619200, 774619201], ["G01", "G02", "G03"]]
+
+        # Define columns: top level 'EST' and nested under that, 'X', 'Y', 'Z'
+        frame_columns = [["EST", "EST", "EST"], ["X", "Y", "Z"]]
+
+        # Load template
+        template = dataframe_templates[template_name]
+        frame_data = template["data_vals"]
+        index_elements = template["index_vals"]
+
+        index_names = ["J2000", "PRN"]
+        multi_index = pd.MultiIndex.from_product(index_elements, names=index_names)
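+        # Illustrative note (an aside, not used by the test logic): pd.MultiIndex.from_product() takes the
+        # cartesian product of the supplied lists, so [[774619200, 774619200, 774619201], ["G01", "G02", "G03"]]
+        # expands to 9 (epoch, PRN) pairs: G01..G03 for the first epoch, G01..G03 again for its deliberate
+        # duplicate, then G01..G03 for the second epoch. That is why each template above supplies exactly
+        # 9 data rows.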
+
+        # Compose it all into a DataFrame
+        df = pd.DataFrame(frame_data, index=multi_index, columns=frame_columns)
+
+        if include_simple_header:
+            # Build SV table
+            head_svs = ["G01", "G02", "G03"]  # SV header entries
+            head_svs_std = [0, 0, 0]  # Accuracy codes for those SVs
+            sv_tbl = pd.Series(head_svs_std, index=head_svs)
+
+            # Build header
+            header_array = np.asarray(
+                [
+                    "d",
+                    "P",
+                    "Time TODO",
+                    "3",  # Num epochs
+                    "Data TODO",
+                    "coords TODO",
+                    "orb type TODO",
+                    "GAA",
+                    "SP3",  # Probably
+                    "Time sys TODO",
+                    "3",  # Stated SVs
+                ]
+            ).astype(str)
+            sp3_heading = pd.Series(
+                data=header_array,
+                index=[
+                    "VERSION",
+                    "PV_FLAG",
+                    "DATETIME",
+                    "N_EPOCHS",
+                    "DATA_USED",
+                    "COORD_SYS",
+                    "ORB_TYPE",
+                    "AC",
+                    "FILE_TYPE",
+                    "TIME_SYS",
+                    "SV_COUNT_STATED",
+                ],
+            )
+
+            # Merge SV table and header, and store as 'HEADER'
+            df.attrs["HEADER"] = pd.concat([sp3_heading, sv_tbl], keys=["HEAD", "SV_INFO"], axis=0)
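+            # Illustrative note (an aside; the access pattern used elsewhere in the library is assumed, not
+            # confirmed here): concat with keys= yields a Series with a two-level index, so entries can be read
+            # back with standard pandas access such as df.attrs["HEADER"].loc["HEAD", "N_EPOCHS"] or
+            # df.attrs["HEADER"]["SV_INFO"].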
+        return df
+
+    def test_clean_sp3_orb(self):
+        """
+        Tests cleaning an SP3 DataFrame of duplicates, leading or trailing nodata values, and offline sats
+        """
+
+        # Create the DataFrame manually, as the read function does deduplication itself. This also makes the test more self-contained.
+        sp3_df = TestSP3.get_example_dataframe("dupe_epoch_offline_sat_empty_epoch")
+
+        self.assertTrue(
+            # Alternatively you can use all(array == array) to do an elementwise equality check
+            np.array_equal(sp3_df.index.get_level_values(0).unique(), [774619200, 774619201]),
+            "Sample data should have 2 unique epochs (one of which is empty)",
+        )
+        self.assertTrue(
+            np.array_equal(sp3_df.index.get_level_values(1).unique(), ["G01", "G02", "G03"]),
+            "Sample data should have 3 sats",
+        )
+
+        # There should be duplicates of each sat in the first epoch.
+        # Note: the loc syntax here uses a tuple describing levels within the row MultiIndex, then the column
+        # MultiIndex, i.e. (row, row), (column, column).
+        self.assertTrue(
+            np.array_equal(sp3_df.loc[(774619200, "G01"), ("EST", "X")].values, [4510.358405, 4510.358405]),
+            "Expect dupe in first epoch",
+        )
+
+        # Test the cleaning function without offline sat removal
+        sp3_df_no_offline_removal = sp3.clean_sp3_orb(sp3_df, False)
+
+        self.assertTrue(
+            np.array_equal(sp3_df_no_offline_removal.index.get_level_values(0).unique(), [774619200]),
+            "After cleaning there should be a single unique epoch",
+        )
+
+        # This checks both (indirectly) that there is only one epoch (as the MultiIndex will repeat second-level
+        # values, and the input doesn't change sats in successive epochs), and that those second-level values
+        # aren't duplicated.
+        self.assertTrue(
+            np.array_equal(sp3_df_no_offline_removal.index.get_level_values(1), ["G01", "G02", "G03"]),
+            "After cleaning there should be no dupe PRNs. As offline sat removal is off, offline sat should remain",
+        )
+
+        # Now check with offline sat removal enabled too
+        sp3_df_with_offline_removal = sp3.clean_sp3_orb(sp3_df, True)
+        # Check that we still seem to have one epoch with no dupe sats, and now with the offline sat removed
+        self.assertTrue(
+            np.array_equal(sp3_df_with_offline_removal.index.get_level_values(1), ["G01", "G02"]),
+            "After cleaning there should be no dupe PRNs (and with offline removal, offline sat should be gone)",
+        )
+
    def test_gen_sp3_fundamentals(self):
        """
        Tests that the SP3 header and content generation functions produce output that (apart from trailing
@@ -737,6 +917,39 @@ def test_velinterpolation(self, mock_file):
        self.assertIsNotNone(r)
        self.assertIsNotNone(r2)

+    def test_sp3_offline_sat_removal_standalone(self):
+        """
+        Standalone test for remove_offline_sats(), using a manually constructed DataFrame to
+        avoid a dependency on read_sp3()
+        """
+        sp3_df_nans = TestSP3.get_example_dataframe("offline_sat_nan")
+        sp3_df_zeros = TestSP3.get_example_dataframe("offline_sat_zero")
+
+        self.assertEqual(
+            sp3_df_zeros.index.get_level_values(1).unique().array.tolist(),
+            ["G01", "G02", "G03"],
+            "Should start with 3 SVs",
+        )
+        self.assertEqual(
+            sp3_df_nans.index.get_level_values(1).unique().array.tolist(),
+            ["G01", "G02", "G03"],
+            "Should start with 3 SVs",
+        )
+
+        sp3_df_zeros_removed = sp3.remove_offline_sats(sp3_df_zeros)
+        sp3_df_nans_removed = sp3.remove_offline_sats(sp3_df_nans)
+
+        self.assertEqual(
+            sp3_df_zeros_removed.index.get_level_values(1).unique().array.tolist(),
+            ["G01", "G02"],
+            "Should be two SVs after removing offline ones",
+        )
+        self.assertEqual(
+            sp3_df_nans_removed.index.get_level_values(1).unique().array.tolist(),
+            ["G01", "G02"],
+            "Should be two SVs after removing offline ones",
+        )
+
    @patch("builtins.open", new_callable=mock_open, read_data=offline_sat_test_data)
    def test_sp3_offline_sat_removal(self, mock_file):
        sp3_df = sp3.read_sp3("mock_path", pOnly=False)