@@ -197,6 +197,197 @@ def test_read_sp3_correct_svs_read_when_ev_ep_present(self, mock_file):
    # TODO Add test(s) for correctly reading header fundamentals (ACC, ORB_TYPE, etc.)
    # TODO add tests for correctly reading the actual content of the SP3 in addition to the header.

+    @staticmethod
+    def get_example_dataframe(template_name: str = "normal", include_simple_header: bool = True) -> pd.DataFrame:
+
+        dataframe_templates = {
+            # "normal": {  # TODO fill in
+            #     "data_vals": [],
+            #     "index_vals": [],
+            # },
+ "dupe_epoch_offline_sat_empty_epoch" : {
209
+ "data_vals" : [
210
+ # Epoch 1 ---------------------------------
211
+ # EST, X EST, Y EST, Z
212
+ [4510.358405 , - 23377.282442 , - 11792.723580 ], # --- < G01
213
+ [4510.358405 , - 23377.282442 , - 11792.723580 ], # --- < G02
214
+ [0.000000 , 0.000000 , 0.000000 ], # ---------------- < G03 (offline)
215
+ # Epoch 2 ---------------------------------
216
+ [4510.358405 , - 23377.282442 , - 11792.723580 ], # --- < G01
217
+ [4510.358405 , - 23377.282442 , - 11792.723580 ], # --- < G02
218
+ [0.000000 , 0.000000 , 0.000000 ], # ---------------- < G03 (offline)
219
+ # Epoch 3 --------------------------------- Effectively missing epoch, to test trimming.
220
+ [np .nan , np .nan , np .nan ],
221
+ [np .nan , np .nan , np .nan ],
222
+ [np .nan , np .nan , np .nan ],
223
+ ],
224
+ "index_vals" : [[774619200 , 774619200 , 774619201 ], ["G01" , "G02" , "G03" ]],
225
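+                # Note: the first two epoch values are deliberately identical, producing duplicate rows for
+                # epoch 774619200, while the all-NaN rows under 774619201 form the "effectively missing" epoch above.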
+            },
+            "offline_sat_nan": {
+                "data_vals": [
+                    # Epoch 1 ---------------------------------
+                    # EST, X       EST, Y          EST, Z
+                    [4510.358405, -23377.282442, -11792.723580],  # --- < G01
+                    [4510.358405, -23377.282442, -11792.723580],  # --- < G02
+                    [np.nan, np.nan, np.nan],  # ---------------- < G03 (offline)
+                    # Epoch 2 ---------------------------------
+                    [4510.358405, -23377.282442, -11792.723580],  # --- < G01
+                    [4510.358405, -23377.282442, -11792.723580],  # --- < G02
+                    [np.nan, np.nan, np.nan],  # ---------------- < G03 (offline)
+                    # Epoch 3 ---------------------------------
+                    [4510.358405, -23377.282442, -11792.723580],
+                    [4510.358405, -23377.282442, -11792.723580],
+                    [np.nan, np.nan, np.nan],
+                ],
+                "index_vals": [[774619200, 774619200, 774619201], ["G01", "G02", "G03"]],
+            },
+ "offline_sat_zero" : {
245
+ "data_vals" : [
246
+ # Epoch 1 ---------------------------------
247
+ # EST, X EST, Y EST, Z
248
+ [4510.358405 , - 23377.282442 , - 11792.723580 ], # --- < G01
249
+ [4510.358405 , - 23377.282442 , - 11792.723580 ], # --- < G02
250
+ [0.000000 , 0.000000 , 0.000000 ], # ---------------- < G03 (offline)
251
+ # Epoch 2 ---------------------------------
252
+ [4510.358405 , - 23377.282442 , - 11792.723580 ], # --- < G01
253
+ [4510.358405 , - 23377.282442 , - 11792.723580 ], # --- < G02
254
+ [0.000000 , 0.000000 , 0.000000 ], # ---------------- < G03 (offline)
255
+ # Epoch 3 ---------------------------------
256
+ [4510.358405 , - 23377.282442 , - 11792.723580 ],
257
+ [4510.358405 , - 23377.282442 , - 11792.723580 ],
258
+ [0.000000 , 0.000000 , 0.000000 ],
259
+ ],
260
+ "index_vals" : [[774619200 , 774619200 , 774619201 ], ["G01" , "G02" , "G03" ]],
261
+ },
262
+ }
263
+
+        if template_name not in dataframe_templates:
+            raise ValueError(f"Unsupported template name: {template_name}")
+
+        # Worked example for defining MultiIndex
+        # # Build a MultiIndex of J2000 then PRN values
+        # # ----------------------------- Epochs: ---------- | PRNs within each of those Epochs:
+        # # ------------------ Epoch 1 -- Epoch 2 -- Epoch 3 - PRN 1  PRN 2  PRN 3
+        # index_elements = [[774619200, 774619200, 774619201], ["G01", "G02", "G03"]]
+
+        # Define columns: top level 'EST' and nested under that, 'X', 'Y', 'Z'
+        frame_columns = [["EST", "EST", "EST"], ["X", "Y", "Z"]]
+
+        # Load template
+        template = dataframe_templates[template_name]
+        frame_data = template["data_vals"]
+        index_elements = template["index_vals"]
+
+        index_names = ["J2000", "PRN"]
+        multi_index = pd.MultiIndex.from_product(index_elements, names=index_names)
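+        # Note: from_product() takes the Cartesian product of the two lists, so three epoch values crossed with
+        # three PRNs give nine index entries (matching the nine data rows), and a repeated epoch value in a
+        # template yields duplicate (epoch, PRN) rows.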
+
+        # Compose it all into a DataFrame
+        df = pd.DataFrame(frame_data, index=multi_index, columns=frame_columns)
+
+        if include_simple_header:
+            # Build SV table
+            head_svs = ["G01", "G02", "G03"]  # SV header entries
+            head_svs_std = [0, 0, 0]  # Accuracy codes for those SVs
+            sv_tbl = pd.Series(head_svs_std, index=head_svs)
+
+            # Build header
+            header_array = np.asarray(
+                [
+                    "d",
+                    "P",
+                    "Time TODO",
+                    "3",  # Num epochs
+                    "Data TODO",
+                    "coords TODO",
+                    "orb type TODO",
+                    "GAA",
+                    "SP3",  # Probably
+                    "Time sys TODO",
+                    "3",  # Stated SVs
+                ]
+            ).astype(str)
+            sp3_heading = pd.Series(
+                data=header_array,
+                index=[
+                    "VERSION",
+                    "PV_FLAG",
+                    "DATETIME",
+                    "N_EPOCHS",
+                    "DATA_USED",
+                    "COORD_SYS",
+                    "ORB_TYPE",
+                    "AC",
+                    "FILE_TYPE",
+                    "TIME_SYS",
+                    "SV_COUNT_STATED",
+                ],
+            )
+
+            # Merge SV table and header, and store as 'HEADER'
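+            # This is intended to mirror the structure read_sp3() attaches to a parsed frame (a Series concatenated
+            # from the header fields and the SV accuracy table, keyed "HEAD" and "SV_INFO"), so functions under test
+            # that consult df.attrs["HEADER"] behave as they would on a frame read from a real file.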
+            df.attrs["HEADER"] = pd.concat([sp3_heading, sv_tbl], keys=["HEAD", "SV_INFO"], axis=0)
+        return df
+
+    def test_clean_sp3_orb(self):
+        """
+        Tests cleaning an SP3 DataFrame of duplicates, leading or trailing nodata values, and offline sats
+        """
+
+        # Create the DataFrame manually, as the read function does deduplication itself. This also makes the
+        # test more self-contained.
+        sp3_df = TestSP3.get_example_dataframe("dupe_epoch_offline_sat_empty_epoch")
+
+        self.assertTrue(
+            # np.array_equal() performs an elementwise equality check across the whole array
+            np.array_equal(sp3_df.index.get_level_values(0).unique(), [774619200, 774619201]),
+            "Sample data should have 2 unique epochs (one of which is empty)",
+        )
+        self.assertTrue(
+            np.array_equal(sp3_df.index.get_level_values(1).unique(), ["G01", "G02", "G03"]),
+            "Sample data should have 3 sats",
+        )
+
+        # There should be duplicates of each sat in the first epoch
+        self.assertTrue(
+            np.array_equal(sp3_df.loc[774619200, "G01"]["EST"]["X"].values, [4510.358405, 4510.358405]),
+            "Expect dupe in first epoch",
+        )
+
+        # Test cleaning function without offline sat removal
+        sp3_df_no_offline_removal = sp3.clean_sp3_orb(sp3_df, False)
+
+        self.assertTrue(
+            np.array_equal(sp3_df_no_offline_removal.index.get_level_values(0).unique(), [774619200]),
+            "After cleaning there should be a single unique epoch",
+        )
+
+        # This (indirectly) checks both that there is only one epoch (as the multi-index will repeat second level
+        # values, and the input doesn't change sats in successive epochs), and that those second level values
+        # aren't duplicated.
+        self.assertTrue(
+            np.array_equal(sp3_df_no_offline_removal.index.get_level_values(1), ["G01", "G02", "G03"]),
+            "After cleaning there should be no dupe PRNs",
+        )
+        # Other things we could do...
+        # One test could be len(sp3_df_no_offline_removal.loc[774619200]["EST"]["X"]) == 3
+        # Three X values implies three sats, but doesn't prove there is no dupe sat...
+        # Checking that the list of unique PRNs has length 3 would also cover it...
+
+        self.assertTrue(
+            np.array_equal(sp3_df_no_offline_removal.index.get_level_values(1).unique(), ["G01", "G02", "G03"]),
+            "With offline sat removal off, expect offline sat to remain",
+        )
+
+        # Now check with offline sat removal enabled too
+        sp3_df_with_offline_removal = sp3.clean_sp3_orb(sp3_df, True)
+        self.assertTrue(
+            np.array_equal(sp3_df_with_offline_removal.index.get_level_values(1), ["G01", "G02"]),
+            "With offline sat removal on, expect offline sat to be gone",
+        )
+        # Check that we still seem to have one epoch with no dupe sats
+        self.assertTrue(
+            np.array_equal(sp3_df_with_offline_removal.index.get_level_values(1), ["G01", "G02"]),
+            "After cleaning there should be no dupe PRNs",
+        )
+
    def test_gen_sp3_fundamentals(self):
        """
        Tests that the SP3 header and content generation functions produce output that (apart from trailing
@@ -737,6 +928,39 @@ def test_velinterpolation(self, mock_file):
        self.assertIsNotNone(r)
        self.assertIsNotNone(r2)

+    def test_sp3_offline_sat_removal_standalone(self):
+        """
+        Standalone test for remove_offline_sats(), using a manually constructed DataFrame to
+        avoid a dependency on read_sp3()
+        """
+        sp3_df_nans = TestSP3.get_example_dataframe("offline_sat_nan")
+        sp3_df_zeros = TestSP3.get_example_dataframe("offline_sat_zero")
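+        # The two templates differ only in how the offline sat (G03) is encoded: all zeros vs all NaNs.
+        # remove_offline_sats() should drop it in both cases.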
+
+        self.assertEqual(
+            sp3_df_zeros.index.get_level_values(1).unique().array.tolist(),
+            ["G01", "G02", "G03"],
+            "Should start with 3 SVs",
+        )
+        self.assertEqual(
+            sp3_df_nans.index.get_level_values(1).unique().array.tolist(),
+            ["G01", "G02", "G03"],
+            "Should start with 3 SVs",
+        )
+
+        sp3_df_zeros_removed = sp3.remove_offline_sats(sp3_df_zeros)
+        sp3_df_nans_removed = sp3.remove_offline_sats(sp3_df_nans)
+
+        self.assertEqual(
+            sp3_df_zeros_removed.index.get_level_values(1).unique().array.tolist(),
+            ["G01", "G02"],
+            "Should be two SVs after removing offline ones",
+        )
+        self.assertEqual(
+            sp3_df_nans_removed.index.get_level_values(1).unique().array.tolist(),
+            ["G01", "G02"],
+            "Should be two SVs after removing offline ones",
+        )
+
@patch ("builtins.open" , new_callable = mock_open , read_data = offline_sat_test_data )
741
965
def test_sp3_offline_sat_removal (self , mock_file ):
742
966
        sp3_df = sp3.read_sp3("mock_path", pOnly=False)