diff --git a/CHANGELOG.md b/CHANGELOG.md
index 12e8a1b..c77484d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ## [Unreleased]
 
 ### Added
+- Issue 229 - Create new database tables for version D collections
 - Issue 211 - Query track ingest table for granules with "to_ingest" status
 - Issue 212 - Update track ingest table with granule status
 - Issue 203 - Construct CNM to trigger load data operations and ingest granule
diff --git a/hydrocron/db/io/swot_shp.py b/hydrocron/db/io/swot_shp.py
index 8b0c5ec..78e45ee 100644
--- a/hydrocron/db/io/swot_shp.py
+++ b/hydrocron/db/io/swot_shp.py
@@ -84,7 +84,7 @@ def read_shapefile(filepath, obscure_data, columns, s3_resource=None):
             np.random.default_rng().integers(low=2, high=10)*shp_file[numeric_columns],
             shp_file[numeric_columns])
 
-    filename_attrs = parse_from_filename(filename)
+    filename_attrs = parse_from_filename(filepath)
 
     xml_attrs = parse_metadata_from_shpxml(shp_xml_tree)
 
@@ -204,15 +204,15 @@ def assemble_attributes(geodf, attributes):
     return items
 
 
-def parse_from_filename(filename):
+def parse_from_filename(filepath):
     """
     Parses the cycle, pass, start and end time from the shapefile name
     and add to each item
 
     Parameters
     ----------
-    filename : string
-        The string to parse
+    filepath : string
+        The full uri of the granule to parse
 
     Returns
     -------
@@ -220,22 +220,16 @@
         A dictionary of attributes from the filename
     """
     logging.info('Starting parse attributes from filename')
+
+    filename = os.path.basename(filepath)
     filename_components = filename.split("_")
 
     collection = ""
     collection_version = ""
-    if 'RiverSP_Reach' in filename:
-        collection = constants.SWOT_REACH_COLLECTION_NAME
-        collection_version = constants.SWOT_REACH_COLLECTION_VERSION
-
-    if 'RiverSP_Node' in filename:
-        collection = constants.SWOT_NODE_COLLECTION_NAME
-        collection_version = constants.SWOT_NODE_COLLECTION_VERSION
-
-    if 'LakeSP_Prior' in filename:
-        collection = constants.SWOT_PRIOR_LAKE_COLLECTION_NAME
-        collection_version = constants.SWOT_PRIOR_LAKE_COLLECTION_VERSION
+    for table_info in constants.TABLE_COLLECTION_INFO:
+        if (table_info['feature_type'] in filename) & (table_info['collection_name'] in filepath):
+            collection = table_info['collection_name']
 
     filename_attrs = {
         'cycle_id': filename_components[5],
@@ -283,7 +277,7 @@ def load_benchmarking_data():
         'continent_id': 'XX',
         'range_end_time': '2024-12-31T23:59:00Z',
         'crid': 'TEST',
-        'collection_shortname': constants.SWOT_REACH_COLLECTION_NAME
+        'collection_shortname': constants.TABLE_COLLECTION_INFO[0]['collection_name']
     }
 
     items = assemble_attributes(csv_file, filename_attrs)
diff --git a/hydrocron/db/load_data.py b/hydrocron/db/load_data.py
index 4ce20dd..584dc31 100755
--- a/hydrocron/db/load_data.py
+++ b/hydrocron/db/load_data.py
@@ -42,25 +42,14 @@ def lambda_handler(event, _):  # noqa: E501 # pylint: disable=W0613
     end_date = event['body']['end_date']
     load_benchmarking_data = event['body']['load_benchmarking_data']
 
-    match table_name:
-        case constants.SWOT_REACH_TABLE_NAME:
-            collection_shortname = constants.SWOT_REACH_COLLECTION_NAME
-            track_table = constants.SWOT_REACH_TRACK_INGEST_TABLE_NAME
-            feature_type = 'Reach'
-        case constants.SWOT_NODE_TABLE_NAME:
-            collection_shortname = constants.SWOT_NODE_COLLECTION_NAME
-            track_table = constants.SWOT_NODE_TRACK_INGEST_TABLE_NAME
-            feature_type = 'Node'
-        case constants.SWOT_PRIOR_LAKE_TABLE_NAME:
-            collection_shortname = constants.SWOT_PRIOR_LAKE_COLLECTION_NAME
-            track_table = constants.SWOT_PRIOR_LAKE_TRACK_INGEST_TABLE_NAME
-            feature_type = 'LakeSP_Prior'
-        case constants.DB_TEST_TABLE_NAME:
-            collection_shortname = constants.SWOT_REACH_COLLECTION_NAME
-            track_table = constants.SWOT_REACH_TRACK_INGEST_TABLE_NAME
-            feature_type = 'Reach'
-        case _:
-            raise MissingTable(f"Hydrocron table '{table_name}' does not exist.")
+    for table_info in constants.TABLE_COLLECTION_INFO:
+        if table_info['table_name'] in table_name:
+            collection_shortname = table_info['collection_name']
+            track_table = table_info['track_table']
+            feature_type = table_info['feature_type']
+            break
+    else:
+        raise MissingTable(f"Error: Table does not exist: {table_name}")
 
     logging.info("Searching for granules in collection %s", collection_shortname)
 
@@ -107,8 +96,6 @@ def granule_handler(event, _):
     Second Lambda entrypoint for loading individual granules
     """
     granule_path = event['body']['granule_path']
-    table_name = event['body']['table_name']
-    track_table = event['body']['track_table']
     load_benchmarking_data = event['body']['load_benchmarking_data']
 
@@ -124,17 +111,16 @@ def granule_handler(event, _):
         revision_date = "Not Found"
        logging.info('No CNM revision date')
 
-    if ("Reach" in granule_path) & (table_name != constants.SWOT_REACH_TABLE_NAME):
-        raise TableMisMatch(f"Error: Cannot load Reach data into table: '{table_name}'")
-
-    if ("Node" in granule_path) & (table_name != constants.SWOT_NODE_TABLE_NAME):
-        raise TableMisMatch(f"Error: Cannot load Node data into table: '{table_name}'")
-
-    if ("LakeSP_Prior" in granule_path) & (table_name != constants.SWOT_PRIOR_LAKE_TABLE_NAME):
-        raise TableMisMatch(f"Error: Cannot load Prior Lake data into table: '{table_name}'")
-
     if ("LakeSP_Obs" in granule_path) | ("LakeSP_Unassigned" in granule_path):
-        raise TableMisMatch(f"Error: Cannot load Observed or Unassigned Lake data into table: '{table_name}'")
+        raise MissingTable("Error: Cannot load Observed or Unassigned Lake data")
+
+    for table_info in constants.TABLE_COLLECTION_INFO:
+        if (table_info['collection_name'] in granule_path) & (table_info['feature_type'] in granule_path):
+            table_name = table_info['table_name']
+            track_table = table_info['track_table']
+            break
+    else:
+        raise MissingTable(f"Error: Cannot load granule: {granule_path}, no support for this collection")
 
     logging.info("Value of load_benchmarking_data is: %s", load_benchmarking_data)
 
@@ -189,50 +175,27 @@ def cnm_handler(event, _):
             granule_uri = files['uri']
             checksum = files['checksum']
 
-            if 'Reach' in granule_uri:
-                event2 = ('{"body": {"granule_path": "' + granule_uri
-                          + '","table_name": "' + constants.SWOT_REACH_TABLE_NAME
-                          + '","track_table": "' + constants.SWOT_REACH_TRACK_INGEST_TABLE_NAME
-                          + '","checksum": "' + checksum
-                          + '","revisionDate": "' + revision_date
-                          + '","load_benchmarking_data": "' + load_benchmarking_data + '"}}')
-
-                logging.info("Invoking granule load lambda with event json %s", str(event2))
+            for table_info in constants.TABLE_COLLECTION_INFO:
+                if (table_info['collection_name'] in granule_uri) & (table_info['feature_type'] in granule_uri):
+                    table_name = table_info['table_name']
+                    track_table = table_info['track_table']
+                    break
+            else:
+                raise MissingTable(f"Error: Cannot load granule: {granule_uri}")
 
-                lambda_client.invoke(
-                    FunctionName=os.environ['GRANULE_LAMBDA_FUNCTION_NAME'],
-                    InvocationType='Event',
-                    Payload=event2)
+            event2 = ('{"body": {"granule_path": "' + granule_uri
+                      + '","table_name": "' + table_name
+                      + '","track_table": "' + track_table
+ + '","checksum": "' + checksum + + '","revisionDate": "' + revision_date + + '","load_benchmarking_data": "' + load_benchmarking_data + '"}}') - if 'Node' in granule_uri: - event2 = ('{"body": {"granule_path": "' + granule_uri - + '","table_name": "' + constants.SWOT_NODE_TABLE_NAME - + '","track_table": "' + constants.SWOT_NODE_TRACK_INGEST_TABLE_NAME - + '","checksum": "' + checksum - + '","revisionDate": "' + revision_date - + '","load_benchmarking_data": "' + load_benchmarking_data + '"}}') + logging.info("Invoking granule load lambda with event json %s", str(event2)) - logging.info("Invoking granule load lambda with event json %s", str(event2)) - - lambda_client.invoke( - FunctionName=os.environ['GRANULE_LAMBDA_FUNCTION_NAME'], - InvocationType='Event', - Payload=event2) - - if 'LakeSP_Prior' in granule_uri: - event2 = ('{"body": {"granule_path": "' + granule_uri - + '","table_name": "' + constants.SWOT_PRIOR_LAKE_TABLE_NAME - + '","track_table": "' + constants.SWOT_PRIOR_LAKE_TRACK_INGEST_TABLE_NAME - + '","checksum": "' + checksum - + '","revisionDate": "' + revision_date - + '","load_benchmarking_data": "' + load_benchmarking_data + '"}}') - - logging.info("Invoking granule load lambda with event json %s", str(event2)) - - lambda_client.invoke( - FunctionName=os.environ['GRANULE_LAMBDA_FUNCTION_NAME'], - InvocationType='Event', - Payload=event2) + lambda_client.invoke( + FunctionName=os.environ['GRANULE_LAMBDA_FUNCTION_NAME'], + InvocationType='Event', + Payload=event2) def find_new_granules(collection_shortname, start_date, end_date): @@ -337,27 +300,17 @@ def load_data(dynamo_resource, table_name, items): raise MissingTable(f"Hydrocron table '{table_name}' does not exist.") from err raise err - match hydrocron_table.table_name: - case constants.SWOT_REACH_TABLE_NAME: - feature_name = 'reach' - feature_id = feature_name + '_id' - case constants.SWOT_NODE_TABLE_NAME: - feature_name = 'node' - feature_id = feature_name + '_id' - case constants.SWOT_PRIOR_LAKE_TABLE_NAME: - feature_name = 'prior_lake' - feature_id = 'lake_id' - case constants.SWOT_REACH_TRACK_INGEST_TABLE_NAME: - feature_name = 'track ingest reaches' + for table_info in constants.TABLE_COLLECTION_INFO: + if hydrocron_table.table_name in table_info['track_table']: + feature_name = 'track ingest ' + str.lower(table_info['feature_type']) feature_id = 'granuleUR' - case constants.SWOT_NODE_TRACK_INGEST_TABLE_NAME: - feature_name = 'track ingest nodes' - feature_id = 'granuleUR' - case constants.SWOT_PRIOR_LAKE_TRACK_INGEST_TABLE_NAME: - feature_name = 'track ingest prior lakes' - feature_id = 'granuleUR' - case _: - logging.warning('Items cannot be parsed, file reader not implemented for table %s', hydrocron_table.table_name) + break + if hydrocron_table.table_name in table_info['table_name']: + feature_name = table_info['feature_type'] + feature_id = table_info['feature_id'] + break + else: + raise MissingTable(f'Items cannot be parsed, file reader not implemented for table {hydrocron_table.table_name}') if len(items) > 5: logging.info("Batch adding %s %s items. 
First 5 feature ids in batch: ", len(items), feature_name) diff --git a/hydrocron/db/track_ingest.py b/hydrocron/db/track_ingest.py index 2a90ac0..70eac34 100644 --- a/hydrocron/db/track_ingest.py +++ b/hydrocron/db/track_ingest.py @@ -401,17 +401,13 @@ def track_ingest_handler(event, context): reprocessed_crid = event["reprocessed_crid"] temporal = "temporal" in event.keys() - if ("reach" in collection_shortname) and ((hydrocron_table != constants.SWOT_REACH_TABLE_NAME) - or (hydrocron_track_table != constants.SWOT_REACH_TRACK_INGEST_TABLE_NAME)): - raise TableMisMatch(f"Error: Cannot query reach data for tables: '{hydrocron_table}' and '{hydrocron_track_table}'") - - if ("node" in collection_shortname) and ((hydrocron_table != constants.SWOT_NODE_TABLE_NAME) - or (hydrocron_track_table != constants.SWOT_NODE_TRACK_INGEST_TABLE_NAME)): - raise TableMisMatch(f"Error: Cannot query node data for tables: '{hydrocron_table}' and '{hydrocron_track_table}'") - - if ("prior" in collection_shortname) and ((hydrocron_table != constants.SWOT_PRIOR_LAKE_TABLE_NAME) - or (hydrocron_track_table != constants.SWOT_PRIOR_LAKE_TRACK_INGEST_TABLE_NAME)): - raise TableMisMatch(f"Error: Cannot query prior lake data for tables: '{hydrocron_table}' and '{hydrocron_track_table}'") + for table_info in constants.TABLE_COLLECTION_INFO: + if (table_info['collection_name'] in collection_shortname) & (str.lower(table_info['feature_type']) in collection_shortname): + hydrocron_table = table_info['table_name'] + hydrocron_track_table = table_info['track_table'] + break + else: + raise TableMisMatch(f"Error: Cannot query data for tables: '{hydrocron_table}' and '{hydrocron_track_table}'") if temporal: query_start = datetime.datetime.strptime(event["query_start"], "%Y-%m-%dT%H:%M:%S").replace(tzinfo=timezone.utc) diff --git a/hydrocron/utils/constants.py b/hydrocron/utils/constants.py index 411acc2..6ec541c 100644 --- a/hydrocron/utils/constants.py +++ b/hydrocron/utils/constants.py @@ -14,6 +14,8 @@ '../..', 'tests', 'data', 'SWOT_L2_HR_RiverSP_Reach_548_011_NA_20230610T193337_20230610T193344_PIA1_01.zip' # noqa E501 )) +TEST_REACH_PATHNAME = ( + "SWOT_L2_HR_RiverSP_2.0/SWOT_L2_HR_RiverSP_Reach_548_011_NA_20230610T193337_20230610T193344_PIA1_01.zip") TEST_REACH_FILENAME = ( "SWOT_L2_HR_RiverSP_Reach_548_011_NA_" @@ -31,6 +33,9 @@ DB_TEST_TABLE_NAME = "hydrocron-swot-test-table" API_TEST_REACH_TABLE_NAME = "hydrocron-swot-reach-table" +API_TEST_NODE_TABLE_NAME = "hydrocron-swot-node-table" +TEST_REACH_COLLECTION_NAME = "SWOT_L2_HR_RiverSP_2.0" +TEST_REACH_TRACK_INGEST_TABLE_NAME = "hydrocron-swot-reach-track-ingest-table" TEST_REACH_PARTITION_KEY_NAME = 'reach_id' TEST_REACH_SORT_KEY_NAME = 'range_start_time' TEST_REACH_ID_VALUE = '71224100223' @@ -47,6 +52,9 @@ 'SWOT_L2_HR_LakeSP_Prior_018_100_GR_20240713T111741_20240713T112027_PIC0_01.zip' # noqa E501 )) +TEST_PLAKE_PATHNAME = ( + "SWOT_L2_HR_LakeSP_2.0/SWOT_L2_HR_LakeSP_Prior_018_100_GR_20240713T111741_20240713T112027_PIC0_01.zip") + TEST_PLAKE_FILENAME = ( "SWOT_L2_HR_LakeSP_Prior_018_100_GR_20240713T111741_20240713T112027_PIC0_01.zip") @@ -113,6 +121,7 @@ DB_TEST_PLAKE_TABLE_NAME = "hydrocron-swot-testlake-table" API_TEST_PLAKE_TABLE_NAME = "hydrocron-swot-prior-lake-table" +TEST_PLAKE_COLLECTION_NAME = "SWOT_L2_HR_LakeSP_2.0" TEST_PLAKE_PARTITION_KEY_NAME = 'lake_id' TEST_PLAKE_SORT_KEY_NAME = 'range_start_time' TEST_PLAKE_ID_VALUE = '9130047472' @@ -125,19 +134,48 @@ # ------------ # # PROD CONSTANTS # # ------------ # -SWOT_REACH_TABLE_NAME = 
"hydrocron-swot-reach-table" -SWOT_NODE_TABLE_NAME = "hydrocron-swot-node-table" -SWOT_PRIOR_LAKE_TABLE_NAME = "hydrocron-swot-prior-lake-table" -SWOT_REACH_TRACK_INGEST_TABLE_NAME = "hydrocron-swot-reach-track-ingest-table" -SWOT_NODE_TRACK_INGEST_TABLE_NAME = "hydrocron-swot-node-track-ingest-table" -SWOT_PRIOR_LAKE_TRACK_INGEST_TABLE_NAME = "hydrocron-swot-prior-lake-track-ingest-table" +TABLE_COLLECTION_INFO = [ + {'collection_name': 'SWOT_L2_HR_RiverSP_2.0', + 'table_name': 'hydrocron-swot-reach-table', + 'track_table': 'hydrocron-swot-reach-track-ingest-table', + 'feature_type': 'Reach', + 'feature_id': 'reach_id' + }, + {'collection_name': 'SWOT_L2_HR_RiverSP_2.0', + 'table_name': 'hydrocron-swot-node-table', + 'track_table': 'hydrocron-swot-node-track-ingest-table', + 'feature_type': 'Node', + 'feature_id': 'node_id' + }, + {'collection_name': 'SWOT_L2_HR_LakeSP_2.0', + 'table_name': 'hydrocron-swot-prior-lake-table', + 'track_table': 'hydrocron-swot-prior-lake-track-ingest-table', + 'feature_type': 'LakeSP_Prior', + 'feature_id': 'lake_id' + }, + {'collection_name': 'SWOT_L2_HR_RiverSP_D', + 'table_name': 'hydrocron-SWOT_L2_HR_RiverSP_D-reach-table', + 'track_table': 'hydrocron-SWOT_L2_HR_RiverSP_D-reach-track-ingest', + 'feature_type': 'Reach', + 'feature_id': 'reach_id' + }, + {'collection_name': 'SWOT_L2_HR_RiverSP_D', + 'table_name': 'hydrocron-SWOT_L2_HR_RiverSP_D-node-table', + 'track_table': 'hydrocron-SWOT_L2_HR_RiverSP_D-node-track-ingest', + 'feature_type': 'Node', + 'feature_id': 'node_id' + }, + {'collection_name': 'SWOT_L2_HR_LakeSP_D', + 'table_name': 'hydrocron-SWOT_L2_HR_LakeSP_D-prior-lake-table', + 'track_table': 'hydrocron-SWOT_L2_HR_LakeSP_D-prior-lake-track-ingest', + 'feature_type': 'LakeSP_Prior', + 'feature_id': 'lake_id' + } +] +SWOT_REACH_TABLE_NAME = 'hydrocron-swot-reach-table' +SWOT_NODE_TABLE_NAME = 'hydrocron-swot-node-table' +SWOT_PRIOR_LAKE_TABLE_NAME = 'hydrocron-swot-prior-lake-table' -SWOT_REACH_COLLECTION_NAME = "SWOT_L2_HR_RiverSP_2.0" -SWOT_NODE_COLLECTION_NAME = "SWOT_L2_HR_RiverSP_2.0" -SWOT_PRIOR_LAKE_COLLECTION_NAME = "SWOT_L2_HR_LakeSP_2.0" -SWOT_REACH_COLLECTION_VERSION = SWOT_REACH_COLLECTION_NAME[19:] -SWOT_NODE_COLLECTION_VERSION = SWOT_NODE_COLLECTION_NAME[19:] -SWOT_PRIOR_LAKE_COLLECTION_VERSION = SWOT_PRIOR_LAKE_COLLECTION_NAME[18:] SWOT_PRIOR_LAKE_FILL_GEOMETRY_COORDS = ( (-31.286028054129474, -27.207309600925463), (-22.19117572552625, -28.812946226841383), diff --git a/terraform/hydrocron-dynamo.tf b/terraform/hydrocron-dynamo.tf index 1fb63e0..0d3297d 100644 --- a/terraform/hydrocron-dynamo.tf +++ b/terraform/hydrocron-dynamo.tf @@ -27,6 +27,35 @@ resource "aws_dynamodb_table" "hydrocron-swot-reach-table" { } } +resource "aws_dynamodb_table" "hydrocron-SWOT_L2_HR_RiverSP_D-reach-table" { + name = "hydrocron-SWOT_L2_HR_RiverSP_D-reach-table" + billing_mode = "PAY_PER_REQUEST" + hash_key = "reach_id" + range_key = "range_start_time" + attribute { + name = "reach_id" + type = "S" + } + attribute { + name = "range_start_time" + type = "S" + } + attribute { + name = "granuleUR" + type = "S" + } + global_secondary_index { + name = "GranuleURIndex" + hash_key = "granuleUR" + range_key = "range_start_time" + projection_type = "INCLUDE" + non_key_attributes = ["reach_id", "collection_shortname", "collection_version", "crid", "cycle_id", "pass_id", "continent_id", "ingest_time"] + } + point_in_time_recovery { + enabled = var.stage == "ops" ? 
true : false + } +} + resource "aws_dynamodb_table" "hydrocron-swot-node-table" { name = "hydrocron-swot-node-table" billing_mode = "PAY_PER_REQUEST" @@ -57,6 +86,36 @@ resource "aws_dynamodb_table" "hydrocron-swot-node-table" { } } +resource "aws_dynamodb_table" "hydrocron-SWOT_L2_HR_RiverSP_D-node-table" { + name = "hydrocron-SWOT_L2_HR_RiverSP_D-node-table" + billing_mode = "PAY_PER_REQUEST" + hash_key = "node_id" + range_key = "range_start_time" + attribute { + name = "node_id" + type = "S" + } + attribute { + name = "range_start_time" + type = "S" + } + attribute { + name = "granuleUR" + type = "S" + } + + global_secondary_index { + name = "GranuleURIndex" + hash_key = "granuleUR" + range_key = "range_start_time" + projection_type = "INCLUDE" + non_key_attributes = ["node_id", "collection_shortname", "collection_version", "crid", "cycle_id", "pass_id", "continent_id", "ingest_time"] + } + point_in_time_recovery { + enabled = var.stage == "ops" ? true : false + } +} + resource "aws_dynamodb_table" "hydrocron-swot-prior-lake-table" { name = "hydrocron-swot-prior-lake-table" billing_mode = "PAY_PER_REQUEST" @@ -87,6 +146,36 @@ resource "aws_dynamodb_table" "hydrocron-swot-prior-lake-table" { } } +resource "aws_dynamodb_table" "hydrocron-SWOT_L2_HR_LakeSP_D-prior-lake-table" { + name = "hydrocron-SWOT_L2_HR_LakeSP_D-prior-lake-table" + billing_mode = "PAY_PER_REQUEST" + hash_key = "lake_id" + range_key = "range_start_time" + attribute { + name = "lake_id" + type = "S" + } + attribute { + name = "range_start_time" + type = "S" + } + attribute { + name = "granuleUR" + type = "S" + } + + global_secondary_index { + name = "GranuleURIndex" + hash_key = "granuleUR" + range_key = "range_start_time" + projection_type = "INCLUDE" + non_key_attributes = ["lake_id", "collection_shortname", "collection_version", "crid", "cycle_id", "pass_id", "continent_id", "ingest_time"] + } + point_in_time_recovery { + enabled = var.stage == "ops" ? 
true : false + } +} + resource "aws_dynamodb_table" "hydrocron-reach-track-ingest-table" { name = "hydrocron-swot-reach-track-ingest-table" billing_mode = "PAY_PER_REQUEST" diff --git a/tests/conftest.py b/tests/conftest.py index b8c97f7..8200e29 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -45,9 +45,10 @@ dynamo_db_resource = factories.dynamodb("dynamo_test_proc") + def create_tables(dynamo_db, table_name, feature_id, non_key_atts): """Create DynamoDB tables for testing.""" - + dynamo_db.create_table( TableName=table_name, AttributeDefinitions=[ @@ -112,20 +113,20 @@ def hydrocron_dynamo_instance(request, dynamo_test_proc): create_tables( dynamo_db, - constants.SWOT_REACH_TABLE_NAME, + constants.API_TEST_REACH_TABLE_NAME, 'reach_id', ['reach_id', 'collection_shortname', 'collection_version', 'crid', 'cycle_id', 'pass_id', 'continent_id', 'ingest_time'] ) create_tables( dynamo_db, - constants.SWOT_PRIOR_LAKE_TABLE_NAME, + constants.API_TEST_PLAKE_TABLE_NAME, 'lake_id', ['lake_id', 'collection_shortname', 'collection_version', 'crid', 'cycle_id', 'pass_id', 'continent_id', 'ingest_time'] ) # load reach table - reach_hydro_table = HydrocronTable(dynamo_db, constants.SWOT_REACH_TABLE_NAME) + reach_hydro_table = HydrocronTable(dynamo_db, constants.API_TEST_REACH_TABLE_NAME) reach_items = swot_shp.read_shapefile( TEST_SHAPEFILE_PATH_REACH, obscure_data=False, @@ -134,7 +135,7 @@ def hydrocron_dynamo_instance(request, dynamo_test_proc): reach_hydro_table.add_data(**item_attrs) # load lake table - lake_hydro_table = HydrocronTable(dynamo_db, constants.SWOT_PRIOR_LAKE_TABLE_NAME) + lake_hydro_table = HydrocronTable(dynamo_db, constants.API_TEST_PLAKE_TABLE_NAME) lake_items = swot_shp.read_shapefile( TEST_SHAPEFILE_PATH_LAKE, obscure_data=False, @@ -272,11 +273,11 @@ def track_ingest_dynamo_instance(request, dynamo_test_proc): # reach table create_tables( dynamo_db, - constants.SWOT_REACH_TABLE_NAME, + constants.API_TEST_REACH_TABLE_NAME, 'reach_id', ['reach_id', 'collection_shortname', 'collection_version', 'crid', 'cycle_id', 'pass_id', 'continent_id', 'ingest_time'] ) - reach_hydro_table = HydrocronTable(dynamo_db, constants.SWOT_REACH_TABLE_NAME) + reach_hydro_table = HydrocronTable(dynamo_db, constants.API_TEST_REACH_TABLE_NAME) reach_items = swot_shp.read_shapefile( TEST_SHAPEFILE_PATH_REACH_TRACK, obscure_data=False, @@ -292,7 +293,7 @@ def track_ingest_dynamo_instance(request, dynamo_test_proc): # track table dynamo_db.create_table( - TableName=constants.SWOT_REACH_TRACK_INGEST_TABLE_NAME, + TableName=constants.TEST_REACH_TRACK_INGEST_TABLE_NAME, AttributeDefinitions=[ {'AttributeName': 'granuleUR', 'AttributeType': 'S'}, {'AttributeName': 'revision_date', 'AttributeType': 'S'}, @@ -332,7 +333,7 @@ def track_ingest_dynamo_instance(request, dynamo_test_proc): } ] ) - track_reach_table = HydrocronTable(dynamo_db, constants.SWOT_REACH_TRACK_INGEST_TABLE_NAME) + track_reach_table = HydrocronTable(dynamo_db, constants.TEST_REACH_TRACK_INGEST_TABLE_NAME) track_items = swot_shp.read_shapefile( TEST_SHAPEFILE_PATH_REACH_TRACK, obscure_data=False, diff --git a/tests/load_data_local.py b/tests/load_data_local.py index f77fbcf..a472c80 100644 --- a/tests/load_data_local.py +++ b/tests/load_data_local.py @@ -14,7 +14,7 @@ hydrocron.db.load_data.load_data( - HydrocronTable(hydrocron.api.hydrocron.data_repository._dynamo_instance, constants.SWOT_REACH_TABLE_NAME), + HydrocronTable(hydrocron.api.hydrocron.data_repository._dynamo_instance, constants.API_TEST_REACH_TABLE_NAME), os.path.join( 
os.path.dirname(os.path.realpath(__file__)), 'data', @@ -22,7 +22,7 @@ ), True) hydrocron.db.load_data.load_data(HydrocronTable( - hydrocron.api.hydrocron.data_repository._dynamo_instance, constants.SWOT_NODE_TABLE_NAME), os.path.join( + hydrocron.api.hydrocron.data_repository._dynamo_instance, constants.API_TEST_NODE_TABLE_NAME), os.path.join( os.path.dirname(os.path.realpath(__file__)), 'data', 'SWOT_L2_HR_RiverSP_Node_540_010_AS_20230602T193520_20230602T193521_PIA1_01.zip' # noqa diff --git a/tests/test_data/api_query_results_csv.csv b/tests/test_data/api_query_results_csv.csv index ecda0f4..2ab2fce 100644 --- a/tests/test_data/api_query_results_csv.csv +++ b/tests/test_data/api_query_results_csv.csv @@ -1,2 +1,2 @@ reach_id,time_str,wse,sword_version,collection_shortname,crid,geometry,wse_units -71224100223,2023-06-10T19:39:43Z,286.2983,15,SWOT_L2_HR_RiverSP_2.0,PIA1,"LINESTRING (-95.564991 50.223686, -95.564559 50.223479, -95.564133 50.223381, -95.563713 50.22339, -95.563296 50.223453, -95.562884 50.223624, -95.562473 50.223795, -95.562062 50.223966, -95.56165 50.224137, -95.561242 50.224362, -95.560917 50.224585, -95.560595 50.224862, -95.560271 50.225085, -95.559946 50.225308, -95.559946 50.225308, -95.559213 50.225756, -95.558804 50.225981, -95.558567 50.226256, -95.558413 50.226529, -95.558343 50.226801, -95.558274 50.227072, -95.558288 50.227342, -95.558303 50.227611, -95.558317 50.227881, -95.558416 50.228148, -95.558514 50.228416, -95.558697 50.228682, -95.558795 50.22895, -95.558978 50.229216, -95.559076 50.229483, -95.559259 50.229749, -95.559357 50.230017, -95.559455 50.230284, -95.55947 50.230554, -95.559484 50.230823, -95.559583 50.231091, -95.559765 50.231357, -95.559864 50.231625, -95.559878 50.231894, -95.559809 50.232166, -95.559571 50.232441, -95.559165 50.23272, -95.558757 50.232944, -95.558348 50.233169, -95.557939 50.233394, -95.55753 50.233619, -95.557206 50.233842, -95.556884 50.234119, -95.556562 50.234396, -95.556241 50.234673, -95.556003 50.234948, -95.555681 50.235225, -95.555443 50.2355, -95.555206 50.235775, -95.555136 50.236047, -95.555066 50.236318, -95.555081 50.236588, -95.555011 50.236859, -95.554941 50.23713, -95.554701 50.237351, -95.554376 50.237575, -95.554052 50.237798, -95.553727 50.238021, -95.553727 50.238021, -95.55308 50.238521, -95.552843 50.238796, -95.552521 50.239073, -95.552367 50.239346, -95.552297 50.239617, -95.552312 50.239887, -95.552326 50.240156, -95.552425 50.240424, -95.552439 50.240693, -95.552453 50.240963, -95.552468 50.241233, -95.552482 50.241502, -95.552497 50.241772, -95.552511 50.242041, -95.552525 50.242311, -95.552456 50.242582, -95.552299 50.242801, -95.552056 50.242969, -95.551728 50.243138, -95.551314 50.243255, -95.550899 50.243372, -95.550487 50.243543, -95.550076 50.243714, -95.549661 50.243831, -95.549249 50.244002, -95.548838 50.244173, -95.548423 50.24429, -95.548011 50.244461, -95.547603 50.244686, -95.547278 50.244909, -95.546953 50.245132, -95.546715 50.245407, -95.546561 50.24568, -95.546492 50.245951, -95.546422 50.246223, -95.546436 50.246492, -95.546367 50.246764, -95.546297 50.247035, -95.546143 50.247308, -95.54599 50.247582, -95.545752 50.247857, -95.545598 50.24813, -95.545444 50.248403, -95.545374 50.248674, -95.545305 50.248946, -95.545319 50.249215, -95.545333 50.249485, -95.545348 50.249754, -95.545446 50.250022, -95.545545 50.25029, -95.545727 50.250556, -95.54591 50.250822, -95.546176 50.251086, -95.546359 50.251351, -95.546625 50.251615, -95.546892 50.251879, -95.547075 50.252145, -95.547257 
50.252411, -95.54744 50.252677, -95.547538 50.252945, -95.547553 50.253214, -95.547651 50.253482, -95.547581 50.253753, -95.547512 50.254025, -95.547442 50.254296, -95.547372 50.254568, -95.547303 50.254839, -95.547317 50.255108, -95.547247 50.25538, -95.547177 50.255651, -95.547192 50.255921, -95.547206 50.25619, -95.547221 50.25646, -95.547319 50.256728, -95.547418 50.256995, -95.547432 50.257265, -95.547446 50.257534, -95.547461 50.257804, -95.547475 50.258073, -95.547489 50.258343, -95.547504 50.258613, -95.547518 50.258882, -95.547449 50.259153, -95.547379 50.259425, -95.547309 50.259696, -95.547239 50.259968, -95.547086 50.260241, -95.547016 50.260512, -95.546946 50.260784, -95.546876 50.261055, -95.546807 50.261326, -95.546737 50.261598, -95.546583 50.261871, -95.546345 50.262146, -95.54602 50.262369, -95.545611 50.262594, -95.5452 50.262765, -95.544788 50.262936, -95.544373 50.263053, -95.543961 50.263224, -95.543546 50.263341, -95.543135 50.263512, -95.542723 50.263683, -95.542314 50.263907, -95.541989 50.26413, -95.541667 50.264407, -95.541345 50.264684, -95.541107 50.264959, -95.540869 50.265234, -95.540631 50.265509, -95.540393 50.265785, -95.540155 50.26606, -95.539917 50.266335, -95.539679 50.26661, -95.539441 50.266885, -95.539203 50.26716, -95.538962 50.267381, -95.538634 50.26755, -95.538304 50.267665, -95.537889 50.267782, -95.537471 50.267845, -95.537056 50.267962, -95.536642 50.268079, -95.536227 50.268196, -95.535809 50.268259, -95.535391 50.268323, -95.534974 50.268386, -95.534556 50.268449, -95.534138 50.268512, -95.533718 50.268521, -95.5333 50.268584, -95.532882 50.268647, -95.532552 50.268762, -95.532224 50.268931, -95.531986 50.269206, -95.531748 50.269481, -95.531594 50.269755, -95.531356 50.27003, -95.531202 50.270303, -95.531048 50.270576, -95.530978 50.270847, -95.530908 50.271119, -95.530923 50.271388, -95.530937 50.271658, -95.530951 50.271927, -95.53105 50.272195, -95.531148 50.272463, -95.531331 50.272729, -95.531513 50.272995, -95.531696 50.273261, -95.531878 50.273526, -95.532145 50.27379, -95.532327 50.274056, -95.532594 50.27432, -95.532861 50.274584, -95.533043 50.27485, -95.533142 50.275118, -95.53324 50.275386, -95.533339 50.275653, -95.533437 50.275921, -95.533536 50.276189, -95.533634 50.276457, -95.533732 50.276724, -95.533747 50.276994, -95.533761 50.277263, -95.533859 50.277531, -95.533958 50.277799, -95.53414 50.278065, -95.534323 50.278331, -95.534506 50.278596, -95.534688 50.278862, -95.534871 50.279128, -95.534969 50.279396, -95.535152 50.279662, -95.535334 50.279928, -95.535433 50.280195, -95.535615 50.280461, -95.535798 50.280727, -95.535812 50.280997, -95.535743 50.281268, -95.535589 50.281541, -95.535266 50.281818, -95.53486 50.282097, -95.534454 50.282376, -95.534132 50.282652, -95.533893 50.282927, -95.533824 50.283199, -95.533838 50.283468, -95.534021 50.283734, -95.534203 50.284, -95.53447 50.284264, -95.534652 50.28453, -95.534835 50.284796, -95.535018 50.285062, -95.5352 50.285328, -95.535383 50.285594, -95.535565 50.285859, -95.535832 50.286123, -95.536099 50.286387, -95.53645 50.28665, -95.536801 50.286912, -95.537152 50.287174, -95.537418 50.287438, -95.537601 50.287704, -95.5377 50.287972, -95.537798 50.288239, -95.537897 50.288507, -95.537995 50.288775, -95.538093 50.289043, -95.538192 50.28931, -95.538206 50.28958, -95.538221 50.289849, -95.538235 50.290119, -95.538334 50.290387, -95.538432 50.290654, -95.538531 50.290922, -95.538629 50.29119)",m \ No newline at end of file 
+71224100223,2023-06-10T19:39:43Z,286.2983,15,,PIA1,"LINESTRING (-95.564991 50.223686, -95.564559 50.223479, -95.564133 50.223381, -95.563713 50.22339, -95.563296 50.223453, -95.562884 50.223624, -95.562473 50.223795, -95.562062 50.223966, -95.56165 50.224137, -95.561242 50.224362, -95.560917 50.224585, -95.560595 50.224862, -95.560271 50.225085, -95.559946 50.225308, -95.559946 50.225308, -95.559213 50.225756, -95.558804 50.225981, -95.558567 50.226256, -95.558413 50.226529, -95.558343 50.226801, -95.558274 50.227072, -95.558288 50.227342, -95.558303 50.227611, -95.558317 50.227881, -95.558416 50.228148, -95.558514 50.228416, -95.558697 50.228682, -95.558795 50.22895, -95.558978 50.229216, -95.559076 50.229483, -95.559259 50.229749, -95.559357 50.230017, -95.559455 50.230284, -95.55947 50.230554, -95.559484 50.230823, -95.559583 50.231091, -95.559765 50.231357, -95.559864 50.231625, -95.559878 50.231894, -95.559809 50.232166, -95.559571 50.232441, -95.559165 50.23272, -95.558757 50.232944, -95.558348 50.233169, -95.557939 50.233394, -95.55753 50.233619, -95.557206 50.233842, -95.556884 50.234119, -95.556562 50.234396, -95.556241 50.234673, -95.556003 50.234948, -95.555681 50.235225, -95.555443 50.2355, -95.555206 50.235775, -95.555136 50.236047, -95.555066 50.236318, -95.555081 50.236588, -95.555011 50.236859, -95.554941 50.23713, -95.554701 50.237351, -95.554376 50.237575, -95.554052 50.237798, -95.553727 50.238021, -95.553727 50.238021, -95.55308 50.238521, -95.552843 50.238796, -95.552521 50.239073, -95.552367 50.239346, -95.552297 50.239617, -95.552312 50.239887, -95.552326 50.240156, -95.552425 50.240424, -95.552439 50.240693, -95.552453 50.240963, -95.552468 50.241233, -95.552482 50.241502, -95.552497 50.241772, -95.552511 50.242041, -95.552525 50.242311, -95.552456 50.242582, -95.552299 50.242801, -95.552056 50.242969, -95.551728 50.243138, -95.551314 50.243255, -95.550899 50.243372, -95.550487 50.243543, -95.550076 50.243714, -95.549661 50.243831, -95.549249 50.244002, -95.548838 50.244173, -95.548423 50.24429, -95.548011 50.244461, -95.547603 50.244686, -95.547278 50.244909, -95.546953 50.245132, -95.546715 50.245407, -95.546561 50.24568, -95.546492 50.245951, -95.546422 50.246223, -95.546436 50.246492, -95.546367 50.246764, -95.546297 50.247035, -95.546143 50.247308, -95.54599 50.247582, -95.545752 50.247857, -95.545598 50.24813, -95.545444 50.248403, -95.545374 50.248674, -95.545305 50.248946, -95.545319 50.249215, -95.545333 50.249485, -95.545348 50.249754, -95.545446 50.250022, -95.545545 50.25029, -95.545727 50.250556, -95.54591 50.250822, -95.546176 50.251086, -95.546359 50.251351, -95.546625 50.251615, -95.546892 50.251879, -95.547075 50.252145, -95.547257 50.252411, -95.54744 50.252677, -95.547538 50.252945, -95.547553 50.253214, -95.547651 50.253482, -95.547581 50.253753, -95.547512 50.254025, -95.547442 50.254296, -95.547372 50.254568, -95.547303 50.254839, -95.547317 50.255108, -95.547247 50.25538, -95.547177 50.255651, -95.547192 50.255921, -95.547206 50.25619, -95.547221 50.25646, -95.547319 50.256728, -95.547418 50.256995, -95.547432 50.257265, -95.547446 50.257534, -95.547461 50.257804, -95.547475 50.258073, -95.547489 50.258343, -95.547504 50.258613, -95.547518 50.258882, -95.547449 50.259153, -95.547379 50.259425, -95.547309 50.259696, -95.547239 50.259968, -95.547086 50.260241, -95.547016 50.260512, -95.546946 50.260784, -95.546876 50.261055, -95.546807 50.261326, -95.546737 50.261598, -95.546583 50.261871, -95.546345 50.262146, -95.54602 50.262369, -95.545611 
50.262594, -95.5452 50.262765, -95.544788 50.262936, -95.544373 50.263053, -95.543961 50.263224, -95.543546 50.263341, -95.543135 50.263512, -95.542723 50.263683, -95.542314 50.263907, -95.541989 50.26413, -95.541667 50.264407, -95.541345 50.264684, -95.541107 50.264959, -95.540869 50.265234, -95.540631 50.265509, -95.540393 50.265785, -95.540155 50.26606, -95.539917 50.266335, -95.539679 50.26661, -95.539441 50.266885, -95.539203 50.26716, -95.538962 50.267381, -95.538634 50.26755, -95.538304 50.267665, -95.537889 50.267782, -95.537471 50.267845, -95.537056 50.267962, -95.536642 50.268079, -95.536227 50.268196, -95.535809 50.268259, -95.535391 50.268323, -95.534974 50.268386, -95.534556 50.268449, -95.534138 50.268512, -95.533718 50.268521, -95.5333 50.268584, -95.532882 50.268647, -95.532552 50.268762, -95.532224 50.268931, -95.531986 50.269206, -95.531748 50.269481, -95.531594 50.269755, -95.531356 50.27003, -95.531202 50.270303, -95.531048 50.270576, -95.530978 50.270847, -95.530908 50.271119, -95.530923 50.271388, -95.530937 50.271658, -95.530951 50.271927, -95.53105 50.272195, -95.531148 50.272463, -95.531331 50.272729, -95.531513 50.272995, -95.531696 50.273261, -95.531878 50.273526, -95.532145 50.27379, -95.532327 50.274056, -95.532594 50.27432, -95.532861 50.274584, -95.533043 50.27485, -95.533142 50.275118, -95.53324 50.275386, -95.533339 50.275653, -95.533437 50.275921, -95.533536 50.276189, -95.533634 50.276457, -95.533732 50.276724, -95.533747 50.276994, -95.533761 50.277263, -95.533859 50.277531, -95.533958 50.277799, -95.53414 50.278065, -95.534323 50.278331, -95.534506 50.278596, -95.534688 50.278862, -95.534871 50.279128, -95.534969 50.279396, -95.535152 50.279662, -95.535334 50.279928, -95.535433 50.280195, -95.535615 50.280461, -95.535798 50.280727, -95.535812 50.280997, -95.535743 50.281268, -95.535589 50.281541, -95.535266 50.281818, -95.53486 50.282097, -95.534454 50.282376, -95.534132 50.282652, -95.533893 50.282927, -95.533824 50.283199, -95.533838 50.283468, -95.534021 50.283734, -95.534203 50.284, -95.53447 50.284264, -95.534652 50.28453, -95.534835 50.284796, -95.535018 50.285062, -95.5352 50.285328, -95.535383 50.285594, -95.535565 50.285859, -95.535832 50.286123, -95.536099 50.286387, -95.53645 50.28665, -95.536801 50.286912, -95.537152 50.287174, -95.537418 50.287438, -95.537601 50.287704, -95.5377 50.287972, -95.537798 50.288239, -95.537897 50.288507, -95.537995 50.288775, -95.538093 50.289043, -95.538192 50.28931, -95.538206 50.28958, -95.538221 50.289849, -95.538235 50.290119, -95.538334 50.290387, -95.538432 50.290654, -95.538531 50.290922, -95.538629 50.29119)",m \ No newline at end of file diff --git a/tests/test_data/api_query_results_geojson.json b/tests/test_data/api_query_results_geojson.json index 597ae0f..cf08221 100644 --- a/tests/test_data/api_query_results_geojson.json +++ b/tests/test_data/api_query_results_geojson.json @@ -9,7 +9,7 @@ "time_str": "2023-06-10T19:39:43Z", "wse": "286.2983", "sword_version": "15", - "collection_shortname": "SWOT_L2_HR_RiverSP_2.0", + "collection_shortname": "", "crid": "PIA1", "wse_units": "m" }, diff --git a/tests/test_data/api_query_results_geojson_compact.json b/tests/test_data/api_query_results_geojson_compact.json index 6f2f455..daca4ae 100644 --- a/tests/test_data/api_query_results_geojson_compact.json +++ b/tests/test_data/api_query_results_geojson_compact.json @@ -18,7 +18,7 @@ "15" ], "collection_shortname": [ - "SWOT_L2_HR_RiverSP_2.0" + "" ], "crid": [ "PIA1" diff --git 
a/tests/test_data/api_query_results_geojson_lakes.json b/tests/test_data/api_query_results_geojson_lakes.json
index cc2aae7..c509ea6 100644
--- a/tests/test_data/api_query_results_geojson_lakes.json
+++ b/tests/test_data/api_query_results_geojson_lakes.json
@@ -10,7 +10,7 @@
         "wse": "-999999999999.0",
         "area_total": "-999999999999.0",
         "quality_f": "-999",
-        "collection_shortname": "SWOT_L2_HR_LakeSP_2.0",
+        "collection_shortname": "",
         "crid": "PIC0",
         "PLD_version": "105",
         "range_start_time": "2024-07-13T11:17:41Z",
diff --git a/tests/test_data/api_query_results_items_lake.json b/tests/test_data/api_query_results_items_lake.json
index 2730c5b..abc81fa 100644
--- a/tests/test_data/api_query_results_items_lake.json
+++ b/tests/test_data/api_query_results_items_lake.json
@@ -34,7 +34,7 @@
         "p_lat_units": "degrees_north",
         "xovr_cal_c_units": "m",
         "ds1_l": "-999999999999.0",
-        "collection_version": "2.0",
+        "collection_version": "",
         "p_ref_area_units": "km^2",
         "dry_trop_c": "-999999999999.0",
         "lake_id": "9120274662",
@@ -57,7 +57,7 @@
         "n_overlap": "no_data",
         "area_det_u_units": "km^2",
         "pole_tide": "-999999999999.0",
-        "collection_shortname": "SWOT_L2_HR_LakeSP_2.0",
+        "collection_shortname": "",
         "p_ref_wse": "-999999999999.0",
         "solid_tide_units": "m",
         "p_lon": "-52.412107",
@@ -134,7 +134,7 @@
         "p_lat_units": "degrees_north",
         "xovr_cal_c_units": "m",
         "ds1_l": "-999999999999.0",
-        "collection_version": "2.0",
+        "collection_version": "",
         "p_ref_area_units": "km^2",
         "dry_trop_c": "-999999999999.0",
         "lake_id": "9120274662",
@@ -157,7 +157,7 @@
         "n_overlap": "no_data",
         "area_det_u_units": "km^2",
         "pole_tide": "-999999999999.0",
-        "collection_shortname": "SWOT_L2_HR_LakeSP_2.0",
+        "collection_shortname": "",
         "p_ref_wse": "-999999999999.0",
         "solid_tide_units": "m",
         "p_lon": "-52.412107",
diff --git a/tests/test_hydrocron_database.py b/tests/test_hydrocron_database.py
index f6469ae..87d54f8 100644
--- a/tests/test_hydrocron_database.py
+++ b/tests/test_hydrocron_database.py
@@ -90,6 +90,6 @@ def test_track_table_mismatch():
             "track_table": "hydrocron-swot-prior-lake-track-ingest-table"
         }
     }
-    with pytest.raises(hydrocron.db.load_data.TableMisMatch) as e:
+    with pytest.raises(hydrocron.db.load_data.MissingTable) as e:
         hydrocron.db.load_data.granule_handler(event, None)
-    assert str(e.value) == "Error: Cannot load Observed or Unassigned Lake data into table: 'hydrocron-swot-prior-lake-table'"
\ No newline at end of file
+    assert str(e.value) == "Error: Cannot load Observed or Unassigned Lake data"
diff --git a/tests/test_io_swot_reach_node_shp.py b/tests/test_io_swot_reach_node_shp.py
index c7db389..dc5e821 100644
--- a/tests/test_io_swot_reach_node_shp.py
+++ b/tests/test_io_swot_reach_node_shp.py
@@ -20,7 +20,7 @@ def test_parse_from_filename_reach():
     Tests parsing cycle, pass, and time ranges from filename
     """
     filename_attrs = swot_shp.parse_from_filename(
-        constants.TEST_REACH_FILENAME)
+        constants.TEST_REACH_PATHNAME)
 
     assert filename_attrs['cycle_id'] == "548"
     assert filename_attrs['pass_id'] == "011"
@@ -28,8 +28,8 @@ def test_parse_from_filename_reach():
     assert filename_attrs['range_start_time'] == "2023-06-10T19:33:37Z"
     assert filename_attrs['range_end_time'] == "2023-06-10T19:33:44Z"
     assert filename_attrs['crid'] == "PIA1"
-    assert filename_attrs['collection_shortname'] == constants.SWOT_REACH_COLLECTION_NAME
-    assert filename_attrs['collection_version'] == constants.SWOT_REACH_COLLECTION_VERSION
+    assert filename_attrs['collection_shortname'] == constants.TEST_REACH_COLLECTION_NAME
+    assert filename_attrs['collection_version'] == ""
     assert filename_attrs['granuleUR'] == constants.TEST_REACH_FILENAME
     assert datetime.strptime(filename_attrs['ingest_time'], "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=pytz.utc) - datetime.now(timezone.utc) <= timedelta(minutes=5)
 
@@ -39,7 +39,7 @@ def test_parse_from_filename_lake():
     Tests parsing cycle, pass, and time ranges from filename
     """
     filename_attrs = swot_shp.parse_from_filename(
-        constants.TEST_PLAKE_FILENAME)
+        constants.TEST_PLAKE_PATHNAME)
 
     assert filename_attrs['cycle_id'] == "018"
     assert filename_attrs['pass_id'] == "100"
@@ -47,8 +47,8 @@ def test_parse_from_filename_lake():
     assert filename_attrs['range_start_time'] == "2024-07-13T11:17:41Z"
     assert filename_attrs['range_end_time'] == "2024-07-13T11:20:27Z"
     assert filename_attrs['crid'] == "PIC0"
-    assert filename_attrs['collection_shortname'] == constants.SWOT_PRIOR_LAKE_COLLECTION_NAME
-    assert filename_attrs['collection_version'] == constants.SWOT_PRIOR_LAKE_COLLECTION_VERSION
+    assert filename_attrs['collection_shortname'] == constants.TEST_PLAKE_COLLECTION_NAME
+    assert filename_attrs['collection_version'] == ""
     assert filename_attrs['granuleUR'] == constants.TEST_PLAKE_FILENAME
     assert datetime.strptime(filename_attrs['ingest_time'], "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=pytz.utc) - datetime.now(timezone.utc) <= timedelta(minutes=5)
 
diff --git a/tests/test_track_ingest.py b/tests/test_track_ingest.py
index 7e6c583..497e0f4 100644
--- a/tests/test_track_ingest.py
+++ b/tests/test_track_ingest.py
@@ -56,7 +56,7 @@ def test_get_granule_ur(track_ingest_fixture):
 
     data_repository = DynamoDataRepository(connection.dynamodb_resource)
 
-    table_name = constants.SWOT_REACH_TABLE_NAME
+    table_name = constants.API_TEST_REACH_TABLE_NAME
     granule_ur = "SWOT_L2_HR_RiverSP_Reach_020_149_NA_20240825T231711_20240825T231722_PIC0_01.zip"
 
     actual_data = data_repository.get_granule_ur(table_name, granule_ur)
@@ -137,7 +137,7 @@ def test_get_status(track_ingest_fixture):
     from hydrocron.api.data_access.db import DynamoDataRepository
     hydrocron_table = DynamoDataRepository(hydrocron.utils.connection._dynamodb_resource)
 
-    items = hydrocron_table.get_status(constants.SWOT_REACH_TRACK_INGEST_TABLE_NAME, "to_ingest")
+    items = hydrocron_table.get_status(constants.TEST_REACH_TRACK_INGEST_TABLE_NAME, "to_ingest")
     expected = [{
         "granuleUR": "SWOT_L2_HR_RiverSP_Reach_020_149_NA_20240825T231711_20240825T231722_PIC0_01.zip",
         "revision_date": "2024-05-22T19:15:44.572Z",
@@ -161,7 +161,7 @@ def test_get_series_granule_ur(track_ingest_fixture):
     from hydrocron.api.data_access.db import DynamoDataRepository
     hydrocron_table = DynamoDataRepository(hydrocron.utils.connection._dynamodb_resource)
 
-    table_name = constants.SWOT_REACH_TABLE_NAME
+    table_name = constants.API_TEST_REACH_TABLE_NAME
     feature_name = "reach_id"
     granule_ur = "SWOT_L2_HR_RiverSP_Reach_020_149_NA_20240825T231711_20240825T231722_PIC0_01.zip"
     items = hydrocron_table.get_series_granule_ur(table_name, feature_name, granule_ur)
@@ -182,8 +182,8 @@ def test_query_ingest(track_ingest_fixture):
     track._query_for_granule_ur = MagicMock(name="_query_for_granule_ur")
     track._query_for_granule_ur.return_value = "s3://podaac-swot-ops-cumulus-protected/SWOT_L2_HR_RiverSP_2.0/SWOT_L2_HR_RiverSP_Reach_020_149_NA_20240825T231711_20240825T231722_PIC0_01.zip"
 
-    hydrocron_track_table = constants.SWOT_REACH_TRACK_INGEST_TABLE_NAME
-    hydrocron_table = constants.SWOT_REACH_TABLE_NAME
+    hydrocron_track_table = constants.TEST_REACH_TRACK_INGEST_TABLE_NAME
+    hydrocron_table = constants.API_TEST_REACH_TABLE_NAME
     track.query_track_ingest(hydrocron_track_table, hydrocron_table)
 
     expected = [{
@@ -213,7 +213,7 @@ def test_query_ingest_to_ingest(track_ingest_fixture):
         "SWOT_L2_HR_RiverSP_Reach_020_149_NA_20240825T231711_20240825T231722_PIC0_01.zip"
     )
 
-    track_reach_table = HydrocronTable(hydrocron.utils.connection._dynamodb_resource, constants.SWOT_REACH_TRACK_INGEST_TABLE_NAME)
+    track_reach_table = HydrocronTable(hydrocron.utils.connection._dynamodb_resource, constants.TEST_REACH_TRACK_INGEST_TABLE_NAME)
     track_ingest_record = [{
         "granuleUR": os.path.basename(TEST_SHAPEFILE_PATH_REACH_TRACK),
         "revision_date": "2024-05-22T19:15:44.572Z",
@@ -230,8 +230,8 @@ def test_query_ingest_to_ingest(track_ingest_fixture):
     track._query_for_granule_ur = MagicMock(name="_query_for_granule_ur")
     track._query_for_granule_ur.return_value = "s3://podaac-swot-ops-cumulus-protected/SWOT_L2_HR_RiverSP_2.0/SWOT_L2_HR_RiverSP_Reach_020_149_NA_20240825T231711_20240825T231722_PIC0_01.zip"
 
-    hydrocron_track_table = constants.SWOT_REACH_TRACK_INGEST_TABLE_NAME
-    hydrocron_table = constants.SWOT_REACH_TABLE_NAME
+    hydrocron_track_table = constants.TEST_REACH_TRACK_INGEST_TABLE_NAME
+    hydrocron_table = constants.API_TEST_REACH_TABLE_NAME
     track.query_track_ingest(hydrocron_track_table, hydrocron_table)
 
     expected = [{
@@ -267,10 +267,10 @@ def test_update_track_to_ingest(track_ingest_fixture):
         "actual_feature_count": 0,
         "status": "to_ingest"
     }]
-    track.update_track_ingest(constants.SWOT_REACH_TRACK_INGEST_TABLE_NAME)
+    track.update_track_ingest(constants.TEST_REACH_TRACK_INGEST_TABLE_NAME)
 
     dynamodb = hydrocron.utils.connection._dynamodb_resource
-    table = dynamodb.Table(constants.SWOT_REACH_TRACK_INGEST_TABLE_NAME)
+    table = dynamodb.Table(constants.TEST_REACH_TRACK_INGEST_TABLE_NAME)
     table.load()
     actual_item = table.query(
         KeyConditionExpression=(Key("granuleUR").eq("SWOT_L2_HR_RiverSP_Reach_010_177_NA_20240131T074748_20240131T074759_PIC0_01.zip"))
@@ -300,10 +300,10 @@ def test_update_track_ingested(track_ingest_fixture):
         "expected_feature_count":664,
         "actual_feature_count": 664,
     }]
-    track.update_track_ingest(constants.SWOT_REACH_TRACK_INGEST_TABLE_NAME)
+    track.update_track_ingest(constants.TEST_REACH_TRACK_INGEST_TABLE_NAME)
 
     dynamodb = hydrocron.utils.connection._dynamodb_resource
-    table = dynamodb.Table(constants.SWOT_REACH_TRACK_INGEST_TABLE_NAME)
+    table = dynamodb.Table(constants.TEST_REACH_TRACK_INGEST_TABLE_NAME)
     table.load()
     actual_item = table.query(
         KeyConditionExpression=(Key("granuleUR").eq("SWOT_L2_HR_RiverSP_Reach_020_149_NA_20240825T231711_20240825T231722_PIC0_01.zip"))