Skip to content

Commit 1e25ca4

Browse files
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent 73fc445 commit 1e25ca4

File tree

12 files changed

+59
-49
lines changed

12 files changed

+59
-49
lines changed

pipelines/rj_cor/bot_semaforo/tasks.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -154,7 +154,7 @@ def current_date_time():
154154

155155
# Builds all alert messages
156156
alert = None
157-
thumbs_up_emoji = "\U0001F44D"
157+
thumbs_up_emoji = "\U0001f44d"
158158
current_minus_1h, current = current_date_time()
159159

160160
mask = (dataframe["initial_ts"] > current_minus_1h) & (
@@ -176,7 +176,7 @@ def current_date_time():
176176
)
177177
alert = "".join(filered_alerts["alert"].tolist())
178178

179-
traffic_light_emoji = "\U0001F6A6"
179+
traffic_light_emoji = "\U0001f6a6"
180180
msg_header = (
181181
traffic_light_emoji
182182
+ " CETRIO"

pipelines/rj_cor/meteorologia/precipitacao_inea/tasks.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -131,9 +131,9 @@ def treat_data(
131131
].copy()
132132

133133
# Replace all values bigger than 10000 on "altura_agua" to nan
134-
dfr_fluviometric.loc[
135-
dfr_fluviometric["altura_agua"] > 10000, "altura_agua"
136-
] = np.nan
134+
dfr_fluviometric.loc[dfr_fluviometric["altura_agua"] > 10000, "altura_agua"] = (
135+
np.nan
136+
)
137137

138138
fluviometric_cols_order = [
139139
"id_estacao",

pipelines/rj_cor/meteorologia/radar/precipitacao/src/data/process/process_ppi.py

Lines changed: 12 additions & 12 deletions
Original file line number | Diff line number | Diff line change
@@ -316,18 +316,18 @@ def process_ppi(
316316
: hdf[primary_key]["where"].attrs["nbins"],
317317
el_index,
318318
] = rhohv_data_array
319-
full_startazA_matrix[
320-
indices_dict[primary_key]["indices"], :, el_index
321-
] = startazA.reshape(startazA.shape[0], 1)
322-
full_stopazA_matrix[
323-
indices_dict[primary_key]["indices"], :, el_index
324-
] = stopazA.reshape(stopazA.shape[0], 1)
325-
full_startazT_matrix[
326-
indices_dict[primary_key]["indices"], :, el_index
327-
] = startazT.reshape(startazT.shape[0], 1)
328-
full_stopazT_matrix[
329-
indices_dict[primary_key]["indices"], :, el_index
330-
] = stopazT.reshape(stopazT.shape[0], 1)
319+
full_startazA_matrix[indices_dict[primary_key]["indices"], :, el_index] = (
320+
startazA.reshape(startazA.shape[0], 1)
321+
)
322+
full_stopazA_matrix[indices_dict[primary_key]["indices"], :, el_index] = (
323+
stopazA.reshape(stopazA.shape[0], 1)
324+
)
325+
full_startazT_matrix[indices_dict[primary_key]["indices"], :, el_index] = (
326+
startazT.reshape(startazT.shape[0], 1)
327+
)
328+
full_stopazT_matrix[indices_dict[primary_key]["indices"], :, el_index] = (
329+
stopazT.reshape(stopazT.shape[0], 1)
330+
)
331331

332332
full_elevation_matrix[:, :, el_index] = hdf[primary_key]["where"].attrs[
333333
"elangle"

pipelines/rj_cor/meteorologia/radar/precipitacao/src/predict_rain.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -114,9 +114,9 @@ def run_model_prediction(
114114
)
115115

116116
assert new_radar_data.nrays == NRAYS, f"nrays should be {NRAYS}."
117-
radar_data_dict[
118-
f"{specs_dict['process_type']}-{specs_dict['feature']}"
119-
] = new_radar_data
117+
radar_data_dict[f"{specs_dict['process_type']}-{specs_dict['feature']}"] = (
118+
new_radar_data
119+
)
120120

121121
radar_data_hdfs.append(radar_data_dict)
122122

Lines changed: 1 addition & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -1,3 +1,2 @@
11
# -*- coding: utf-8 -*-
2-
"""
3-
"""
2+
""" """

pipelines/rj_smtr/br_rj_riodejaneiro_diretorios/flows.py

Lines changed: 5 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -61,9 +61,11 @@
6161
LABELS = get_current_flow_labels()
6262

6363
table_params = task(
64-
lambda tables, exclude: [t for t in tables if t["table_id"] not in exclude]
65-
if exclude is not None
66-
else tables,
64+
lambda tables, exclude: (
65+
[t for t in tables if t["table_id"] not in exclude]
66+
if exclude is not None
67+
else tables
68+
),
6769
checkpoint=False,
6870
name="get_tables_to_run",
6971
)(tables=constants.DIRETORIO_MATERIALIZACAO_TABLE_PARAMS.value, exclude=exclude)

pipelines/rj_smtr/br_rj_riodejaneiro_rdo/tasks.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -120,9 +120,9 @@ def download_and_save_local_from_ftp(file_info: dict):
120120
)
121121

122122
# Set general local path to save file (bucket_modes: raw or staging)
123-
file_info[
124-
"local_path"
125-
] = f"""{base_path}/{table_id}/{file_info["partitions"]}/{file_info['filename']}.{{file_ext}}"""
123+
file_info["local_path"] = (
124+
f"""{base_path}/{table_id}/{file_info["partitions"]}/{file_info['filename']}.{{file_ext}}"""
125+
)
126126
# Get raw data
127127
file_info["raw_path"] = file_info["local_path"].format(
128128
bucket_mode="raw", file_ext="txt"

pipelines/rj_smtr/projeto_subsidio_sppo/tasks.py

Lines changed: 18 additions & 11 deletions
Original file line number | Diff line number | Diff line change
@@ -63,9 +63,9 @@ def subsidio_data_quality_check(
6363

6464
if mode == "pos":
6565
request_params["end_timestamp"] = f"""{params["end_date"]} 00:00:00"""
66-
request_params[
67-
"dataset_id"
68-
] = smtr_constants.SUBSIDIO_SPPO_DASHBOARD_DATASET_ID.value
66+
request_params["dataset_id"] = (
67+
smtr_constants.SUBSIDIO_SPPO_DASHBOARD_DATASET_ID.value
68+
)
6969

7070
checks_list = (
7171
smtr_constants.SUBSIDIO_SPPO_DATA_CHECKS_PRE_LIST.value
@@ -139,14 +139,21 @@ def subsidio_data_quality_check(
139139

140140
if not test_check:
141141
at_code_owners = [
142-
f' - <@{constants.OWNERS_DISCORD_MENTIONS.value[code_owner]["user_id"]}>\n'
143-
if constants.OWNERS_DISCORD_MENTIONS.value[code_owner]["type"] == "user"
144-
else f' - <@!{constants.OWNERS_DISCORD_MENTIONS.value[code_owner]["user_id"]}>\n'
145-
if constants.OWNERS_DISCORD_MENTIONS.value[code_owner]["type"]
146-
== "user_nickname"
147-
else f' - <#{constants.OWNERS_DISCORD_MENTIONS.value[code_owner]["user_id"]}>\n'
148-
if constants.OWNERS_DISCORD_MENTIONS.value[code_owner]["type"] == "channel"
149-
else f' - <@&{constants.OWNERS_DISCORD_MENTIONS.value[code_owner]["user_id"]}>\n'
142+
(
143+
f' - <@{constants.OWNERS_DISCORD_MENTIONS.value[code_owner]["user_id"]}>\n'
144+
if constants.OWNERS_DISCORD_MENTIONS.value[code_owner]["type"] == "user"
145+
else (
146+
f' - <@!{constants.OWNERS_DISCORD_MENTIONS.value[code_owner]["user_id"]}>\n'
147+
if constants.OWNERS_DISCORD_MENTIONS.value[code_owner]["type"]
148+
== "user_nickname"
149+
else (
150+
f' - <#{constants.OWNERS_DISCORD_MENTIONS.value[code_owner]["user_id"]}>\n'
151+
if constants.OWNERS_DISCORD_MENTIONS.value[code_owner]["type"]
152+
== "channel"
153+
else f' - <@&{constants.OWNERS_DISCORD_MENTIONS.value[code_owner]["user_id"]}>\n'
154+
)
155+
)
156+
)
150157
for code_owner in code_owners
151158
]
152159

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,2 +1,2 @@
11
# -*- coding: utf-8 -*-
2-
"""" Init file for execute_dbt_model module """
2+
""" " Init file for execute_dbt_model module"""

pipelines/utils/georeference/tasks.py

Lines changed: 8 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -115,12 +115,14 @@ def georeference_dataframe(
115115
locations.append(location)
116116

117117
geolocated_addresses = [
118-
{
119-
"latitude": location.latitude,
120-
"longitude": location.longitude,
121-
}
122-
if location is not None
123-
else {"latitude": None, "longitude": None}
118+
(
119+
{
120+
"latitude": location.latitude,
121+
"longitude": location.longitude,
122+
}
123+
if location is not None
124+
else {"latitude": None, "longitude": None}
125+
)
124126
for location in locations
125127
]
126128

0 commit comments

Comments (0)