Commit 42acd33

Remove --delete-source option from consolidate scripts.
1 parent 604ab1e commit 42acd33

3 files changed: 5 additions & 57 deletions

consolidator.py

Lines changed: 2 additions & 13 deletions
@@ -8,7 +8,7 @@
 import unicodecsv as csv
 from dotenv import load_dotenv
 
-from utils import configure_logging, delete_logs, download_logs, upload_log
+from utils import configure_logging, download_logs, upload_log
 
 # Load environment variables.
 load_dotenv()
@@ -103,9 +103,8 @@ def consolidate_logs(
     container_src="access-logs-json",
     container_dest="access-logs",
     container_dest_errors="error-logs",
-    delete_source=False,
 ):
-    """Download logs for the specified timestamp and services, consolidate, upload and optionally delete the source logs."""
+    """Download logs for the specified timestamp and services, consolidate and upload the source logs."""
     if SENTRY_CRON_URL:
         LOGGER.info("Signalling Sentry monitor (in progress)")
         requests.get(f"{SENTRY_CRON_URL}?status=in_progress")
@@ -124,9 +123,6 @@ def consolidate_logs(
     out_log_errors = consolidate_json_errors(timestamp, temp_dir.name, temp_dir.name)
     # Upload consolidated CSV errors to blob storage.
     upload_log(out_log_errors, container_dest_errors, CONN_STR, slow_connection=True)
-    # Optionally deleting JSON logs from blob storage.
-    if delete_source:
-        delete_logs(timestamp, hosts, container_src, CONN_STR, True)
 
     if SENTRY_CRON_URL:
         LOGGER.info("Signalling Sentry monitor (completed)")
@@ -175,18 +171,11 @@ def consolidate_logs(
         action="store",
         required=False,
     )
-    parser.add_argument(
-        "--delete-source",
-        help="Delete the source CSV after processing (optional)",
-        action="store_true",
-        required=False,
-    )
     args = parser.parse_args()
     consolidate_logs(
         timestamp=args.timestamp,
         hosts=args.hosts,
         container_src=args.container,
         container_dest=args.destination_container,
         container_dest_errors=args.destination_container_errors,
-        delete_source=args.delete_source,
     )
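
With the flag gone, consolidate_logs no longer accepts a delete_source argument. A minimal sketch of a direct call after this change (the timestamp and host values are illustrative assumptions; the container names are the defaults from the signature above):

from consolidator import consolidate_logs

# Illustrative values only; in the script these come from argparse.
consolidate_logs(
    timestamp="20240101",              # assumed timestamp format
    hosts="host1,host2",               # assumed comma-separated host list
    container_src="access-logs-json",  # defaults from the function signature
    container_dest="access-logs",
    container_dest_errors="error-logs",
)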

consolidator_fastly.py

Lines changed: 3 additions & 13 deletions
@@ -5,7 +5,7 @@
 import unicodecsv as csv
 from dotenv import load_dotenv
 
-from utils import configure_logging, delete_logs, download_logs, upload_log
+from utils import configure_logging, download_logs, upload_log
 
 # Load environment variables.
 load_dotenv()
@@ -47,8 +47,8 @@ def consolidate_logfiles(timestamp, source_dir, destination_dir):
     return out_log
 
 
-def consolidate_fastly_logs(timestamp, services, container_src="fastly", container_dest="fastly", delete_source=False):
-    """Download logs for the specified timestamp and services, consolidate, upload and optionally delete the source logs."""
+def consolidate_fastly_logs(timestamp, services, container_src="fastly", container_dest="fastly"):
+    """Download logs for the specified timestamp and services, consolidate and upload the source logs."""
     # Use a temporary directory to download logs.
     temp_dir = TemporaryDirectory()
     # Download Fastly logs.
@@ -58,9 +58,6 @@ def consolidate_fastly_logs(timestamp, services, container_src="fastly", contain
     out_log = consolidate_logfiles(timestamp, temp_dir.name, temp_dir.name)
     # Upload consolidated CSV log to blob storage.
     upload_log(out_log, container_dest, CONN_STR)
-    # Optionally deleting source logs from blob storage.
-    if delete_source:
-        delete_logs(timestamp, services, container_src, CONN_STR)
 
 
 if __name__ == "__main__":
@@ -97,17 +94,10 @@ def consolidate_fastly_logs(timestamp, services, container_src="fastly", contain
         action="store",
         required=False,
     )
-    parser.add_argument(
-        "--delete-source",
-        help="Delete the source logs after processing (optional, default false)",
-        action="store_true",
-        required=False,
-    )
     args = parser.parse_args()
     consolidate_fastly_logs(
         timestamp=args.timestamp,
         services=args.services,
         container_src=args.container,
         container_dest=args.destination_container,
-        delete_source=args.delete_source,
    )
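
The Fastly script changes the same way; a sketch of the updated direct call (service names and timestamp are placeholder assumptions, container names are the signature defaults):

from consolidator_fastly import consolidate_fastly_logs

# Placeholder values; the script itself parses these from argparse.
consolidate_fastly_logs(
    timestamp="20240101",          # assumed timestamp format
    services="serviceA,serviceB",  # assumed comma-separated Fastly service list
    container_src="fastly",        # defaults from the function signature
    container_dest="fastly",
)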

utils.py

Lines changed: 0 additions & 31 deletions
@@ -96,37 +96,6 @@ def download_logs(
     return True
 
 
-def delete_logs(timestamp, hosts, container_name, conn_str, nginx_host_log=False, enable_logging=True):
-    """Given the passed in timestamp and hosts list, delete blobs from the container."""
-    if enable_logging:
-        logger = logging.getLogger()
-
-    container_client = ContainerClient.from_connection_string(conn_str, container_name)
-    log_list = []
-    hosts_list = hosts.split(",")
-
-    for host in hosts_list:
-        if nginx_host_log:
-            blob_list = container_client.list_blobs(name_starts_with=f"{host}/nginx_access.{timestamp}")
-        else:
-            blob_list = container_client.list_blobs(name_starts_with=f"{host}/{timestamp}")
-        log_list += [b for b in blob_list]
-
-    for blob in log_list:
-        blob_client = BlobClient.from_connection_string(conn_str, container_name, blob.name)
-        if enable_logging:
-            logger.info(f"Deleting blob {blob.name} from {container_name} container")
-        try:
-            blob_client.delete_blob()
-        except Exception as e:
-            if enable_logging:
-                logger.error(f"Exception during deletion of {blob.name}, aborting")
-                logger.exception(e)
-            return
-
-    return True
-
-
 def upload_log(source_path, container_name, conn_str, overwrite=True, enable_logging=True, blob_name="", slow_connection=False):
     """Upload a single log at `source_path` to Azure blob storage (`blob_name` destination name is optional)."""
     if not blob_name: