#! /bin/bash
# Deploys or deletes broker Cloud Functions and their supporting resources
# (the alert Avro bucket and its Pub/Sub notification topic).
# This script will not delete Cloud Functions that are in production.
#
# Usage: deploy.sh [testid] [teardown] [survey] [versiontag] [region]

testid="${1:-test}"
# "False" uses production resources
# any other string will be appended to the names of all resources
teardown="${2:-False}"
# "True" tears down/deletes resources, else setup
survey="${3:-lsst}"
# name of the survey this broker instance will ingest
versiontag="${4:-v3_3}"
region="${5:-us-central1}"
PROJECT_ID=$GOOGLE_CLOUD_PROJECT # get the environment variable

#--- GCP resources used in this script
avro_bucket="${PROJECT_ID}-${survey}_alerts_${versiontag}"
avro_topic="projects/${PROJECT_ID}/topics/${survey}-alert_avros"
ps_to_gcs_trigger_topic="${survey}-alerts_raw"
ps_to_gcs_CF_name="${survey}-upload_bytes_to_bucket"

# use test resources, if requested
if [ "${testid}" != "False" ]; then
    avro_bucket="${avro_bucket}-${testid}"
    avro_topic="${avro_topic}-${testid}"
    ps_to_gcs_trigger_topic="${ps_to_gcs_trigger_topic}-${testid}"
    ps_to_gcs_CF_name="${ps_to_gcs_CF_name}-${testid}"
fi

if [ "${teardown}" = "True" ]; then
    # ensure that we do not teardown production resources
    if [ "${testid}" != "False" ]; then
        gsutil rm -r "gs://${avro_bucket}"
        gcloud pubsub topics delete "${avro_topic}"
        gcloud pubsub topics delete "${ps_to_gcs_trigger_topic}"
        # --region must match the deploy below; --quiet skips the interactive
        # confirmation prompt, which would hang a non-interactive teardown
        gcloud functions delete "${ps_to_gcs_CF_name}" --region "${region}" --quiet
    fi

else # Deploy the Cloud Functions

    #--- Create the bucket that will store the alerts
    gsutil mb -l "${region}" "gs://${avro_bucket}"
    gsutil uniformbucketlevelaccess set on "gs://${avro_bucket}"
    gsutil requesterpays set on "gs://${avro_bucket}"
    # public read access; requester-pays (above) bills downloads to the caller
    gcloud storage buckets add-iam-policy-binding "gs://${avro_bucket}" \
        --member="allUsers" \
        --role="roles/storage.objectViewer"

    #--- Setup the Pub/Sub notifications on the Avro storage bucket
    # (gsutil notification create also creates the topic if it does not exist)
    echo
    echo "Configuring Pub/Sub notifications on GCS bucket..."
    trigger_event=OBJECT_FINALIZE
    format=json # json or none; if json, file metadata sent in message body
    gsutil notification create \
        -t "$avro_topic" \
        -e "$trigger_event" \
        -f "$format" \
        "gs://${avro_bucket}"

    #--- Pub/Sub -> Cloud Storage Avro cloud function
    echo "Deploying Cloud Function: ${ps_to_gcs_CF_name}"
    ps_to_gcs_entry_point="run"
    memory=512MB # standard 256MB is too small here (it was always on the edge)

    # --region pins the function to the requested region (previously the
    # "region" parameter was only applied to the bucket, so the function
    # silently went to the gcloud default region)
    gcloud functions deploy "${ps_to_gcs_CF_name}" \
        --entry-point "${ps_to_gcs_entry_point}" \
        --runtime python312 \
        --memory "${memory}" \
        --region "${region}" \
        --trigger-topic "${ps_to_gcs_trigger_topic}" \
        --set-env-vars TESTID="${testid}",SURVEY="${survey}",VERSIONTAG="${versiontag}",GCP_PROJECT="${PROJECT_ID}"
fi