#! /bin/bash
# Deploys or deletes broker Cloud Run service
# This script will not delete a Cloud Run service that is in production

# "False" uses production resources
# any other string will be appended to the names of all resources
testid="${1:-test}"
# "True" tears down/deletes resources, else setup
teardown="${2:-False}"
# name of the survey this broker instance will ingest
survey="${3:-lsst}"
region="${4:-us-central1}"
# get environment variables (GOOGLE_CLOUD_PROJECT must be set in the caller's environment)
PROJECT_ID="${GOOGLE_CLOUD_PROJECT}"
PROJECT_NUMBER=$(gcloud projects describe "${PROJECT_ID}" --format="value(projectNumber)")

MODULE_NAME="oracle"  # lower case required by cloud run
ROUTE_RUN="/"         # url route that will trigger main.run()

# Build a GCP resource name from a base name, appending the testid suffix
# unless we are running against production resources (testid == "False").
# Reads globals: testid, survey. Writes the result to stdout.
define_GCP_resources() {
    local name="$1"
    local suffix=""

    if [ "$testid" != "False" ]; then
        case "$name" in
            "$survey") suffix="_${testid}" ;;  # complies with BigQuery naming conventions
            *)         suffix="-${testid}" ;;
        esac
    fi

    printf '%s%s\n' "$name" "$suffix"
}

#--- GCP resources used in this script
artifact_registry_repo=$(define_GCP_resources "${survey}-cloud-run-services")

# SCONE import topic/subscription and its dead-letter pair
topic_bigquery_import_scone=$(define_GCP_resources "${survey}-bigquery-import-SCONE")
subscription_bigquery_import_scone="${topic_bigquery_import_scone}"  # BigQuery subscription
deadletter_topic_bigquery_import_scone=$(define_GCP_resources "${survey}-bigquery-import-SCONE-deadletter")
deadletter_subscription_bigquery_import_scone="${deadletter_topic_bigquery_import_scone}"

# classifications import topic/subscription and its dead-letter pair
topic_bigquery_import_classifications=$(define_GCP_resources "${survey}-bigquery-import-classifications")
subscription_bigquery_import_classifications="${topic_bigquery_import_classifications}"  # BigQuery subscription
deadletter_topic_bigquery_import_classifications=$(define_GCP_resources "${survey}-bigquery-import-classifications-deadletter")
deadletter_subscription_bigquery_import_classifications="${deadletter_topic_bigquery_import_classifications}"

# alert stream in and module output
trigger_topic=$(define_GCP_resources "${survey}-alerts")
ps_input_subscrip="${trigger_topic}"  # pub/sub subscription used to trigger cloud run module
ps_output_topic=$(define_GCP_resources "${survey}-SCONE")

# additional GCP resources & variables used in this script
bq_dataset=$(define_GCP_resources "${survey}")
scone_table="SCONE"
classifications_table="classifications"
cr_module_name=$(define_GCP_resources "${survey}-${MODULE_NAME}")  # lower case required by Cloud Run
runinvoker_svcact="cloud-run-invoker@${PROJECT_ID}.iam.gserviceaccount.com"

if [ "${teardown}" = "True" ]; then
    # ensure that we do not teardown production resources (testid == "False")
    if [ "${testid}" != "False" ]; then
        gcloud pubsub topics delete "${ps_output_topic}"
        gcloud pubsub topics delete "${topic_bigquery_import_scone}"
        gcloud pubsub topics delete "${topic_bigquery_import_classifications}"
        gcloud pubsub topics delete "${deadletter_topic_bigquery_import_scone}"
        gcloud pubsub topics delete "${deadletter_topic_bigquery_import_classifications}"
        gcloud pubsub subscriptions delete "${ps_input_subscrip}"
        gcloud pubsub subscriptions delete "${subscription_bigquery_import_scone}"
        gcloud pubsub subscriptions delete "${subscription_bigquery_import_classifications}"
        gcloud pubsub subscriptions delete "${deadletter_subscription_bigquery_import_scone}"
        gcloud pubsub subscriptions delete "${deadletter_subscription_bigquery_import_classifications}"
        gcloud run services delete "${cr_module_name}" --region "${region}"
    fi

else  # Deploy the Cloud Run service

    #--- Deploy Cloud Run service
    echo "Configuring Pub/Sub resources for classify_scone Cloud Run service..."
    gcloud pubsub topics create "${ps_output_topic}"
    gcloud pubsub topics create "${deadletter_topic_bigquery_import_scone}"
    gcloud pubsub topics create "${deadletter_topic_bigquery_import_classifications}"
    gcloud pubsub topics create "${topic_bigquery_import_scone}"
    gcloud pubsub topics create "${topic_bigquery_import_classifications}"
    gcloud pubsub subscriptions create "${deadletter_subscription_bigquery_import_scone}" --topic="${deadletter_topic_bigquery_import_scone}"
    gcloud pubsub subscriptions create "${deadletter_subscription_bigquery_import_classifications}" --topic="${deadletter_topic_bigquery_import_classifications}"

    # in order to create BigQuery subscriptions, ensure that the following service account:
    # service-<project number>@gcp-sa-pubsub.iam.gserviceaccount.com" has the
    # bigquery.dataEditor role for each table
    PUBSUB_SERVICE_ACCOUNT="service-${PROJECT_NUMBER}@gcp-sa-pubsub.iam.gserviceaccount.com"
    roleid="roles/bigquery.dataEditor"
    bq add-iam-policy-binding \
        --member="serviceAccount:${PUBSUB_SERVICE_ACCOUNT}" \
        --role="${roleid}" \
        --table=true "${PROJECT_ID}:${bq_dataset}.${scone_table}"
    # BUGFIX: this binding was previously granted only on the SCONE table; without it
    # on the classifications table, creating its BigQuery subscription can fail with
    # a permissions error
    bq add-iam-policy-binding \
        --member="serviceAccount:${PUBSUB_SERVICE_ACCOUNT}" \
        --role="${roleid}" \
        --table=true "${PROJECT_ID}:${bq_dataset}.${classifications_table}"
    gcloud pubsub subscriptions create "${subscription_bigquery_import_scone}" \
        --topic="${topic_bigquery_import_scone}" \
        --bigquery-table="${PROJECT_ID}:${bq_dataset}.${scone_table}" \
        --use-table-schema \
        --dead-letter-topic="${deadletter_topic_bigquery_import_scone}" \
        --max-delivery-attempts=5 \
        --dead-letter-topic-project="${PROJECT_ID}"
    gcloud pubsub subscriptions create "${subscription_bigquery_import_classifications}" \
        --topic="${topic_bigquery_import_classifications}" \
        --bigquery-table="${PROJECT_ID}:${bq_dataset}.${classifications_table}" \
        --use-table-schema \
        --dead-letter-topic="${deadletter_topic_bigquery_import_classifications}" \
        --max-delivery-attempts=5 \
        --dead-letter-topic-project="${PROJECT_ID}"

    # this allows dead-lettered messages to be forwarded from the BigQuery subscription to the dead letter topic
    # and it allows dead-lettered messages to be published to the dead letter topic.
    gcloud pubsub topics add-iam-policy-binding "${deadletter_topic_bigquery_import_scone}" \
        --member="serviceAccount:${PUBSUB_SERVICE_ACCOUNT}" \
        --role="roles/pubsub.publisher"
    gcloud pubsub subscriptions add-iam-policy-binding "${subscription_bigquery_import_scone}" \
        --member="serviceAccount:${PUBSUB_SERVICE_ACCOUNT}" \
        --role="roles/pubsub.subscriber"
    gcloud pubsub topics add-iam-policy-binding "${deadletter_topic_bigquery_import_classifications}" \
        --member="serviceAccount:${PUBSUB_SERVICE_ACCOUNT}" \
        --role="roles/pubsub.publisher"
    gcloud pubsub subscriptions add-iam-policy-binding "${subscription_bigquery_import_classifications}" \
        --member="serviceAccount:${PUBSUB_SERVICE_ACCOUNT}" \
        --role="roles/pubsub.subscriber"

    echo "Creating container image and deploying to Cloud Run..."
    moduledir="."  # deploys what's in our current directory
    config="${moduledir}/cloudbuild.yaml"
    # capture the deployed service URL from the Cloud Build log output
    url=$(gcloud builds submit --config="${config}" \
        --substitutions="_SURVEY=${survey},_TESTID=${testid},_MODULE_NAME=${cr_module_name},_REPOSITORY=${artifact_registry_repo}" \
        --region="${region}" \
        "${moduledir}" | sed -n 's/^Step #2: Service URL: \(.*\)$/\1/p')

    # ensure the Cloud Run service has the necessary permissions
    # BUGFIX: --region was missing; without it this command fails (or prompts)
    # when no default run/region is configured
    role="roles/run.invoker"
    gcloud run services add-iam-policy-binding "${cr_module_name}" \
        --region="${region}" \
        --member="serviceAccount:${runinvoker_svcact}" \
        --role="${role}"

    echo "Creating trigger subscription for Cloud Run..."
    # WARNING: This is set to retry failed deliveries. If there is a bug in main.py this will
    # retry indefinitely, until the message is deleted manually.
    gcloud pubsub subscriptions create "${ps_input_subscrip}" \
        --topic "${trigger_topic}" \
        --topic-project "${PROJECT_ID}" \
        --ack-deadline=600 \
        --push-endpoint="${url}${ROUTE_RUN}" \
        --push-auth-service-account="${runinvoker_svcact}"
fi