|
1 | 1 | #! /bin/bash |
2 | | -# Deploys or deletes broker Cloud Run service |
3 | | -# This script will not delete a Cloud Run service that is in production |
| 2 | +# Build the image, create ancillary resources, and deploy the module as a Cloud Run service. |
| 3 | +# |
| 4 | +# --------- Example usage ----------------------- |
| 5 | +# First, double check the values in env.yaml. Then: |
| 6 | +# $ gcloud auth ... |
| 7 | +# $ export PROJECT_ID=$(gcloud config get-value project)  # not set automatically by 'gcloud auth'; read it from the active gcloud config |
| 8 | +# $ bash deploy.sh |
| 9 | +# ----------------------------------------------- |
4 | 10 |
|
5 | | -# "False" uses production resources |
6 | | -# any other string will be appended to the names of all resources |
7 | | -testid="${1:-test}" |
8 | | -# "True" tearsdown/deletes resources, else setup |
9 | | -teardown="${2:-False}" |
10 | | -# name of the survey this broker instance will ingest |
11 | | -survey="${3:-lsst}" |
12 | | -region="${4:-us-central1}" |
13 | | -# get environment variables |
14 | | -PROJECT_ID=$GOOGLE_CLOUD_PROJECT |
15 | | -PROJECT_NUMBER=$(gcloud projects describe "$PROJECT_ID" --format="value(projectNumber)") |
# --------- Set environment variables -----------
# Load env.yaml and export each top-level key/value pair as an environment variable.
# Requires yq; the query below uses jq-style syntax (kislyuk/yq or mikefarah yq v4
# with 'yq -r' compatibility) — NOTE(review): confirm which yq flavor is expected.
if ! command -v yq >/dev/null 2>&1; then
    echo "Error: yq is required to parse env.yaml. Install it (e.g. 'pip install yq' or 'brew install yq') and retry." >&2
    exit 1
fi
if [ ! -f env.yaml ]; then
    echo "Error: env.yaml not found in the current directory." >&2
    exit 1
fi
# IFS='=' splits on the FIRST '='; any '=' inside a value is kept in $value.
while IFS='=' read -r key value; do
    export "$key=$value"
done < <(yq -r 'to_entries | .[] | .key + "=" + .value' env.yaml)
16 | 18 |
|
17 | | -MODULE_NAME="supernnova" # lower case required by cloud run |
18 | | -ROUTE_RUN="/" # url route that will trigger main.run() |
19 | | - |
20 | | -# function used to define GCP resources; appends testid if needed |
21 | | -define_GCP_resources() { |
22 | | - local base_name="$1" |
23 | | - local testid_suffix="" |
24 | | - |
25 | | - if [ "$testid" != "False" ]; then |
26 | | - if [ "$base_name" = "$survey" ]; then |
27 | | - testid_suffix="_${testid}" # complies with BigQuery naming conventions |
28 | | - else |
29 | | - testid_suffix="-${testid}" |
30 | | - fi |
# Ensure that all required environment variables are set, and export an
# underscore-prefixed COPY of each for use by cloudbuild.yaml (GCP requires
# user-defined substitution variables to start with "_").
# Arguments: names of required environment variables.
# Exits 1 with a message on stderr if any is unset or empty.
check_env_vars() {
    local vars=("$@")
    local var
    for var in "${vars[@]}"; do
        # ${!var} is bash indirect expansion: the value of the variable named by $var.
        if [ -z "${!var}" ]; then
            echo "Error: ${var} environment variable is not set." >&2
            exit 1
        fi
        # Copy the value into the prefixed variable. The previous
        # 'export "_${var}="' exported an EMPTY string, wiping the values
        # cloudbuild.yaml needs. Names already starting with "_" are left
        # as-is to avoid a double prefix (e.g. __SURVEY).
        if [[ "$var" != _* ]]; then
            export "_${var}=${!var}"
        fi
    done
}
check_env_vars PROJECT_ID _SURVEY _TESTID MODULE_NAME_STEM MODULE_ROUTE REGION REPOSITORY_STEM TRIGGER_TOPIC_STEM

# Construct and export additional environment variables for cloudbuild.yaml.
# Environment variables that will be used by cloudbuild.yaml must start with "_", per GCP's requirements.
# Assignment is split from 'export' so a construct-name.sh failure is not
# masked by the always-zero exit status of 'export' (ShellCheck SC2155).
_MODULE_NAME=$(construct-name.sh --stem "$MODULE_NAME_STEM") || { echo "Error: construct-name.sh failed for MODULE_NAME_STEM." >&2; exit 1; }
_REPOSITORY=$(construct-name.sh --stem "$REPOSITORY_STEM") || { echo "Error: construct-name.sh failed for REPOSITORY_STEM." >&2; exit 1; }
_TRIGGER_TOPIC=$(construct-name.sh --stem "$TRIGGER_TOPIC_STEM") || { echo "Error: construct-name.sh failed for TRIGGER_TOPIC_STEM." >&2; exit 1; }
export _MODULE_NAME _REPOSITORY _TRIGGER_TOPIC
# -----------------------------------------------
45 | 38 |
|
46 | | -# additional GCP resources & variables used in this script |
47 | | -bq_dataset=$(define_GCP_resources "${survey}") |
48 | | -supernnova_classifications_table="SuperNNova" |
49 | | -cr_module_name=$(define_GCP_resources "${survey}-${MODULE_NAME}") # lower case required by Cloud Run |
# --------- Project setup -----------------------
# [FIXME] This is a project setup task, so should be moved to a script dedicated to that.
# Grant the invoker service account permission to call the Cloud Run service.
# NOTE(review): this runs BEFORE the build/deploy step below, so it will fail if
# the service does not exist yet — confirm ordering, or move it after the build.
runinvoker_svcact="cloud-run-invoker@${PROJECT_ID}.iam.gserviceaccount.com"
# --region is required for regional Cloud Run services (the rest of this script
# consistently targets ${REGION}).
gcloud run services add-iam-policy-binding "${_MODULE_NAME}" \
    --region="${REGION}" \
    --member="serviceAccount:${runinvoker_svcact}" \
    --role="roles/run.invoker"
# -----------------------------------------------
51 | 47 |
|
# --------- Build -------------------------------
# Execute the build steps and capture the deployed service's URL.
echo "Executing cloudbuild.yaml..."

# Extract the Cloud Run service URL from 'gcloud builds submit' log output.
# NOTE(review): this is brittle — it assumes the deploy step is "Step #2" in
# cloudbuild.yaml; re-check the step index whenever the build config changes.
extract_service_url() {
    sed -n 's/^Step #2: Service URL: \(.*\)$/\1/p'
}

moduledir=$(dirname "$(readlink -f "$0")")  # Absolute path to the parent directory of this script.
url=$(gcloud builds submit \
    --config="${moduledir}/cloudbuild.yaml" \
    --region="${REGION}" \
    "${moduledir}" | extract_service_url
)
# Diagnose a failed parse here rather than letting an empty URL propagate
# silently into the push-subscription setup below.
if [ -z "${url}" ]; then
    echo "WARNING: could not parse the service URL from the build output." >&2
fi
# -----------------------------------------------
52 | 58 |
|
53 | | -if [ "${teardown}" = "True" ]; then |
54 | | - # ensure that we do not teardown production resources |
55 | | - if [ "${testid}" != "False" ]; then |
56 | | - gcloud pubsub topics delete "${ps_output_topic}" |
57 | | - gcloud pubsub topics delete "${topic_bigquery_import}" |
58 | | - gcloud pubsub topics delete "${deadletter_topic_bigquery_import}" |
59 | | - gcloud pubsub subscriptions delete "${ps_input_subscrip}" |
60 | | - gcloud pubsub subscriptions delete "${subscription_bigquery_import}" |
61 | | - gcloud pubsub subscriptions delete "${deadletter_subscription_bigquery_import}" |
62 | | - gcloud run services delete "${cr_module_name}" --region "${region}" |
63 | | - fi |
64 | | - |
65 | | -else # Deploy the Cloud Run service |
66 | | - |
67 | | -#--- Deploy Cloud Run service |
68 | | - echo "Configuring Pub/Sub resources for classify_snn Cloud Run service..." |
69 | | - gcloud pubsub topics create "${ps_output_topic}" |
70 | | - gcloud pubsub topics create "${topic_bigquery_import}" |
71 | | - gcloud pubsub topics create "${deadletter_topic_bigquery_import}" |
72 | | - gcloud pubsub subscriptions create "${deadletter_subscription_bigquery_import}" --topic="${deadletter_topic_bigquery_import}" |
73 | | - # in order to create BigQuery subscriptions, ensure that the following service account: |
74 | | - # service-<project number>@gcp-sa-pubsub.iam.gserviceaccount.com" has the |
75 | | - # bigquery.dataEditor role for each table |
76 | | - PUBSUB_SERVICE_ACCOUNT="service-${PROJECT_NUMBER}@gcp-sa-pubsub.iam.gserviceaccount.com" |
77 | | - roleid="roles/bigquery.dataEditor" |
78 | | - bq add-iam-policy-binding \ |
79 | | - --member="serviceAccount:${PUBSUB_SERVICE_ACCOUNT}" \ |
80 | | - --role="${roleid}" \ |
81 | | - --table=true "${PROJECT_ID}:${bq_dataset}.${supernnova_classifications_table}" |
82 | | - gcloud pubsub subscriptions create "${subscription_bigquery_import}" \ |
83 | | - --topic="${topic_bigquery_import}" \ |
84 | | - --bigquery-table="${PROJECT_ID}:${bq_dataset}.${supernnova_classifications_table}" \ |
85 | | - --use-table-schema \ |
86 | | - --dead-letter-topic="${deadletter_topic_bigquery_import}" \ |
87 | | - --max-delivery-attempts=5 \ |
88 | | - --dead-letter-topic-project="${PROJECT_ID}" |
89 | | - |
90 | | - # this allows dead-lettered messages to be forwarded from the BigQuery subscription to the dead letter topic |
91 | | - # and it allows dead-lettered messages to be published to the dead letter topic. |
92 | | - gcloud pubsub topics add-iam-policy-binding "${deadletter_topic_bigquery_import}" \ |
93 | | - --member="serviceAccount:$PUBSUB_SERVICE_ACCOUNT"\ |
94 | | - --role="roles/pubsub.publisher" |
95 | | - gcloud pubsub subscriptions add-iam-policy-binding "${subscription_bigquery_import}" \ |
96 | | - --member="serviceAccount:$PUBSUB_SERVICE_ACCOUNT"\ |
97 | | - --role="roles/pubsub.subscriber" |
98 | | - |
99 | | - echo "Creating container image and deploying to Cloud Run..." |
100 | | - moduledir="." # deploys what's in our current directory |
101 | | - config="${moduledir}/cloudbuild.yaml" |
102 | | - url=$(gcloud builds submit --config="${config}" \ |
103 | | - --substitutions="_SURVEY=${survey},_TESTID=${testid},_MODULE_NAME=${cr_module_name},_REPOSITORY=${artifact_registry_repo}" \ |
104 | | - --region="${region}" \ |
105 | | - "${moduledir}" | sed -n 's/^Step #2: Service URL: \(.*\)$/\1/p') |
106 | | - |
107 | | - # ensure the Cloud Run service has the necessary permisions |
108 | | - role="roles/run.invoker" |
109 | | - gcloud run services add-iam-policy-binding "${cr_module_name}" \ |
110 | | - --member="serviceAccount:${runinvoker_svcact}" \ |
111 | | - --role="${role}" |
112 | | - |
113 | | - echo "Creating trigger subscription for Cloud Run..." |
114 | | - # WARNING: This is set to retry failed deliveries. If there is a bug in main.py this will |
115 | | - # retry indefinitely, until the message is delete manually. |
116 | | - gcloud pubsub subscriptions create "${ps_input_subscrip}" \ |
117 | | - --topic "${trigger_topic}" \ |
118 | | - --topic-project "${PROJECT_ID}" \ |
119 | | - --ack-deadline=600 \ |
120 | | - --push-endpoint="${url}${ROUTE_RUN}" \ |
121 | | - --push-auth-service-account="${runinvoker_svcact}" |
122 | | -fi |
# --------- Finish build ------------------------
# [FIXME] Figure out how to include this in cloudbuild.yaml. It is here because we need the value of $url.
# Create the push subscription that will trigger the Cloud Run service.
echo "Creating trigger subscription for Cloud Run..."
# Refuse to create a push subscription with an empty endpoint — that would
# silently deliver alerts to a broken URL.
if [ -z "${url}" ]; then
    echo "Error: service URL is empty; cannot create the push subscription." >&2
    exit 1
fi
# [FIXME] Handle these retries better.
echo "WARNING: This is set to retry failed deliveries. If there is a bug in main.py this will" >&2
echo "         retry indefinitely, until the message is deleted manually." >&2
trigger_subscrip="$_TRIGGER_TOPIC"  # Subscription deliberately shares the trigger topic's name.
gcloud pubsub subscriptions create "${trigger_subscrip}" \
    --topic "${_TRIGGER_TOPIC}" \
    --topic-project "${PROJECT_ID}" \
    --ack-deadline=600 \
    --push-endpoint="${url}${MODULE_ROUTE}" \
    --push-auth-service-account="${runinvoker_svcact}"
# -----------------------------------------------
0 commit comments