
Commit 7878181
Use Cloud Build to create all resources
1 parent ffb76f3

File tree: 5 files changed (+233 −134)

  cloudbuild.yaml
  construct-name.sh
  create-ancillaries.sh
  deploy.sh
  env.yaml
cloudbuild.yaml
Lines changed: 49 additions & 22 deletions
@@ -1,29 +1,56 @@
-# https://cloud.google.com/build/docs/deploying-builds/deploy-cloud-run
-# containerize the module and deploy it to Cloud Run
+# --------------- References ------------------ #
+# Cloud Build Overview: https://cloud.google.com/build/docs/overview
+# Deploying to Cloud Run: https://cloud.google.com/build/docs/deploying-builds/deploy-cloud-run
+# Schema for this file: https://cloud.google.com/build/docs/build-config-file-schema
+#
+# --------------- Substitutions --------------- #
+substitutions:
+  _IMAGE_NAME: 'gcr.io/${PROJECT_ID}/${_REPOSITORY}/${_MODULE_NAME}'
+  _IMAGE_PATH: '${LOCATION}-docker.pkg.dev/${PROJECT_ID}/${_REPOSITORY}/${_IMAGE_NAME}'
+  # Different GCP services use different names for the same env variable:
+  # PROJECT_ID, GOOGLE_CLOUD_PROJECT, and GCP_PROJECT.
+  # We will use GCP_PROJECT as the env variable of our deployed Cloud Run service
+  # for consistency with Cloud Functions, which sets this variable automatically.
+  _MODULE_ENV: 'GCP_PROJECT=${PROJECT_ID},SURVEY=${_SURVEY},TESTID=${_TESTID}'
+#
+# --------------- Steps ----------------------- #
 steps:
-  # Build the image
+  # Ancillaries: Create ancillary resources.
+  - name: 'gcr.io/google.com/cloudsdktool/cloud-sdk'
+    id: Ancillaries
+    waitFor: ['-']
+    entrypoint: bash
+    args:
+      - '-c'
+      # Here we are just copying the needed files from the local machine.
+      # For automatic deployments from GitHub, clone the repo instead.
+      - |
+        cp create-ancillaries.sh construct-name.sh /workspace/
+        chmod +x /workspace/create-ancillaries.sh /workspace/construct-name.sh
+        /workspace/create-ancillaries.sh
+  # Build: Build the image.
   - name: 'gcr.io/cloud-builders/docker'
-    args: ['build', '-t', '${_REGION}-docker.pkg.dev/${PROJECT_ID}/${_REPOSITORY}/${_MODULE_IMAGE_NAME}', '.']
-  # Push the image to Artifact Registry
+    id: Build
+    waitFor: ['-']
+    args: ['build', '-t', '${_IMAGE_PATH}', '.']
+  # Push: Push the image to the repository.
   - name: 'gcr.io/cloud-builders/docker'
-    args: ['push', '${_REGION}-docker.pkg.dev/${PROJECT_ID}/${_REPOSITORY}/${_MODULE_IMAGE_NAME}']
-  # Deploy image to Cloud Run
+    id: Push
+    waitFor: ['Build']
+    args: ['push', '${_IMAGE_PATH}']
+  # Deploy: Deploy the Cloud Run service.
   - name: 'gcr.io/google.com/cloudsdktool/cloud-sdk'
+    id: Deploy
+    waitFor: ['Push'] # [CHECKME] Does this also need to wait for Ancillaries?
     entrypoint: gcloud
-    args: ['run', 'deploy', '${_MODULE_NAME}', '--image', '${_REGION}-docker.pkg.dev/${PROJECT_ID}/${_REPOSITORY}/${_MODULE_IMAGE_NAME}', '--region', '${_REGION}', '--set-env-vars', '${_ENV_VARS}']
+    args: ['run', 'deploy', '${_MODULE_NAME}', '--image', '${_IMAGE_PATH}', '--region', '${LOCATION}', '--set-env-vars', '${_MODULE_ENV}']
+#
+# --------------- Other ----------------------- #
 images:
-  - '${_REGION}-docker.pkg.dev/${PROJECT_ID}/${_REPOSITORY}/${_MODULE_IMAGE_NAME}'
-substitutions:
-  _SURVEY: 'lsst'
-  _TESTID: 'testid'
-  _MODULE_NAME: '${_SURVEY}-classify_with_SuperNNova-${_TESTID}'
-  _MODULE_IMAGE_NAME: 'gcr.io/${PROJECT_ID}/${_REPOSITORY}/${_MODULE_NAME}'
-  _REPOSITORY: 'cloud-run-services'
-  # cloud functions automatically sets the projectid env var using the name "GCP_PROJECT"
-  # use the same name here for consistency
-  # [TODO] PROJECT_ID is set in setup.sh. this is confusing and we should revisit the decision.
-  # i (Raen) think i didn't make it a substitution because i didn't want to set a default for it.
-  _ENV_VARS: 'GCP_PROJECT=${PROJECT_ID},SURVEY=${_SURVEY},TESTID=${_TESTID}'
-  _REGION: 'us-central1'
+  - '${_IMAGE_PATH}'
 options:
-  dynamic_substitutions: true
+  # Include all built-in and custom substitutions as env variables for all build steps.
+  automapSubstitutions: true
+  # Within user-defined substitutions, allow referencing of other variables and bash parameter expansion.
+  # https://cloud.google.com/build/docs/configuring-builds/use-bash-and-bindings-in-substitutions#bash_parameter_expansions
+  dynamic_substitutions: true
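The substitutions _SURVEY, _TESTID, _MODULE_NAME, _REPOSITORY, and _TRIGGER_TOPIC have no defaults in this config, so they must be supplied at submit time. A minimal sketch of a manual submission (the values below are placeholders, not part of this commit):

```bash
# Sketch: submit this config by hand with explicit substitutions (example values only).
gcloud builds submit \
    --config=cloudbuild.yaml \
    --region=us-central1 \
    --substitutions=_SURVEY=lsst,_TESTID=mytest,_MODULE_NAME=lsst-supernnova-mytest,_REPOSITORY=lsst-cloud-run-services-mytest,_TRIGGER_TOPIC=lsst-alerts-mytest \
    .
```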
construct-name.sh
Lines changed: 84 additions & 0 deletions
@@ -0,0 +1,84 @@
+#!/bin/bash
+
+_usage() {
+    echo "Usage: $0 -s|--stem <stem> [-g|--gcp-service <gcp_service>]"
+}
+
+_info() {
+    echo "Use '$(basename "$0") --help' for more information."
+}
+
+_help() {
+    echo "Construct the GCP resource name using the supplied options and the env vars SURVEY and TESTID."
+    echo
+    _usage
+    echo
+    echo "Options:"
+    echo "  -s, --stem <stem>   Name stem for the resource. SURVEY will be prepended and TESTID"
+    echo "                      appended (if not 'False')."
+    echo "  -g, --gcp-service <gcp_service>"
+    echo "                      Determines the separator. If the value is 'bigquery', the"
+    echo "                      separator will be '_'. Otherwise it is '-'."
+    echo
+    echo "Environment Variables:"
+    echo "  SURVEY   Required. Prepended to the resource name."
+    echo "  TESTID   Required. Appended to the resource name if not 'False'."
+}
+
+# Ensure that all required environment variables are set.
+check_env_vars() {
+    local vars=("$@")
+    for var in "${vars[@]}"; do
+        if [ -z "${!var}" ]; then
+            echo "Error: ${var} environment variable is not set."
+            exit 1
+        fi
+    done
+}
+check_env_vars SURVEY TESTID
+
+stem=""
+gcp_service=""
+
+while [[ $# -gt 0 ]]; do
+    key="$1"
+    case $key in
+        -s|--stem)
+            stem="$2"
+            shift
+            shift
+            ;;
+        -g|--gcp-service)
+            gcp_service="$(echo "$2" | tr '[:upper:]' '[:lower:]')"
+            shift
+            shift
+            ;;
+        -h|--help)
+            _help
+            exit 0
+            ;;
+        *)
+            echo "Invalid option: $1"
+            _info
+            exit 1
+            ;;
+    esac
+done
+
+if [ -z "$stem" ]; then
+    echo "Missing required option 'stem'."
+    _info
+    exit 1
+fi
+
+_sep="-"
+if [ "$gcp_service" = "bigquery" ]; then
+    _sep="_"
+fi
+
+_testid="${_sep}${TESTID}"
+if [ "$TESTID" = "False" ] || [ "$TESTID" = "false" ]; then
+    _testid=""
+fi
+
+echo "${SURVEY}${_sep}${stem}${_testid}"
create-ancillaries.sh
Lines changed: 29 additions & 0 deletions
@@ -0,0 +1,29 @@
+#! /bin/bash
+# Create ancillary resources that are needed by the Cloud Run service.
+# This script is intended to be run by Cloud Build.
+
+# Define resource names.
+# BigQuery
+bq_dataset=$(./construct-name.sh --stem "$_SURVEY" --gcp-service bigquery)
+bq_table_supernnova="supernnova"
+# Pub/Sub
+ps_topic_out=$(./construct-name.sh --stem "supernnova")
+ps_topic_bqimport=$(./construct-name.sh --stem "bigquery-import-supernnova")
+ps_topic_bqimport_deadletter=$(./construct-name.sh --stem "bigquery-import-supernnova-deadletter")
+ps_subscrip_trigger="$_TRIGGER_TOPIC"
+ps_subscrip_bqimport="$ps_topic_bqimport"
+ps_subscrip_bqimport_deadletter="$ps_topic_bqimport_deadletter"
+
+# Create the resources.
+gcloud pubsub topics create "${ps_topic_out}"
+gcloud pubsub topics create "${ps_topic_bqimport}"
+gcloud pubsub topics create "${ps_topic_bqimport_deadletter}"
+gcloud pubsub subscriptions create "${ps_subscrip_bqimport_deadletter}" --topic="${ps_topic_bqimport_deadletter}"
+# [FIXME] This assumes that the BigQuery dataset and table already exist.
+gcloud pubsub subscriptions create "${ps_subscrip_bqimport}" \
+    --topic="${ps_topic_bqimport}" \
+    --bigquery-table="${PROJECT_ID}:${bq_dataset}.${bq_table_supernnova}" \
+    --use-table-schema \
+    --dead-letter-topic="${ps_topic_bqimport_deadletter}" \
+    --max-delivery-attempts=5 \
+    --dead-letter-topic-project="${PROJECT_ID}"
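Per the [FIXME] above, the BigQuery dataset and table must exist before the BigQuery subscription can be created. A minimal sketch of creating them with the bq CLI, reusing the names constructed above (the schema file is an assumption, not part of this commit):

```bash
# Sketch: create the dataset and table targeted by the BigQuery subscription.
bq mk --dataset "${PROJECT_ID}:${bq_dataset}"
bq mk --table "${PROJECT_ID}:${bq_dataset}.${bq_table_supernnova}" ./supernnova_schema.json  # hypothetical schema file
```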
deploy.sh
Lines changed: 63 additions & 112 deletions
@@ -1,122 +1,73 @@
 #! /bin/bash
-# Deploys or deletes broker Cloud Run service
-# This script will not delete a Cloud Run service that is in production
+# Build the image, create ancillary resources, and deploy the module as a Cloud Run service.
+#
+# --------- Example usage -----------------------
+# First, double-check the values in env.yaml. Then:
+# $ gcloud auth ...
+# $ export PROJECT_ID=... (is this set automatically by gcloud auth?)
+# $ bash deploy.sh
+# -----------------------------------------------
 
-# "False" uses production resources
-# any other string will be appended to the names of all resources
-testid="${1:-test}"
-# "True" tearsdown/deletes resources, else setup
-teardown="${2:-False}"
-# name of the survey this broker instance will ingest
-survey="${3:-lsst}"
-region="${4:-us-central1}"
-# get environment variables
-PROJECT_ID=$GOOGLE_CLOUD_PROJECT
-PROJECT_NUMBER=$(gcloud projects describe "$PROJECT_ID" --format="value(projectNumber)")
+# --------- Set environment variables -----------
+# Load env.yaml and set the key/value pairs as environment variables.
+# [FIXME] This depends on yq. We need to provide instructions for installing it
+# or else just have the user export these manually.
+while IFS='=' read -r key value; do
+    export "$key=$value"
+done < <(yq -r 'to_entries | .[] | .key + "=" + .value' env.yaml)
 
-MODULE_NAME="supernnova" # lower case required by cloud run
-ROUTE_RUN="/" # url route that will trigger main.run()
-
-# function used to define GCP resources; appends testid if needed
-define_GCP_resources() {
-    local base_name="$1"
-    local testid_suffix=""
-
-    if [ "$testid" != "False" ]; then
-        if [ "$base_name" = "$survey" ]; then
-            testid_suffix="_${testid}" # complies with BigQuery naming conventions
-        else
-            testid_suffix="-${testid}"
+# Ensure that all required environment variables are set.
+check_env_vars() {
+    local vars=("$@")
+    for var in "${vars[@]}"; do
+        if [ -z "${!var}" ]; then
+            echo "Error: ${var} environment variable is not set."
+            exit 1
         fi
-
-    echo "${base_name}${testid_suffix}"
+        export "_${var}=${!var}"
+    done
 }
+check_env_vars PROJECT_ID _SURVEY _TESTID MODULE_NAME_STEM MODULE_ROUTE REGION REPOSITORY_STEM TRIGGER_TOPIC_STEM
 
-#--- GCP resources used in this script
-artifact_registry_repo=$(define_GCP_resources "${survey}-cloud-run-services")
-deadletter_topic_bigquery_import=$(define_GCP_resources "${survey}-bigquery-import-SuperNNova-deadletter")
-deadletter_subscription_bigquery_import="${deadletter_topic_bigquery_import}"
-ps_input_subscrip=$(define_GCP_resources "${survey}-alerts") # pub/sub subscription used to trigger cloud run module
-ps_output_topic=$(define_GCP_resources "${survey}-SuperNNova")
-subscription_bigquery_import=$(define_GCP_resources "${survey}-bigquery-import-SuperNNova") # BigQuery subscription
-topic_bigquery_import=$(define_GCP_resources "${survey}-bigquery-import-SuperNNova")
-trigger_topic=$(define_GCP_resources "${survey}-alerts")
+# Construct and export additional environment variables for cloudbuild.yaml.
+# Environment variables that will be used by cloudbuild.yaml must start with "_", per GCP's requirements.
+export _MODULE_NAME=$(./construct-name.sh --stem "$MODULE_NAME_STEM")
+export _REPOSITORY=$(./construct-name.sh --stem "$REPOSITORY_STEM")
+export _TRIGGER_TOPIC=$(./construct-name.sh --stem "$TRIGGER_TOPIC_STEM")
+# -----------------------------------------------
 
-# additional GCP resources & variables used in this script
-bq_dataset=$(define_GCP_resources "${survey}")
-supernnova_classifications_table="SuperNNova"
-cr_module_name=$(define_GCP_resources "${survey}-${MODULE_NAME}") # lower case required by Cloud Run
+# --------- Project setup -----------------------
+# [FIXME] This is a project setup task, so should be moved to a script dedicated to that.
+# Ensure the Cloud Run service has the necessary permissions.
 runinvoker_svcact="cloud-run-invoker@${PROJECT_ID}.iam.gserviceaccount.com"
+gcloud run services add-iam-policy-binding "${_MODULE_NAME}" \
+    --member="serviceAccount:${runinvoker_svcact}" \
+    --role="roles/run.invoker"
+# -----------------------------------------------
 
+# --------- Build -------------------------------
+# Execute the build steps.
+echo "Executing cloudbuild.yaml..."
+moduledir=$(dirname "$(readlink -f "$0")") # Absolute path to the parent directory of this script.
+url=$(gcloud builds submit \
+    --config="${moduledir}/cloudbuild.yaml" \
+    --region="${REGION}" \
+    "${moduledir}" | sed -n 's/^Step #3: Service URL: \(.*\)$/\1/p'
+)
+# -----------------------------------------------
 
-if [ "${teardown}" = "True" ]; then
-    # ensure that we do not teardown production resources
-    if [ "${testid}" != "False" ]; then
-        gcloud pubsub topics delete "${ps_output_topic}"
-        gcloud pubsub topics delete "${topic_bigquery_import}"
-        gcloud pubsub topics delete "${deadletter_topic_bigquery_import}"
-        gcloud pubsub subscriptions delete "${ps_input_subscrip}"
-        gcloud pubsub subscriptions delete "${subscription_bigquery_import}"
-        gcloud pubsub subscriptions delete "${deadletter_subscription_bigquery_import}"
-        gcloud run services delete "${cr_module_name}" --region "${region}"
-    fi
-
-else # Deploy the Cloud Run service
-
-    #--- Deploy Cloud Run service
-    echo "Configuring Pub/Sub resources for classify_snn Cloud Run service..."
-    gcloud pubsub topics create "${ps_output_topic}"
-    gcloud pubsub topics create "${topic_bigquery_import}"
-    gcloud pubsub topics create "${deadletter_topic_bigquery_import}"
-    gcloud pubsub subscriptions create "${deadletter_subscription_bigquery_import}" --topic="${deadletter_topic_bigquery_import}"
-    # in order to create BigQuery subscriptions, ensure that the following service account:
-    # service-<project number>@gcp-sa-pubsub.iam.gserviceaccount.com" has the
-    # bigquery.dataEditor role for each table
-    PUBSUB_SERVICE_ACCOUNT="service-${PROJECT_NUMBER}@gcp-sa-pubsub.iam.gserviceaccount.com"
-    roleid="roles/bigquery.dataEditor"
-    bq add-iam-policy-binding \
-        --member="serviceAccount:${PUBSUB_SERVICE_ACCOUNT}" \
-        --role="${roleid}" \
-        --table=true "${PROJECT_ID}:${bq_dataset}.${supernnova_classifications_table}"
-    gcloud pubsub subscriptions create "${subscription_bigquery_import}" \
-        --topic="${topic_bigquery_import}" \
-        --bigquery-table="${PROJECT_ID}:${bq_dataset}.${supernnova_classifications_table}" \
-        --use-table-schema \
-        --dead-letter-topic="${deadletter_topic_bigquery_import}" \
-        --max-delivery-attempts=5 \
-        --dead-letter-topic-project="${PROJECT_ID}"
-
-    # this allows dead-lettered messages to be forwarded from the BigQuery subscription to the dead letter topic
-    # and it allows dead-lettered messages to be published to the dead letter topic.
-    gcloud pubsub topics add-iam-policy-binding "${deadletter_topic_bigquery_import}" \
-        --member="serviceAccount:$PUBSUB_SERVICE_ACCOUNT" \
-        --role="roles/pubsub.publisher"
-    gcloud pubsub subscriptions add-iam-policy-binding "${subscription_bigquery_import}" \
-        --member="serviceAccount:$PUBSUB_SERVICE_ACCOUNT" \
-        --role="roles/pubsub.subscriber"
-
-    echo "Creating container image and deploying to Cloud Run..."
-    moduledir="." # deploys what's in our current directory
-    config="${moduledir}/cloudbuild.yaml"
-    url=$(gcloud builds submit --config="${config}" \
-        --substitutions="_SURVEY=${survey},_TESTID=${testid},_MODULE_NAME=${cr_module_name},_REPOSITORY=${artifact_registry_repo}" \
-        --region="${region}" \
-        "${moduledir}" | sed -n 's/^Step #2: Service URL: \(.*\)$/\1/p')
-
-    # ensure the Cloud Run service has the necessary permisions
-    role="roles/run.invoker"
-    gcloud run services add-iam-policy-binding "${cr_module_name}" \
-        --member="serviceAccount:${runinvoker_svcact}" \
-        --role="${role}"
-
-    echo "Creating trigger subscription for Cloud Run..."
-    # WARNING: This is set to retry failed deliveries. If there is a bug in main.py this will
-    # retry indefinitely, until the message is delete manually.
-    gcloud pubsub subscriptions create "${ps_input_subscrip}" \
-        --topic "${trigger_topic}" \
-        --topic-project "${PROJECT_ID}" \
-        --ack-deadline=600 \
-        --push-endpoint="${url}${ROUTE_RUN}" \
-        --push-auth-service-account="${runinvoker_svcact}"
-fi
+# --------- Finish build ------------------------
+# [FIXME] Figure out how to include this in cloudbuild.yaml. It is here because we need the value of $url.
+# Create the subscription that will trigger the Cloud Run service.
+echo "Creating trigger subscription for Cloud Run..."
+# [FIXME] Handle these retries better.
+echo "WARNING: This is set to retry failed deliveries. If there is a bug in main.py this will"
+echo "         retry indefinitely, until the message is deleted manually."
+trigger_subscrip="$_TRIGGER_TOPIC"
+gcloud pubsub subscriptions create "${trigger_subscrip}" \
    --topic "${_TRIGGER_TOPIC}" \
    --topic-project "${PROJECT_ID}" \
    --ack-deadline=600 \
    --push-endpoint="${url}${MODULE_ROUTE}" \
    --push-auth-service-account="${runinvoker_svcact}"
+# -----------------------------------------------
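Regarding the yq [FIXME] above: a minimal sketch of exporting the same variables by hand instead. The values mirror env.yaml below and are examples only; PROJECT_ID is not exported automatically by gcloud auth, but it can be read from the active config, and SURVEY/TESTID are included because construct-name.sh checks for the unprefixed names:

```bash
# Manual alternative to the yq loop (example values mirroring env.yaml).
export _SURVEY='lsst' _TESTID='testid'
export MODULE_NAME_STEM='supernnova' MODULE_ROUTE='/'
export REGION='us-central1' REPOSITORY_STEM='cloud-run-services' TRIGGER_TOPIC_STEM='alerts'
# Read the project from the active gcloud config.
export PROJECT_ID=$(gcloud config get-value project)
# construct-name.sh requires the unprefixed names.
export SURVEY="$_SURVEY" TESTID="$_TESTID"
```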
env.yaml
Lines changed: 8 additions & 0 deletions
@@ -0,0 +1,8 @@
+# Environment variables that will be used by cloudbuild.yaml must start with "_", per GCP's requirements.
+_TESTID: 'testid'
+_SURVEY: 'lsst'
+MODULE_NAME_STEM: 'supernnova'
+MODULE_ROUTE: '/' # url route that will trigger main.run()
+REGION: 'us-central1'
+REPOSITORY_STEM: 'cloud-run-services'
+TRIGGER_TOPIC_STEM: 'alerts'
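With these defaults (and SURVEY/TESTID resolving to 'lsst'/'testid'), the names exported by deploy.sh would come out as follows, assuming the construct-name.sh behavior shown earlier:

```bash
# Illustrative results of the construct-name.sh calls in deploy.sh:
#   _MODULE_NAME   -> lsst-supernnova-testid
#   _REPOSITORY    -> lsst-cloud-run-services-testid
#   _TRIGGER_TOPIC -> lsst-alerts-testid
```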
