
Commit e158cf2

initial commit
1 parent 25324ab commit e158cf2

4 files changed: 211 additions & 0 deletions

Dockerfile: 21 additions & 0 deletions
@@ -0,0 +1,21 @@
# Use the official lightweight Python image.
# https://hub.docker.com/_/python
FROM python:3.12-slim

# Allow statements and log messages to immediately appear in the Knative logs
ENV PYTHONUNBUFFERED True

# Copy local code to the container image.
ENV APP_HOME /app
WORKDIR $APP_HOME
COPY . ./

# Install production dependencies.
RUN pip install --no-cache-dir -r requirements.txt

# Run the web service on container startup. Here we use the gunicorn
# webserver, with one worker process and 8 threads.
# For environments with multiple CPU cores, increase the number of workers
# to be equal to the cores available.
# Timeout is set to 0 to disable the timeouts of the workers to allow Cloud Run to handle instance scaling.
CMD exec gunicorn --bind :$PORT --workers 1 --threads 8 --timeout 0 main:app
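For local testing, a minimal sketch of building and running this image outside Cloud Run (the image tag is a placeholder, and PORT is set by hand here because Cloud Run normally injects it):

docker build -t oracle-classifier-local .
docker run --rm -e PORT=8080 -p 8080:8080 oracle-classifier-local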
cloudbuild.yaml: 29 additions & 0 deletions
@@ -0,0 +1,29 @@
# https://cloud.google.com/build/docs/deploying-builds/deploy-cloud-run
# containerize the module and deploy it to Cloud Run
steps:
  # Build the image
  - name: 'gcr.io/cloud-builders/docker'
    args: ['build', '-t', '${_REGION}-docker.pkg.dev/${PROJECT_ID}/${_REPOSITORY}/${_MODULE_IMAGE_NAME}', '.']
  # Push the image to Artifact Registry
  - name: 'gcr.io/cloud-builders/docker'
    args: ['push', '${_REGION}-docker.pkg.dev/${PROJECT_ID}/${_REPOSITORY}/${_MODULE_IMAGE_NAME}']
  # Deploy image to Cloud Run
  - name: 'gcr.io/google.com/cloudsdktool/cloud-sdk'
    entrypoint: gcloud
    args: ['run', 'deploy', '${_MODULE_NAME}', '--image', '${_REGION}-docker.pkg.dev/${PROJECT_ID}/${_REPOSITORY}/${_MODULE_IMAGE_NAME}', '--region', '${_REGION}', '--set-env-vars', '${_ENV_VARS}']
images:
  - '${_REGION}-docker.pkg.dev/${PROJECT_ID}/${_REPOSITORY}/${_MODULE_IMAGE_NAME}'
substitutions:
  _SURVEY: 'lsst'
  _TESTID: 'testid'
  _MODULE_NAME: '${_SURVEY}-classify_with_ORACLE-${_TESTID}'
  _MODULE_IMAGE_NAME: 'gcr.io/${PROJECT_ID}/${_REPOSITORY}/${_MODULE_NAME}'
  _REPOSITORY: 'cloud-run-services'
  # Cloud Functions automatically sets the project ID env var using the name "GCP_PROJECT";
  # use the same name here for consistency.
  # [TODO] PROJECT_ID is set in setup.sh. This is confusing and we should revisit the decision.
  # I (Raen) think I didn't make it a substitution because I didn't want to set a default for it.
  _ENV_VARS: 'GCP_PROJECT=${PROJECT_ID},SURVEY=${_SURVEY},TESTID=${_TESTID}'
  _REGION: 'us-central1'
options:
  dynamic_substitutions: true
Cloud Run deploy/teardown script: 147 additions & 0 deletions
@@ -0,0 +1,147 @@
#! /bin/bash
# Deploys or deletes the broker Cloud Run service.
# This script will not delete a Cloud Run service that is in production.

# "False" uses production resources;
# any other string will be appended to the names of all resources
testid="${1:-test}"
# "True" tears down/deletes resources, else setup
teardown="${2:-False}"
# name of the survey this broker instance will ingest
survey="${3:-lsst}"
region="${4:-us-central1}"
# get environment variables
PROJECT_ID=$GOOGLE_CLOUD_PROJECT
PROJECT_NUMBER=$(gcloud projects describe "$PROJECT_ID" --format="value(projectNumber)")

MODULE_NAME="oracle"  # lower case required by Cloud Run
ROUTE_RUN="/"  # url route that will trigger main.run()

# function used to define GCP resource names; appends the testid if needed
define_GCP_resources() {
    local base_name="$1"
    local testid_suffix=""

    if [ "$testid" != "False" ]; then
        if [ "$base_name" = "$survey" ]; then
            testid_suffix="_${testid}"  # complies with BigQuery naming conventions
        else
            testid_suffix="-${testid}"
        fi
    fi

    echo "${base_name}${testid_suffix}"
}

#--- GCP resources used in this script
artifact_registry_repo=$(define_GCP_resources "${survey}-cloud-run-services")
deadletter_topic_bigquery_import_scone=$(define_GCP_resources "${survey}-bigquery-import-SCONE-deadletter")
deadletter_topic_bigquery_import_classifications=$(define_GCP_resources "${survey}-bigquery-import-classifications-deadletter")
deadletter_subscription_bigquery_import_scone="${deadletter_topic_bigquery_import_scone}"
deadletter_subscription_bigquery_import_classifications="${deadletter_topic_bigquery_import_classifications}"
topic_bigquery_import_scone=$(define_GCP_resources "${survey}-bigquery-import-SCONE")
topic_bigquery_import_classifications=$(define_GCP_resources "${survey}-bigquery-import-classifications")
subscription_bigquery_import_scone="${topic_bigquery_import_scone}"  # BigQuery subscription
subscription_bigquery_import_classifications="${topic_bigquery_import_classifications}"  # BigQuery subscription
trigger_topic=$(define_GCP_resources "${survey}-alerts")
ps_input_subscrip="${trigger_topic}"  # Pub/Sub subscription used to trigger the Cloud Run module
ps_output_topic=$(define_GCP_resources "${survey}-SCONE")

# additional GCP resources & variables used in this script
bq_dataset=$(define_GCP_resources "${survey}")
scone_table="SCONE"
classifications_table="classifications"
cr_module_name=$(define_GCP_resources "${survey}-${MODULE_NAME}")  # lower case required by Cloud Run
runinvoker_svcact="cloud-run-invoker@${PROJECT_ID}.iam.gserviceaccount.com"

if [ "${teardown}" = "True" ]; then
58+
# ensure that we do not teardown production resources
59+
if [ "${testid}" != "False" ]; then
60+
gcloud pubsub topics delete "${ps_output_topic}"
61+
gcloud pubsub topics delete "${topic_bigquery_import_scone}"
62+
gcloud pubsub topics delete "${topic_bigquery_import_classifications}"
63+
gcloud pubsub topics delete "${deadletter_topic_bigquery_import_scone}"
64+
gcloud pubsub topics delete "${deadletter_topic_bigquery_import_classifications}"
65+
gcloud pubsub subscriptions delete "${ps_input_subscrip}"
66+
gcloud pubsub subscriptions delete "${subscription_bigquery_import_scone}"
67+
gcloud pubsub subscriptions delete "${subscription_bigquery_import_classifications}"
68+
gcloud pubsub subscriptions delete "${deadletter_subscription_bigquery_import_scone}"
69+
gcloud pubsub subscriptions delete "${deadletter_subscription_bigquery_import_classifications}"
70+
gcloud run services delete "${cr_module_name}" --region "${region}"
71+
fi
72+
73+
else # Deploy the Cloud Run service
74+
75+
#--- Deploy Cloud Run service
76+
echo "Configuring Pub/Sub resources for classify_scone Cloud Run service..."
77+
gcloud pubsub topics create "${ps_output_topic}"
78+
gcloud pubsub topics create "${deadletter_topic_bigquery_import_scone}"
79+
gcloud pubsub topics create "${deadletter_topic_bigquery_import_classifications}"
80+
gcloud pubsub topics create "${topic_bigquery_import_scone}"
81+
gcloud pubsub topics create "${topic_bigquery_import_classifications}"
82+
gcloud pubsub subscriptions create "${deadletter_subscription_bigquery_import_scone}" --topic="${deadletter_topic_bigquery_import_scone}"
83+
gcloud pubsub subscriptions create "${deadletter_subscription_bigquery_import_classifications}" --topic="${deadletter_topic_bigquery_import_classifications}"
84+
85+
# in order to create BigQuery subscriptions, ensure that the following service account:
86+
# service-<project number>@gcp-sa-pubsub.iam.gserviceaccount.com" has the
87+
# bigquery.dataEditor role for each table
88+
PUBSUB_SERVICE_ACCOUNT="service-${PROJECT_NUMBER}@gcp-sa-pubsub.iam.gserviceaccount.com"
89+
roleid="roles/bigquery.dataEditor"
90+
bq add-iam-policy-binding \
91+
--member="serviceAccount:${PUBSUB_SERVICE_ACCOUNT}" \
92+
--role="${roleid}" \
93+
--table=true "${PROJECT_ID}:${bq_dataset}.${scone_table}"
    # grant the same role on the classifications table so its BigQuery subscription (below) can write to it
    bq add-iam-policy-binding \
        --member="serviceAccount:${PUBSUB_SERVICE_ACCOUNT}" \
        --role="${roleid}" \
        --table=true "${PROJECT_ID}:${bq_dataset}.${classifications_table}"
    gcloud pubsub subscriptions create "${subscription_bigquery_import_scone}" \
        --topic="${topic_bigquery_import_scone}" \
        --bigquery-table="${PROJECT_ID}:${bq_dataset}.${scone_table}" \
        --use-table-schema \
        --dead-letter-topic="${deadletter_topic_bigquery_import_scone}" \
        --max-delivery-attempts=5 \
        --dead-letter-topic-project="${PROJECT_ID}"
    gcloud pubsub subscriptions create "${subscription_bigquery_import_classifications}" \
        --topic="${topic_bigquery_import_classifications}" \
        --bigquery-table="${PROJECT_ID}:${bq_dataset}.${classifications_table}" \
        --use-table-schema \
        --dead-letter-topic="${deadletter_topic_bigquery_import_classifications}" \
        --max-delivery-attempts=5 \
        --dead-letter-topic-project="${PROJECT_ID}"

    # this allows dead-lettered messages to be forwarded from the BigQuery subscription to the dead-letter topic,
    # and it allows dead-lettered messages to be published to the dead-letter topic
    gcloud pubsub topics add-iam-policy-binding "${deadletter_topic_bigquery_import_scone}" \
        --member="serviceAccount:$PUBSUB_SERVICE_ACCOUNT" \
        --role="roles/pubsub.publisher"
    gcloud pubsub subscriptions add-iam-policy-binding "${subscription_bigquery_import_scone}" \
        --member="serviceAccount:$PUBSUB_SERVICE_ACCOUNT" \
        --role="roles/pubsub.subscriber"
    gcloud pubsub topics add-iam-policy-binding "${deadletter_topic_bigquery_import_classifications}" \
        --member="serviceAccount:$PUBSUB_SERVICE_ACCOUNT" \
        --role="roles/pubsub.publisher"
    gcloud pubsub subscriptions add-iam-policy-binding "${subscription_bigquery_import_classifications}" \
        --member="serviceAccount:$PUBSUB_SERVICE_ACCOUNT" \
        --role="roles/pubsub.subscriber"

    echo "Creating container image and deploying to Cloud Run..."
    moduledir="."  # deploys what's in our current directory
    config="${moduledir}/cloudbuild.yaml"
    url=$(gcloud builds submit --config="${config}" \
        --substitutions="_SURVEY=${survey},_TESTID=${testid},_MODULE_NAME=${cr_module_name},_REPOSITORY=${artifact_registry_repo}" \
        --region="${region}" \
        "${moduledir}" | sed -n 's/^Step #2: Service URL: \(.*\)$/\1/p')

    # ensure the Cloud Run service has the necessary permissions
    role="roles/run.invoker"
    gcloud run services add-iam-policy-binding "${cr_module_name}" \
        --member="serviceAccount:${runinvoker_svcact}" \
        --role="${role}"

    echo "Creating trigger subscription for Cloud Run..."
    # WARNING: This is set to retry failed deliveries. If there is a bug in main.py this will
    # retry indefinitely, until the message is deleted manually.
    gcloud pubsub subscriptions create "${ps_input_subscrip}" \
        --topic "${trigger_topic}" \
        --topic-project "${PROJECT_ID}" \
        --ack-deadline=600 \
        --push-endpoint="${url}${ROUTE_RUN}" \
        --push-auth-service-account="${runinvoker_svcact}"
fi
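For reference, a minimal usage sketch of the deploy script above. The filename deploy_cloud_run.sh is a placeholder (the commit view does not show the script's name), and GOOGLE_CLOUD_PROJECT must already be set because the script reads PROJECT_ID from it:

export GOOGLE_CLOUD_PROJECT="my-project-id"   # placeholder project ID

# deploy a test instance; resource names get a "-mytest" suffix ("_mytest" for the BigQuery dataset)
bash deploy_cloud_run.sh mytest False lsst us-central1

# tear the same test instance back down
bash deploy_cloud_run.sh mytest True lsst us-central1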
requirements.txt: 14 additions & 0 deletions
@@ -0,0 +1,14 @@
# As explained here
# https://cloud.google.com/functions/docs/writing/specifying-dependencies-python
# dependencies for a Cloud Function must be specified in a `requirements.txt`
# file (or packaged with the function) in the same directory as `main.py`

google-cloud-logging
pittgoogle-client>=0.3.14

# for Cloud Run
# https://cloud.google.com/run/docs/quickstarts/build-and-deploy/deploy-python-service
# pinned following quickstart example. [TODO] consider un-pinning
Flask==3.0.3
gunicorn==23.0.0
Werkzeug==3.0.6
