@@ -198,8 +198,11 @@ deploy_aws_resources() {
     # clean up previously generated resources if any
     cleanup_generated_resources
     # Create the S3 bucket (to store config files) if it doesn't exist
-    log_streaming_data "creating s3 bucket, if it does not exist"
+    log_streaming_data "creating s3 config bucket, if it does not exist"
     validate_or_create_s3_bucket "$s3_bucket_for_storage" "$region" "$aws_account_id"
+    # Create the S3 data bucket if it doesn't exist
+    log_streaming_data "creating s3 data bucket, if it does not exist"
+    validate_or_create_s3_bucket "$s3_bucket_data_pipeline" "$region" "$aws_account_id"
     # Deploy PCE Terraform scripts
     onedocker_ecs_container_image='539290649537.dkr.ecr.us-west-2.amazonaws.com/one-docker-prod:latest'
     publisher_vpc_cidr='10.0.0.0/16'
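The data bucket is now created up front by the same validate_or_create_s3_bucket helper already used for the config bucket. That helper is defined elsewhere in the repo, so the following is only a minimal sketch of the idempotent create-if-missing pattern it presumably wraps, using standard AWS CLI calls; the real helper may additionally validate naming, ownership, or tagging:

    # Hypothetical helper (not the repo's implementation): create an S3 bucket
    # only if it does not already exist.
    ensure_s3_bucket() {
        local bucket="$1" region="$2"
        if aws s3api head-bucket --bucket "$bucket" 2>/dev/null; then
            echo "Bucket $bucket already exists, skipping creation"
        elif [ "$region" = "us-east-1" ]; then
            # us-east-1 rejects an explicit LocationConstraint
            aws s3api create-bucket --bucket "$bucket" --region "$region"
        else
            aws s3api create-bucket --bucket "$bucket" --region "$region" \
                --create-bucket-configuration LocationConstraint="$region"
        fi
    }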
@@ -291,15 +294,14 @@ deploy_aws_resources() {
         -var "tag_postfix=$tag_postfix" \
         -var "aws_account_id=$aws_account_id" \
         -var "data_processing_output_bucket=$s3_bucket_data_pipeline" \
+        -var "data_processing_output_bucket_arn=$data_bucket_arn" \
         -var "data_ingestion_lambda_name=$data_ingestion_lambda_name" \
         -var "data_processing_lambda_s3_bucket=$s3_bucket_for_storage" \
         -var "data_processing_lambda_s3_key=lambda.zip" \
         -var "data_upload_key_path=$data_upload_key_path" \
         -var "query_results_key_path=$query_results_key_path"
     echo "######################## Deploy Data Ingestion Terraform scripts completed ########################"
     # store the outputs from data ingestion pipeline output into variables
-    app_data_input_bucket_id=$(terraform output data_processing_output_bucket_id | tr -d '"')
-    app_data_input_bucket_arn=$(terraform output data_processing_output_bucket_arn | tr -d '"')
     firehose_stream_name=$(terraform output firehose_stream_name | tr -d '"')
 
     if "$build_semi_automated_data_pipeline"
@@ -312,7 +314,7 @@ deploy_aws_resources() {
         cp template/lambda_trigger.py .
         echo "Updating trigger function configurations..."
         sed -i "s/glueJobName = \"TO_BE_UPDATED_DURING_DEPLOYMENT\"/glueJobName = \"glue-ETL$tag_postfix\"/g" lambda_trigger.py
-        sed -i "s~s3_write_path = \"TO_BE_UPDATED_DURING_DEPLOYMENT\"~s3_write_path = \"$app_data_input_bucket_id/events_data/\"~g" lambda_trigger.py
+        sed -i "s~s3_write_path = \"TO_BE_UPDATED_DURING_DEPLOYMENT\"~s3_write_path = \"$s3_bucket_data_pipeline/events_data/\"~g" lambda_trigger.py
 
         echo "######################## Initializing terraform working directory started ########################"
         terraform init -reconfigure \
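The replacement sed call now writes the bucket name itself (rather than the removed Terraform output) into lambda_trigger.py; ~ serves as the s-command delimiter because the replacement value contains slashes. A quick stand-alone check of the substitution, using a hypothetical bucket name:

    s3_bucket_data_pipeline="fb-pc-data-demo"   # hypothetical value for illustration
    echo 's3_write_path = "TO_BE_UPDATED_DURING_DEPLOYMENT"' \
        | sed "s~s3_write_path = \"TO_BE_UPDATED_DURING_DEPLOYMENT\"~s3_write_path = \"$s3_bucket_data_pipeline/events_data/\"~g"
    # prints: s3_write_path = "fb-pc-data-demo/events_data/"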
@@ -328,8 +330,8 @@ deploy_aws_resources() {
             -var "aws_account_id=$aws_account_id" \
             -var "lambda_trigger_s3_key=lambda_trigger.zip" \
             -var "app_data_input_bucket=$s3_bucket_data_pipeline" \
-            -var "app_data_input_bucket_id=$app_data_input_bucket_id" \
-            -var "app_data_input_bucket_arn=$app_data_input_bucket_arn" \
+            -var "app_data_input_bucket_id=$s3_bucket_data_pipeline" \
+            -var "app_data_input_bucket_arn=$data_bucket_arn" \
             -var "data_upload_key_path=$data_upload_key_path"
         echo "######################## Deploy Semi-automated Data Ingestion Terraform scripts completed ########################"
     fi
@@ -406,6 +408,7 @@
     s3_bucket_data_pipeline="$s3_bucket_data_pipeline$tag_postfix"
 fi
 
+data_bucket_arn="arn:aws:s3:::${s3_bucket_data_pipeline}"
 policy_name="fb-pc-policy${tag_postfix}"
 database_name="mpc-events-db${tag_postfix}"
 glue_crawler_name="mpc-events-crawler${tag_postfix}"
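data_bucket_arn can be derived locally because an S3 bucket ARN is fully determined by the bucket name (arn:aws:s3:::<bucket-name>, with no account or region component), so nothing needs to be read back from Terraform outputs. A trivial illustration with a hypothetical bucket name:

    bucket="fb-pc-data-demo"                 # hypothetical bucket name
    data_bucket_arn="arn:aws:s3:::${bucket}"
    echo "$data_bucket_arn"                  # arn:aws:s3:::fb-pc-data-demo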