Skip to content

Commit 2dfb142

Browse files
author
jdeolive
committed
removing old buckets when creating new AMIs on S3, linked up product ID
git-svn-id: http://svn.opengeo.org/suite/trunk@2350 fe66add1-021f-4af3-b75b-ea5154e73f91
1 parent 4e025e6 commit 2dfb142

File tree

5 files changed

+113
-6
lines changed

5 files changed

+113
-6
lines changed

aws/build_ubuntu_ami.sh

+17-1
Original file line numberDiff line numberDiff line change
@@ -72,7 +72,7 @@ function poll_image() {
7272
}
7373

7474
if [ -z $3 ]; then
75-
echo "Usage: $0 AMI_ID IMAGE_NAME <dev|prod> [-t 'ebs'|'s3'] [ -a 'i386'|'x86_64'] [ -s 'm1.small'|'m1.large'] [--skip-create-image]"
75+
echo "Usage: $0 AMI_ID IMAGE_NAME <dev|prod> [-t 'ebs'|'s3'] [ -a 'i386'|'x86_64'] [ -s 'm1.small'|'m1.large'] [ -p <product_id> ] [--skip-create-image]"
7676
exit 1
7777
fi
7878

@@ -93,6 +93,9 @@ for (( i = 2; i < ${#args[*]}; i++ )); do
9393
if [ $arg == "-s" ]; then
9494
IMAGE_SIZE=$val
9595
fi
96+
if [ $arg == "-p" ]; then
97+
PRODUCT_ID=$val
98+
fi
9699
if [ $arg == "--skip-create-image" ]; then
97100
SKIP_CREATE_IMAGE="yes"
98101
fi
@@ -156,11 +159,24 @@ if [ -z $SKIP_CREATE_IMAGE ]; then
156159

157160
scp $SSH_OPTS s3-$ACCOUNT.properties ubuntu@$HOST:/home/ubuntu/s3.properties
158161
check_rc $? "upload s3 properties"
162+
163+
scp $SSH_OPTS s3cfg-$ACCOUNT ubuntu@$HOST:/home/ubuntu/s3cfg
164+
check_rc $? "upload s3cfg-$ACCOUNT"
159165

160166
ssh $SSH_OPTS ubuntu@$HOST "cd /home/ubuntu && ./bundle_s3_image.sh $IMAGE_NAME $IMAGE_ARCH"
161167
check_rc $? "remote bundle image"
162168
fi
163169
fi
164170

171+
if [ ! -z $PRODUCT_ID ]; then
172+
# link the image to the product id
173+
ec2-modify-image-attribute $IMAGE_ID -p $PRODUCT_ID
174+
check_rc $? "linking image $IMAGE_ID to product $PRODUCT_ID"
175+
176+
# make the image public
177+
ec2-modify-image-attribute $IMAGE_ID -l -a all
178+
check_rc $? "making image $IMAGE_ID public"
179+
fi
180+
165181
# shut down the instance
166182
ec2-terminate-instances $INSTANCE_ID

aws/bundle_s3_image.sh

+13-3
Original file line numberDiff line numberDiff line change
@@ -25,11 +25,11 @@ IMAGE_ARCH=$2
2525
export EC2_PRIVATE_KEY=`ls ~/pk-*`
2626
export EC2_CERT=`ls ~/cert-*`
2727

28-
# install the ec2-api/ami-tools
28+
# install the ec2-api/ami-tools and s3cmd
2929
sudo bash -c "echo 'deb http://us.archive.ubuntu.com/ubuntu/ lucid multiverse' >> /etc/apt/sources.list"
3030
sudo apt-get update
31-
sudo apt-get -y install ec2-api-tools ec2-ami-tools
32-
check_rc $? "apt-get install ec2 api/ami tools"
31+
sudo apt-get -y install ec2-api-tools ec2-ami-tools s3cmd
32+
check_rc $? "apt-get install ec2 api/ami + s3cmd tools"
3333

3434
if [ -z $SKIP_BUNDLE ]; then
3535
# bundle the image
@@ -44,6 +44,16 @@ if [ ! -e $IMAGE_MANIFEST ]; then
4444
fi
4545

4646
S3_BUCKET=$S3_BUCKET_ROOT/$IMAGE_NAME
47+
S3CMD_CONFIG=~/s3cfg
48+
49+
s3cmd -c $S3CMD_CONFIG ls s3://$S3_BUCKET_ROOT
50+
check_rc $? "listing contents of $S3_BUCKET_ROOT"
51+
52+
# figure out if the directory already exists, and delete it if necessary
53+
s3cmd -c $S3CMD_CONFIG ls s3://$S3_BUCKET_ROOT | grep $IMAGE_NAME
54+
if [ $? -eq 0 ]; then
55+
s3cmd -c $S3CMD_CONFIG -r del s3://$S3_BUCKET
56+
fi
4757

4858
if [ -z $SKIP_UPLOAD ]; then
4959
# upload the bundle

aws/hudson_build.sh

+7-2
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,19 @@
11
#!/bin/bash
22

33
if [ -z $4 ]; then
4-
echo "Usage: $0 AMI_ID <i386|x86_64> <ebs|s3> <dev|prod>"
4+
echo "Usage: $0 AMI_ID <i386|x86_64> <ebs|s3> <dev|prod> [-p <product_id>]"
55
exit 1
66
fi
77

88
AMI_ID=$1
99
IMAGE_ARCH=$2
1010
IMAGE_TYPE=$3
1111
ACCOUNT=$4
12+
13+
if [ $5 == "-p" ]; then
14+
PRODUCT_ID=$6
15+
fi
16+
1217
IMAGE_SIZE="m1.small"
1318
if [ $IMAGE_ARCH == "x86_64" ]; then
1419
IMAGE_SIZE="m1.large"
@@ -24,4 +29,4 @@ popd > /dev/null
2429
ver=`get_ami_version $REPO_PATH`
2530

2631
# build it
27-
./build_ubuntu_ami.sh $AMI_ID suite-$ver-$IMAGE_ARCH-`date +"%Y%m%d"` $ACCOUNT -t $IMAGE_TYPE -s $IMAGE_SIZE -a $IMAGE_ARCH
32+
./build_ubuntu_ami.sh $AMI_ID suite-$ver-$IMAGE_ARCH-`date +"%Y%m%d"` $ACCOUNT -t $IMAGE_TYPE -s $IMAGE_SIZE -a $IMAGE_ARCH -p $PRODUCT_ID

aws/s3cfg-dev

+38
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
[default]
2+
access_key = AKIAIEIFXZRDU4AY5B3Q
3+
bucket_location = US
4+
cloudfront_host = cloudfront.amazonaws.com
5+
cloudfront_resource = /2010-07-15/distribution
6+
default_mime_type = binary/octet-stream
7+
delete_removed = False
8+
dry_run = False
9+
encoding = UTF-8
10+
encrypt = False
11+
follow_symlinks = False
12+
force = False
13+
get_continue = False
14+
gpg_command = None
15+
gpg_decrypt = %(gpg_command)s -d --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
16+
gpg_encrypt = %(gpg_command)s -c --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
17+
gpg_passphrase =
18+
guess_mime_type = True
19+
host_base = s3.amazonaws.com
20+
host_bucket = %(bucket)s.s3.amazonaws.com
21+
human_readable_sizes = False
22+
list_md5 = False
23+
log_target_prefix =
24+
preserve_attrs = True
25+
progress_meter = True
26+
proxy_host =
27+
proxy_port = 0
28+
recursive = False
29+
recv_chunk = 4096
30+
reduced_redundancy = False
31+
secret_key = LbEM4l6xxqHiJUY6Kb6eHyvl3Ryn3ItefAz02mnd
32+
send_chunk = 4096
33+
simpledb_host = sdb.amazonaws.com
34+
skip_existing = False
35+
socket_timeout = 10
36+
urlencoding_mode = normal
37+
use_https = False
38+
verbosity = WARNING

aws/s3cfg-prod

+38
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
[default]
2+
access_key = AKIAI7SFU3MT3R5WKHSA
3+
bucket_location = US
4+
cloudfront_host = cloudfront.amazonaws.com
5+
cloudfront_resource = /2010-07-15/distribution
6+
default_mime_type = binary/octet-stream
7+
delete_removed = False
8+
dry_run = False
9+
encoding = UTF-8
10+
encrypt = False
11+
follow_symlinks = False
12+
force = False
13+
get_continue = False
14+
gpg_command = None
15+
gpg_decrypt = %(gpg_command)s -d --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
16+
gpg_encrypt = %(gpg_command)s -c --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
17+
gpg_passphrase =
18+
guess_mime_type = True
19+
host_base = s3.amazonaws.com
20+
host_bucket = %(bucket)s.s3.amazonaws.com
21+
human_readable_sizes = False
22+
list_md5 = False
23+
log_target_prefix =
24+
preserve_attrs = True
25+
progress_meter = True
26+
proxy_host =
27+
proxy_port = 0
28+
recursive = False
29+
recv_chunk = 4096
30+
reduced_redundancy = False
31+
secret_key = aKbeL+akiq069+k8FFScdKXTogTwb8ZnXjD4yObq
32+
send_chunk = 4096
33+
simpledb_host = sdb.amazonaws.com
34+
skip_existing = False
35+
socket_timeout = 10
36+
urlencoding_mode = normal
37+
use_https = False
38+
verbosity = WARNING

0 commit comments

Comments (0)