Skip to content

Commit 7e3b5cc

Browse files
committed
feat: d4g-s3-backup Docker image
1 parent ccf6762 commit 7e3b5cc

File tree

6 files changed

+302
-54
lines changed

6 files changed

+302
-54
lines changed

.dockerignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
.DS_Store

.github/workflows/main.yml

Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
# Build the multi-arch Docker image and push it to GHCR on every push to main.
name: main

on:
  push:
    branches:
      - main

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

jobs:
  main:
    runs-on: ubuntu-latest
    steps:
      # NOTE(review): checkout@v2 / setup-qemu-action@v1 / setup-buildx-action@v1
      # are deprecated majors — consider upgrading when touching this file.
      - name: Checkout
        uses: actions/checkout@v2
      # QEMU provides emulation for the non-amd64 target platforms below.
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      # login/metadata actions are pinned to commit SHAs (supply-chain hygiene).
      - name: Log in to the Container registry
        uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      # NOTE(review): this step's outputs (steps.meta.outputs.tags/labels) are
      # never consumed — the raw buildx command below tags only ':latest'.
      # Either wire the outputs into the build or drop this step.
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
      # Build for amd64/arm64/armhf and push directly to the registry.
      - name: Run Buildx
        run: |
          docker buildx build \
            --push \
            --platform=linux/amd64,linux/arm64,linux/armhf \
            -t ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest -f ./Dockerfile ./

.gitignore

Lines changed: 1 addition & 52 deletions
Original file line numberDiff line numberDiff line change
@@ -1,52 +1 @@
1-
# Prerequisites
2-
*.d
3-
4-
# Object files
5-
*.o
6-
*.ko
7-
*.obj
8-
*.elf
9-
10-
# Linker output
11-
*.ilk
12-
*.map
13-
*.exp
14-
15-
# Precompiled Headers
16-
*.gch
17-
*.pch
18-
19-
# Libraries
20-
*.lib
21-
*.a
22-
*.la
23-
*.lo
24-
25-
# Shared objects (inc. Windows DLLs)
26-
*.dll
27-
*.so
28-
*.so.*
29-
*.dylib
30-
31-
# Executables
32-
*.exe
33-
*.out
34-
*.app
35-
*.i*86
36-
*.x86_64
37-
*.hex
38-
39-
# Debug files
40-
*.dSYM/
41-
*.su
42-
*.idb
43-
*.pdb
44-
45-
# Kernel Module Compile Results
46-
*.mod*
47-
*.cmd
48-
.tmp_versions/
49-
modules.order
50-
Module.symvers
51-
Mkfile.old
52-
dkms.conf
1+
.DS_Store

Dockerfile

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
# Minimal image wrapping s3cmd around the backup entrypoint script.
# NOTE(review): debian:sid-slim is the unstable rolling release — consider a
# stable tag for reproducible builds.
FROM debian:sid-slim

# Fix vs original: 'LABEL key = "value"' (spaces around '=') is parsed as the
# legacy space-separated LABEL form, yielding a label whose value literally
# starts with '= "'. OCI labels must use key=value with no spaces.
LABEL org.opencontainers.image.source="https://github.com/dataforgoodfr/d4g-s3-backup"
LABEL org.opencontainers.image.authors="Data For Good"

# apt-get (not apt) has a stable CLI for scripts; skip recommends and remove
# the package lists to keep the image small.
RUN apt-get update \
    && apt-get install -y --no-install-recommends s3cmd \
    && rm -rf /var/lib/apt/lists/*

# COPY is preferred over ADD for plain local files (no URL/tar magic).
COPY ./entrypoint.sh /opt/entrypoint.sh

ENTRYPOINT ["/opt/entrypoint.sh"]

README.md

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,12 @@
1-
# d4g-s3cmd
2-
s3cmd Dockerizing for dataforgood
1+
# d4g-s3-backup
2+
3+
This repository is an attempt at creating a standard Docker image used to back up the files of our various services.
4+
5+
The result is a simple, fully configurable Docker image.
6+
7+
## Usage
8+
Usage is documented in-script; to display the help menu, use
9+
10+
```
11+
docker run -it --rm ghcr.io/dataforgoodfr/d4g-s3-backup:latest --help
12+
```

entrypoint.sh

Lines changed: 240 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,240 @@
1+
#!/usr/bin/env bash
# Backup entrypoint: archives a data directory and syncs the archives to an
# S3-compatible bucket with s3cmd. See usage() for the full option list.
#
# Strict mode: -E makes the ERR trap fire inside functions, -e exits on any
# unhandled error, -u errors on unset variables, -o pipefail fails a pipeline
# if any stage fails.
set -Eeuo pipefail

# Run cleanup() on signals, on any error, and on normal exit (defined below;
# traps are resolved at fire time, so forward reference is fine).
trap cleanup SIGINT SIGTERM ERR EXIT
5+
6+
usage() {
  # Print the help text and exit.
  # NOTE: exits 1 unconditionally (also for -h/--help) so any code path that
  # ends in the help text is treated as "the backup did not run".
  # Fixes vs original: "prometheu-compatible" -> "Prometheus-compatible",
  # "keps" -> "kept", and documented defaults now match the actual defaults
  # set in parse_params (/data, app, /backups).
  cat <<EOF
USAGE docker run -it --rm -v /var/data:/data -v /opt/backups:/backups ghcr.io/dataforgoodfr/d4g-s3-backup \\
    [--access-key="<access_key>"] \\
    [--secret-key="<secret_key>"] \\
    [--bucket-name="backups"] \\
    [--host-base="%(bucket)s.s3.fr-par.scw.cloud"] \\
    [--data-dir="/data"] \\
    [--backups-dir="/backups"] \\
    [--service-name="service"] \\
    [--retention-days=30] \\
    [--bucket-region="fr-par"] \\
    [--prom-metrics] \\
    [--debug] \\
    [--help]

Create backups for a specific dir easily and sync them to an s3 compatible bucket.
This script also supports publishing Prometheus-compatible metrics through the Textfile Collector.

Data from <data_dir> will be backed up to <backups-dir>/<service-name>/<service-name>-$(date +%Y-%m-%d).tar.gz
Files will be kept around for <retention-days> days.
Files will be synced to s3 under s3://<bucket-name>/<service-name> using supplied credentials and configuration.

Supported parameters :
  -h, --help       : display this message
  --debug          : Print configuration before running (Optional)
  --access-key     : AWS access key (Required)
  --secret-key     : AWS secret key (Required)
  --bucket-name    : name of the bucket to sync backups to (Optional, Default backups)
  --data-dir       : directory to backup (Optional, Default /data)
  --service-name   : name of the service to backup (Optional, Default app)
  --backups-dir    : root directory where backups will be stored (Optional, Default /backups)
  --host-bucket    : Bucket host base (Optional, Default %(bucket)s.s3.fr-par.scw.cloud)
  --host-base      : S3 host base (Optional, Default s3.fr-par.scw.cloud)
  --bucket-region  : S3 bucket region (Optional, Default fr-par)
  --retention-days : number of days to keep backups (Default 30)
  --prom-metrics   : enable prometheus metrics (Default false)
EOF
  exit 1
}
46+
47+
cleanup() {
  # Exit handler: publish metrics if requested, report failure, and exit with
  # a status reflecting the outcome.
  # Fixes vs original:
  #  - '[ "$FAILURE" != 0]' was missing a space before ']', so the test
  #    compared "1" against the literal string "0]".
  #  - guards with ${VAR:-...}: this trap can fire before parse_params has
  #    set the globals, which would abort under 'set -u'.
  #  - exits 1 on failure instead of always 0, so schedulers/CI can detect
  #    a failed backup.
  trap - SIGINT SIGTERM ERR EXIT
  if [ "${PROM_METRICS:-false}" == "true" ]; then
    write_metrics
  fi
  if [ "${FAILURE:-1}" != 0 ]; then
    error "Backup for ${SERVICE_NAME:-unknown} $(date +%Y-%m-%d) failed."
    exit 1
  fi
  exit 0
}
57+
58+
write_metrics() {
  # Publish backup metrics for the node_exporter textfile collector.
  # Writes to a PID-suffixed temp file, then renames atomically so the
  # exporter never scrapes a half-written file.
  #
  # TEXTFILE_COLLECTOR_DIR must come from the environment — nothing in this
  # script sets it, and under 'set -u' an unset value would abort inside the
  # EXIT trap with a cryptic "unbound variable" error. Guard explicitly.
  if [ -z "${TEXTFILE_COLLECTOR_DIR:-}" ]; then
    error "TEXTFILE_COLLECTOR_DIR is not set; skipping metrics write"
    return 0
  fi

  local end timestamp
  end="$(date +%s)"
  # Last successful timestamp is "now" on success, 0 on failure.
  timestamp="$end"
  if [ "$FAILURE" != 0 ]; then
    timestamp="0"
  fi
  cat << EOF > "$TEXTFILE_COLLECTOR_DIR/${SERVICE_NAME}_backup.prom.$$"
# HELP ${SERVICE_NAME}_backup_duration Duration of the planned ${SERVICE_NAME} backup
# TYPE ${SERVICE_NAME}_backup_duration counter
${SERVICE_NAME}_backup_duration $((end - START))
# HELP ${SERVICE_NAME}_backup_failure Result of the planned ${SERVICE_NAME} backup
# TYPE ${SERVICE_NAME}_backup_failure gauge
${SERVICE_NAME}_backup_failure $FAILURE
# HELP ${SERVICE_NAME}_backup_last_time Timestamp of last successful backup
# TYPE ${SERVICE_NAME}_backup_last_time gauge
${SERVICE_NAME}_backup_last_time $timestamp
EOF

  # Rename the temporary file atomically.
  # This avoids the node exporter seeing half a file.
  mv "$TEXTFILE_COLLECTOR_DIR/${SERVICE_NAME}_backup.prom.$$" \
    "$TEXTFILE_COLLECTOR_DIR/${SERVICE_NAME}_backup.prom"
}
83+
84+
setup_colors() {
  # Define ANSI color variables, enabled only when stderr is a terminal,
  # NO_COLOR is unset (https://no-color.org convention), and TERM is not
  # "dumb". Otherwise all colors are empty strings.
  # Fix vs original: removed leftover debug 'echo "coucou"', which polluted
  # stdout on every non-tty run (cron logs, captured output, etc.).
  if [[ -t 2 ]] && [[ -z "${NO_COLOR-}" ]] && [[ "${TERM-}" != "dumb" ]]; then
    # shellcheck disable=SC2034
    NOCOLOR='\033[0m' RED='\033[0;31m' GREEN='\033[0;32m' ORANGE='\033[0;33m' BLUE='\033[0;34m' PURPLE='\033[0;35m' CYAN='\033[0;36m' YELLOW='\033[1;33m'
  else
    # shellcheck disable=SC2034
    NOCOLOR='' RED='' GREEN='' ORANGE='' BLUE='' PURPLE='' CYAN='' YELLOW=''
  fi
}
93+
94+
info() {
  # Emit an informational message, wrapped in the green color codes when
  # colors are enabled (%b interprets the escape sequences, like 'echo -e').
  printf '%b\n' "${GREEN}$*${NOCOLOR}"
}
97+
98+
error() {
  # Emit an error message in red. Fix vs original: diagnostics now go to
  # stderr so they are not mixed into captured stdout; ${VAR:-} guards keep
  # this safe if the ERR trap fires before setup_colors ran (set -u).
  printf '%b\n' "${RED:-}$*${NOCOLOR:-}" >&2
}
101+
102+
debug() {
  # Print the given message only when --debug was requested.
  # (An if-statement — not '[ ... ] && ...' — so a false condition does not
  # return a non-zero status that would trip 'set -e' in callers.)
  if [ "$DEBUG" == 'true' ]; then
    printf '%b\n' "$1"
  fi
}
107+
108+
parse_params() {
  # Parse command-line flags into global configuration variables.
  # Sets: FAILURE, START, DEBUG, DATA_DIR, SERVICE_NAME, BACKUPS_DIR,
  #       BUCKET_NAME, HOST_BASE, HOST_BUCKET, BUCKET_REGION, RETENTION_DAYS,
  #       PROM_METRICS, ACCESS_KEY, SECRET_KEY, BACKUP_DIR, BACKUP_FILE,
  #       BUCKET_PATH.
  # Exits via usage() on unknown options or missing required credentials.
  # Fix vs original: '--prom-metrics*)' was a glob, so any suffix (including
  # '--prom-metrics=false') silently enabled metrics; now exact-match only.
  if [ $# -gt 12 ]; then
    # 12 = every supported non-help flag given once.
    echo "Too many parameters provided"
    usage
  fi

  # Internal state: assume failure until the backup completes; START feeds
  # the duration metric in write_metrics.
  FAILURE=1
  START="$(date +%s)"

  # Sane defaults
  DEBUG="false"
  DATA_DIR="/data"
  SERVICE_NAME="app"
  BACKUPS_DIR="/backups"
  BUCKET_NAME="backups"
  HOST_BASE="s3.fr-par.scw.cloud"
  HOST_BUCKET="%(bucket)s.s3.fr-par.scw.cloud"
  BUCKET_REGION="fr-par"
  RETENTION_DAYS="30"
  PROM_METRICS="false"
  ACCESS_KEY=""
  SECRET_KEY=""

  while :; do
    case "${1-}" in
      -h | --help)
        usage
        ;;
      --debug)
        DEBUG="true"
        ;;
      --access-key=*)
        ACCESS_KEY="${1#*=}"
        ;;
      --secret-key=*)
        SECRET_KEY="${1#*=}"
        ;;
      --data-dir=*)
        DATA_DIR="${1#*=}"
        ;;
      --service-name=*)
        SERVICE_NAME="${1#*=}"
        ;;
      --backups-dir=*)
        BACKUPS_DIR="${1#*=}"
        ;;
      --bucket-name=*)
        BUCKET_NAME="${1#*=}"
        ;;
      --host-base=*)
        HOST_BASE="${1#*=}"
        ;;
      --host-bucket=*)
        HOST_BUCKET="${1#*=}"
        ;;
      --bucket-region=*)
        BUCKET_REGION="${1#*=}"
        ;;
      --retention-days=*)
        RETENTION_DAYS="${1#*=}"
        ;;
      --prom-metrics)
        PROM_METRICS="true"
        ;;
      -?*)
        echo "Unknown option: $1"
        usage
        ;;
      *)
        break
        ;;
    esac
    shift
  done

  # Validate required parameters
  if [ -z "${ACCESS_KEY}" ]; then
    error "Missing required parameter: --access-key"
    usage
  fi

  if [ -z "${SECRET_KEY}" ]; then
    error "Missing required parameter: --secret-key"
    usage
  fi

  # Derived paths used by the main flow.
  BACKUP_DIR="${BACKUPS_DIR}/${SERVICE_NAME}/"
  BACKUP_FILE="${BACKUP_DIR}${SERVICE_NAME}-$(date +%Y-%m-%d).tar.gz"
  BUCKET_PATH="s3://${BUCKET_NAME}/${SERVICE_NAME}/"

  return 0
}
201+
202+
create_s3_config() {
  # Write the s3cmd configuration file from the parsed parameters.
  # Fix vs original: the file is now truncated ('>') instead of appended
  # ('>>'), so repeated invocations in a long-lived container no longer
  # accumulate duplicate [default] sections (s3cmd would read stale keys).
  {
    echo "[default]"
    echo "use_https = True"
    echo "access_key = ${ACCESS_KEY}"
    echo "secret_key = ${SECRET_KEY}"
    echo "host_base = ${HOST_BASE}"
    echo "host_bucket = ${HOST_BUCKET}"
    echo "bucket_location = ${BUCKET_REGION}"
  } > /.s3cfg

  # NOTE: debug output includes the secret key; only enable --debug in a
  # trusted environment.
  debug "S3 configuration :"
  debug "$(cat /.s3cfg)"
}
214+
215+
# ---- Main flow ------------------------------------------------------------
setup_colors
parse_params "$@"
create_s3_config

# Work from inside the data dir so the tarball contains the directory's
# contents rather than absolute paths. 'set -e' aborts the run if cd fails.
cd "$DATA_DIR"


# Create backup directory for service if it doesn't exist.
debug "Creating backups directory : ${BACKUP_DIR}"
mkdir -p "${BACKUP_DIR}"

# Cleanup backups that are older than RETENTION_DAYS days
debug "Finding backups older than $RETENTION_DAYS in ${BACKUP_DIR}"
find "${BACKUP_DIR}" -type f -name "${SERVICE_NAME}-*.tar.gz" -mtime +"$RETENTION_DAYS" -exec rm -f {} \;

# Archive today's data; the date-stamped name means one archive per day
# (a rerun on the same day overwrites that day's file).
debug "Compressing files to ${BACKUP_FILE}"
tar -czf "${BACKUP_FILE}" ./

# Sync the whole local backup dir (today's archive plus retained older ones)
# to the bucket using the config written by create_s3_config.
debug "Uploading ${BACKUP_DIR} to ${BUCKET_PATH}"
/usr/bin/s3cmd --config=/.s3cfg sync "${BACKUP_DIR}" "${BUCKET_PATH}"
# Reaching this point means every step succeeded; cleanup()/write_metrics()
# read FAILURE to report the run's outcome.
FAILURE=0

info "Backup for $SERVICE_NAME $(date +%Y-%m-%d) completed successfully."
if [ "$PROM_METRICS" == "true" ]; then
write_metrics
fi

0 commit comments

Comments
 (0)