2 changes: 1 addition & 1 deletion .github/workflows/docker.yml
@@ -23,7 +23,7 @@ jobs:
run: |
if [[ -n "${GITHUB_HEAD_REF}" ]]; then
# This is a PR, use the source branch name
echo "REF_NAME=${GITHUB_HEAD_REF}" >> $GITHUB_ENV
echo "REF_NAME=${GITHUB_HEAD_REF/\//-}" >> $GITHUB_ENV
else
# This is a push, use the branch or tag name from GITHUB_REF
echo "REF_NAME=${GITHUB_REF##*/}" >> $GITHUB_ENV
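The new value uses bash pattern substitution so that PR source branch names containing a slash (for example "feature/backup-role") remain valid as a Docker tag component. A minimal, illustrative sketch (not part of the PR) of the effect:

      # Illustrative only: shows what ${GITHUB_HEAD_REF/\//-} produces.
      - name: Show sanitized ref name
        run: |
          GITHUB_HEAD_REF="feature/backup-role"
          echo "${GITHUB_HEAD_REF/\//-}"   # prints: feature-backup-role
          # Note: this form replaces only the first '/'; ${GITHUB_HEAD_REF//\//-} would replace all of them.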
3 changes: 3 additions & 0 deletions automation/config_pgcluster.yml
@@ -200,6 +200,9 @@
- role: postgresql_privs
when: inventory_hostname in groups['primary']

- role: backup
when: wal_g_install | bool or pgbackrest_install | bool

- role: wal_g
when: wal_g_install | bool

7 changes: 7 additions & 0 deletions automation/deploy_pgcluster.yml
@@ -284,6 +284,8 @@
- role: ntp
- role: ssh_keys
- role: copy
- role: backup
when: wal_g_install | bool or pgbackrest_install | bool

- name: deploy_pgcluster.yml | Deploy balancers
ansible.builtin.import_playbook: balancers.yml
@@ -317,6 +319,8 @@
- pgbackrest_auto_conf | default(true) | bool # to be able to disable auto backup settings
tags: always
roles:
- role: backup
when: pgbackrest_install | bool
- role: pgbackrest
when: pgbackrest_install | bool

@@ -354,6 +358,9 @@
tags: always

roles:
- role: backup
when: (wal_g_install | bool) or (pgbackrest_install | bool)

- role: wal_g
when: wal_g_install|bool

40 changes: 40 additions & 0 deletions automation/roles/backup/defaults/main.yml
@@ -0,0 +1,40 @@
---
# Backups (if 'pgbackrest_install' or 'wal_g_install' is 'true')
aws_s3_bucket_create: true # if 'cloud_provider=aws'
aws_s3_bucket_name: "{{ patroni_cluster_name }}-backup" # Name of the S3 bucket.
aws_s3_bucket_region: "{{ server_location }}" # The AWS region to use.
aws_s3_bucket_object_lock_enabled: false # Whether to enable S3 Object Lock.
aws_s3_bucket_encryption: "AES256" # Describes the default server-side encryption to apply to new objects in the bucket. Choices: "AES256", "aws:kms"
aws_s3_bucket_block_public_acls: true # Sets BlockPublicAcls value.
aws_s3_bucket_ignore_public_acls: true # Sets IgnorePublicAcls value.
aws_s3_bucket_absent: false # Allow deleting the S3 bucket when the cluster servers are removed with 'state=absent'.

gcp_bucket_create: true # if 'cloud_provider=gcp'
gcp_bucket_name: "{{ patroni_cluster_name }}-backup" # Name of the GCS bucket.
gcp_bucket_storage_class: "MULTI_REGIONAL" # The bucket’s default storage class.
gcp_bucket_default_object_acl: "projectPrivate" # Apply a predefined set of default object access controls to this bucket.
gcp_bucket_absent: false # Allow deleting the GCS bucket when the cluster servers are removed with 'state=absent'.

azure_blob_storage_create: true # if 'cloud_provider=azure'
azure_blob_storage_name: "{{ patroni_cluster_name }}-backup" # Name of a blob container within the storage account.
azure_blob_storage_blob_type: "block" # Type of blob object. Values include: block, page.
azure_blob_storage_account_name: "{{ patroni_cluster_name | lower | replace('-', '') | truncate(24, true, '') }}"
azure_blob_storage_account_type: "Standard_RAGRS" # Type of storage account.
azure_blob_storage_account_kind: "BlobStorage" # The kind of storage. Values include: Storage, StorageV2, BlobStorage, BlockBlobStorage, FileStorage.
azure_blob_storage_account_access_tier: "Hot" # The access tier for this storage account. Required when kind=BlobStorage.
azure_blob_storage_account_public_network_access: "Enabled" # Allow public network access to the Storage Account so that the Blob Storage container can be created.
azure_blob_storage_account_allow_blob_public_access: false # Disallow public anonymous access.
azure_blob_storage_absent: false # Allow deleting the Azure Blob Storage container when the cluster servers are removed with 'state=absent'.

digital_ocean_spaces_create: true # if 'cloud_provider=digitalocean'
digital_ocean_spaces_name: "{{ patroni_cluster_name }}-backup" # Name of the Spaces Object Storage (S3 bucket).
digital_ocean_spaces_region: "nyc3" # The region to create the Space in.
digital_ocean_spaces_absent: false # Allow deleting the Spaces Object Storage when the cluster servers are removed with 'state=absent'.

hetzner_object_storage_create: true # if 'cloud_provider=hetzner'
hetzner_object_storage_name: "{{ patroni_cluster_name }}-backup" # Name of the Object Storage (S3 bucket).
hetzner_object_storage_region: "{{ server_location }}" # The region where the Object Storage (S3 bucket) will be created.
hetzner_object_storage_endpoint: "https://{{ hetzner_object_storage_region }}.your-objectstorage.com"
hetzner_object_storage_access_key: "" # (required) Object Storage ACCESS KEY
hetzner_object_storage_secret_key: "" # (required) Object Storage SECRET KEY
hetzner_object_storage_absent: false # Allow deleting the Object Storage when the cluster servers are removed with 'state=absent'.
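These defaults can be overridden per environment in the usual Ansible ways. A minimal sketch with hypothetical values (the variable placement in group_vars or extra vars, and the environment variable names, are assumptions, not part of the PR):

  # Hypothetical overrides (illustrative only):
  aws_s3_bucket_create: false                  # reuse an existing bucket instead of creating one
  aws_s3_bucket_name: "prod-pgcluster-backup"  # hypothetical bucket name
  hetzner_object_storage_access_key: "{{ lookup('ansible.builtin.env', 'HETZNER_OBJECT_STORAGE_ACCESS_KEY') }}" # assumed env var name
  hetzner_object_storage_secret_key: "{{ lookup('ansible.builtin.env', 'HETZNER_OBJECT_STORAGE_SECRET_KEY') }}" # assumed env var name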
17 changes: 17 additions & 0 deletions automation/roles/backup/tasks/aws.yml
@@ -0,0 +1,17 @@
---
# S3 bucket (Backups)
- name: "AWS: Create S3 bucket '{{ aws_s3_bucket_name }}'"
amazon.aws.s3_bucket:
access_key: "{{ lookup('ansible.builtin.env', 'AWS_ACCESS_KEY_ID') }}"
secret_key: "{{ lookup('ansible.builtin.env', 'AWS_SECRET_ACCESS_KEY') }}"
name: "{{ aws_s3_bucket_name }}"
region: "{{ aws_s3_bucket_region }}"
object_lock_enabled: "{{ aws_s3_bucket_object_lock_enabled }}"
encryption: "{{ aws_s3_bucket_encryption }}"
public_access:
block_public_acls: "{{ aws_s3_bucket_block_public_acls }}"
ignore_public_acls: "{{ aws_s3_bucket_ignore_public_acls }}"
state: present
when:
- (pgbackrest_install | bool or wal_g_install | bool)
- aws_s3_bucket_create | bool
37 changes: 37 additions & 0 deletions automation/roles/backup/tasks/azure.yml
@@ -0,0 +1,37 @@
---
# Azure Blob Storage (Backups)
- block:
- name: "Azure: Create Storage Account '{{ azure_blob_storage_account_name }}'"
azure.azcollection.azure_rm_storageaccount:
resource_group: "{{ azure_resource_group | default('postgres-cluster-resource-group' ~ '-' ~ server_location) }}"
name: "{{ azure_blob_storage_account_name }}"
account_type: "{{ azure_blob_storage_account_type }}"
kind: "{{ azure_blob_storage_account_kind }}"
access_tier: "{{ azure_blob_storage_account_access_tier }}"
public_network_access: "{{ azure_blob_storage_account_public_network_access }}"
allow_blob_public_access: "{{ azure_blob_storage_account_allow_blob_public_access }}"
state: present

- name: "Azure: Get Storage Account info"
azure.azcollection.azure_rm_storageaccount_info:
resource_group: "{{ azure_resource_group | default('postgres-cluster-resource-group' ~ '-' ~ server_location) }}"
name: "{{ azure_blob_storage_account_name }}"
show_connection_string: true
no_log: true # do not output storage account contents to the ansible log
register: azure_storage_account_info

- name: "Set variable: azure_storage_account_key"
ansible.builtin.set_fact:
azure_storage_account_key: "{{ azure_storage_account_info.storageaccounts[0].primary_endpoints.key }}"
no_log: true # do not output storage account contents to the ansible log

- name: "Azure: Create Blob Storage container '{{ azure_blob_storage_name }}'"
azure.azcollection.azure_rm_storageblob:
resource_group: "{{ azure_resource_group | default('postgres-cluster-resource-group' ~ '-' ~ server_location) }}"
account_name: "{{ azure_blob_storage_account_name }}"
container: "{{ azure_blob_storage_name }}"
blob_type: "{{ azure_blob_storage_blob_type }}"
state: present
when:
- (pgbackrest_install | bool or wal_g_install | bool)
- azure_blob_storage_create | bool
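Azure storage account names must be 3-24 characters of lowercase letters and digits, which is presumably why the role default derives azure_blob_storage_account_name from the cluster name with the lower, replace and truncate filters. A small illustration with a hypothetical cluster name (not part of the PR):

  # "postgres-cluster-01" | lower | replace('-', '') | truncate(24, true, '')  =>  "postgrescluster01"
  example_account_name: "{{ 'postgres-cluster-01' | lower | replace('-', '') | truncate(24, true, '') }}"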
13 changes: 13 additions & 0 deletions automation/roles/backup/tasks/digitalocean.yml
@@ -0,0 +1,13 @@
---
# Spaces Object Storage (Backups)
- name: "DigitalOcean: Create Spaces Bucket '{{ digital_ocean_spaces_name }}'"
community.digitalocean.digital_ocean_spaces:
oauth_token: "{{ lookup('ansible.builtin.env', 'DO_API_TOKEN') }}"
name: "{{ digital_ocean_spaces_name }}"
region: "{{ digital_ocean_spaces_region }}"
aws_access_key_id: "{{ AWS_ACCESS_KEY_ID }}"
aws_secret_access_key: "{{ AWS_SECRET_ACCESS_KEY }}"
state: present
when:
- (pgbackrest_install | bool or wal_g_install | bool)
- digital_ocean_spaces_create | bool
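Unlike the AWS task, the Spaces task expects AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY as plain Ansible variables rather than environment lookups. A minimal sketch (an assumption, not part of the PR) of supplying them from the controller's environment:

  # Assumed definition (illustrative only): map the Spaces keys from the controller environment.
  AWS_ACCESS_KEY_ID: "{{ lookup('ansible.builtin.env', 'AWS_ACCESS_KEY_ID') }}"
  AWS_SECRET_ACCESS_KEY: "{{ lookup('ansible.builtin.env', 'AWS_SECRET_ACCESS_KEY') }}"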
14 changes: 14 additions & 0 deletions automation/roles/backup/tasks/gcp.yml
@@ -0,0 +1,14 @@
---
# GCS Bucket (Backups)
- name: "GCP: Create bucket '{{ gcp_bucket_name }}'"
google.cloud.gcp_storage_bucket:
auth_kind: "serviceaccount"
service_account_contents: "{{ gcp_service_account_contents }}"
project: "{{ gcp_project | default(project_info.resources[0].projectNumber) }}"
name: "{{ gcp_bucket_name }}"
storage_class: "{{ gcp_bucket_storage_class }}"
predefined_default_object_acl: "{{ gcp_bucket_default_object_acl }}"
state: present
when:
- (pgbackrest_install | bool or wal_g_install | bool)
- gcp_bucket_create | bool
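The task relies on gcp_service_account_contents and, when gcp_project is unset, on a project_info fact; neither is defined in this diff. A minimal sketch (an assumption, not part of the PR) of how the service-account JSON could be provided:

  # Assumed definition (illustrative only): read the service-account JSON from the controller environment.
  gcp_service_account_contents: "{{ lookup('ansible.builtin.env', 'GCP_SERVICE_ACCOUNT_CONTENTS') }}"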
115 changes: 115 additions & 0 deletions automation/roles/backup/tasks/hetzner.yml
@@ -0,0 +1,115 @@
---
# Object Storage (S3 bucket for backups)
- name: Ensure that the 'boto3' dependency is present on the controlling host
ansible.builtin.pip:
name: boto3
executable: pip3
extra_args: --user
become: false
vars:
ansible_become: false
environment:
PATH: "{{ ansible_env.PATH }}:/usr/local/bin:/usr/bin"
PIP_BREAK_SYSTEM_PACKAGES: "1"
when:
- (pgbackrest_install | bool or wal_g_install | bool)
- hetzner_object_storage_create | bool
- name: "Hetzner Cloud: Create Object Storage (S3 bucket) '{{ hetzner_object_storage_name }}'"
amazon.aws.s3_bucket:
endpoint_url: "{{ hetzner_object_storage_endpoint }}"
ceph: true
aws_access_key: "{{ hetzner_object_storage_access_key }}"
aws_secret_key: "{{ hetzner_object_storage_secret_key }}"
name: "{{ hetzner_object_storage_name }}"
region: "{{ hetzner_object_storage_region }}"
requester_pays: false
state: present
register: s3_bucket_result
failed_when: s3_bucket_result.failed and not "GetBucketRequestPayment" in s3_bucket_result.msg
# TODO: https://github.com/ansible-collections/amazon.aws/issues/2447
when:
- (pgbackrest_install | bool or wal_g_install | bool)
- hetzner_object_storage_create | bool
- hetzner_object_storage_access_key | length > 0
- hetzner_object_storage_secret_key | length > 0

- name: "Hetzner Cloud: Delete Object Storage (S3 bucket) '{{ hetzner_object_storage_name }}'"
amazon.aws.s3_bucket:
endpoint_url: "{{ hetzner_object_storage_endpoint }}"
ceph: true
access_key: "{{ hetzner_object_storage_access_key }}"
secret_key: "{{ hetzner_object_storage_secret_key }}"
name: "{{ hetzner_object_storage_name }}"
region: "{{ hetzner_object_storage_region }}"
requester_pays: false
state: absent
force: true
when:
- (pgbackrest_install | bool or wal_g_install | bool)
- hetzner_object_storage_absent | bool
- hetzner_object_storage_access_key | length > 0
- hetzner_object_storage_secret_key | length > 0

- name: "Set variable 'pgbackrest_conf' for backup in Hetzner Object Storage (S3 bucket)"
ansible.builtin.set_fact:
pgbackrest_conf:
global:
- { option: "log-level-file", value: "detail" }
- { option: "log-path", value: "/var/log/pgbackrest" }
- { option: "repo1-type", value: "s3" }
- { option: "repo1-path", value: "{{ PGBACKREST_REPO_PATH | default('/pgbackrest') }}" }
- { option: "repo1-s3-key", value: "{{ PGBACKREST_S3_KEY | default(hetzner_object_storage_access_key | default('')) }}" }
- { option: "repo1-s3-key-secret", value: "{{ PGBACKREST_S3_KEY_SECRET | default(hetzner_object_storage_secret_key | default('')) }}" }
- { option: "repo1-s3-bucket", value: "{{ PGBACKREST_S3_BUCKET | default(hetzner_object_storage_name | default(patroni_cluster_name + '-backup')) }}" }
- { option: "repo1-s3-endpoint", value: "{{ PGBACKREST_S3_ENDPOINT | default(hetzner_object_storage_endpoint |
default('https://' + (hetzner_object_storage_region | default(server_location)) + '.your-objectstorage.com')) }}" }
- { option: "repo1-s3-region", value: "{{ PGBACKREST_S3_REGION | default(hetzner_object_storage_region | default(server_location)) }}" }
- { option: "repo1-s3-uri-style", value: "{{ PGBACKREST_S3_URI_STYLE | default('path') }}" }
- { option: "repo1-retention-full", value: "{{ PGBACKREST_RETENTION_FULL | default('4') }}" }
- { option: "repo1-retention-archive", value: "{{ PGBACKREST_RETENTION_ARCHIVE | default('4') }}" }
- { option: "repo1-retention-archive-type", value: "{{ PGBACKREST_RETENTION_ARCHIVE_TYPE | default('full') }}" }
- { option: "repo1-bundle", value: "y" }
- { option: "repo1-block", value: "y" }
- { option: "start-fast", value: "y" }
- { option: "stop-auto", value: "y" }
- { option: "link-all", value: "y" }
- { option: "resume", value: "n" }
- { option: "archive-async", value: "y" }
- { option: "archive-get-queue-max", value: "1GiB" }
- { option: "spool-path", value: "/var/spool/pgbackrest" }
- { option: "process-max", value: "{{ PGBACKREST_PROCESS_MAX | default([ansible_processor_vcpus | int // 2, 1] | max) }}" }
- { option: "backup-standby", value: "{{ 'y' if groups['postgres_cluster'] | length > 1 else 'n' }}" }
stanza:
- { option: "log-level-console", value: "info" }
- { option: "recovery-option", value: "recovery_target_action=promote" }
- { option: "pg1-path", value: "{{ postgresql_data_dir }}" }
# delegate_to: localhost
run_once: true # noqa run-once
no_log: true # do not output contents to the ansible log

# Hetzner Object Storage (if 'cloud_provider=hetzner')
- name: "Set variable 'wal_g_json' for backup in AWS S3 bucket"
ansible.builtin.set_fact:
wal_g_json:
- { option: "AWS_ACCESS_KEY_ID", value: "{{ WALG_AWS_ACCESS_KEY_ID | default(hetzner_object_storage_access_key | default('')) }}" }
- { option: "AWS_SECRET_ACCESS_KEY", value: "{{ WALG_AWS_SECRET_ACCESS_KEY | default(hetzner_object_storage_secret_key | default('')) }}" }
- { option: "AWS_ENDPOINT", value: "{{ WALG_S3_ENDPOINT | default(hetzner_object_storage_endpoint |
default('https://' + (hetzner_object_storage_region | default(server_location)) + '.your-objectstorage.com')) }}" }
- { option: "AWS_S3_FORCE_PATH_STYLE", value: "{{ AWS_S3_FORCE_PATH_STYLE | default(true) }}" }
- { option: "AWS_REGION", value: "{{ WALG_S3_REGION | default(hetzner_object_storage_region | default(server_location)) }}" }
- {
option: "WALG_S3_PREFIX",
value: "{{ WALG_S3_PREFIX | default('s3://' + (hetzner_object_storage_name | default(patroni_cluster_name + '-backup'))) }}",
}
- { option: "WALG_COMPRESSION_METHOD", value: "{{ WALG_COMPRESSION_METHOD | default('brotli') }}" }
- { option: "WALG_DELTA_MAX_STEPS", value: "{{ WALG_DELTA_MAX_STEPS | default('6') }}" }
- { option: "WALG_DOWNLOAD_CONCURRENCY", value: "{{ WALG_DOWNLOAD_CONCURRENCY | default([ansible_processor_vcpus | int // 2, 1] | max) }}" }
- { option: "WALG_UPLOAD_CONCURRENCY", value: "{{ WALG_UPLOAD_CONCURRENCY | default([ansible_processor_vcpus | int // 2, 1] | max) }}" }
- { option: "WALG_UPLOAD_DISK_CONCURRENCY", value: "{{ WALG_UPLOAD_DISK_CONCURRENCY | default([ansible_processor_vcpus | int // 2, 1] | max) }}" }
- { option: "PGDATA", value: "{{ postgresql_data_dir }}" }
- { option: "PGHOST", value: "{{ postgresql_unix_socket_dir | default('/var/run/postgresql') }}" }
- { option: "PGPORT", value: "{{ postgresql_port | default('5432') }}" }
- { option: "PGUSER", value: "{{ patroni_superuser_username | default('postgres') }}" }
delegate_to: localhost
run_once: true # noqa run-once
no_log: true # do not output contents to the ansible log
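The upper-case PGBACKREST_* and WALG_* names above are consumed through "| default(...)", so they read as optional overrides; the concurrency settings such as process-max default to half the vCPU count but never below 1 (8 vCPUs gives 4, a single vCPU gives 1). A sketch of such overrides (assumed usage, hypothetical values):

  # Assumed overrides (illustrative only): supplied e.g. as extra vars or group_vars.
  PGBACKREST_RETENTION_FULL: "7"
  WALG_COMPRESSION_METHOD: "zstd"
  PGBACKREST_PROCESS_MAX: 2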
25 changes: 25 additions & 0 deletions automation/roles/backup/tasks/main.yml
@@ -0,0 +1,25 @@
---
- name: Import tasks for aws
ansible.builtin.import_tasks: aws.yml
when: backup_provider == 'aws'
tags: pgbackrest, wal-g, wal_g

- name: Import tasks for azure
ansible.builtin.import_tasks: azure.yml
when: backup_provider == 'azure'
tags: pgbackrest, wal-g, wal_g

- name: Import tasks for digitalocean
ansible.builtin.import_tasks: digitalocean.yml
when: backup_provider == 'digitalocean'
tags: pgbackrest, wal-g, wal_g

- name: Import tasks for gcp
ansible.builtin.import_tasks: gcp.yml
when: backup_provider == 'gcp'
tags: pgbackrest, wal-g, wal_g

- name: Import tasks for hetzner
ansible.builtin.import_tasks: hetzner.yml
when: backup_provider == 'hetzner'
tags: pgbackrest, wal-g, wal_g
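backup_provider is not defined anywhere in this diff; presumably it defaults to the target cloud provider. A minimal sketch of such a default (an assumption, not part of the PR):

  # Assumed default (illustrative only): provision backup storage on the same cloud as the cluster.
  backup_provider: "{{ cloud_provider | default('') }}"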