
mastofe

My custom branch(es) on git.pleroma.social/pleroma/mastofe
commit: 185b41beb4adea5f0b55f56c0898bef74fd35435
parent: 2083000027451249836c369eb961300d7faf5f99
Author: Daniel Hunsaker <danhunsaker@gmail.com>
Date:   Thu,  6 Jul 2017 16:46:45 -0600

[nanobox] Add Automated Backups (#4023)

This PR adds automatic backups to Nanobox instances. The database, Redis data, and user files are backed up every day at 03:00 (server time) to the data warehouse component that comes with every Nanobox app. Old backups are cleared out automatically; the number of backups to retain can be configured by setting the `BACKUP_COUNT` environment variable to any integer greater than 0 (the default is 1).
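As a minimal usage sketch (assuming the standard `nanobox evar` workflow; this invocation is illustrative and not part of the commit itself), the retention window could be widened like so:

    # keep the seven most recent nightly backups instead of the default single backup
    nanobox evar add BACKUP_COUNT=7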

Also updated `.env.nanobox` to reflect the current `.env.production.sample`.

Diffstat:

M  .env.nanobox |  2 +-
M  boxfile.yml  | 51 +++++++++++++++++++++++++++++++++++++++++++++++++++
2 files changed, 52 insertions(+), 1 deletion(-)

diff --git a/.env.nanobox b/.env.nanobox
@@ -69,7 +69,7 @@ SMTP_FROM_ADDRESS=notifications@${APP_NAME}.nanoapp.io
 # PAPERCLIP_ROOT_URL=/system

 # Optional asset host for multi-server setups
-# CDN_HOST=assets.example.com
+# CDN_HOST=https://assets.example.com

 # S3 (optional)
 # S3_ENABLED=true
diff --git a/boxfile.yml b/boxfile.yml
@@ -153,8 +153,59 @@ worker.sidekiq:
 data.db:
   image: nanobox/postgresql:9.5
+  cron:
+    - id: backup
+      schedule: '0 3 * * *'
+      command: |
+        PGPASSWORD=${DATA_POSTGRES_PASS} pg_dump -U ${DATA_POSTGRES_USER} -w -Fc -O gonano |
+        gzip |
+        curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).sql.gz --data-binary @- &&
+        curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
+        json_pp |
+        grep ${HOSTNAME} |
+        sort |
+        head -n-${BACKUP_COUNT:-1} |
+        sed 's/.*: "\(.*\)".*/\1/' |
+        while read file
+        do
+          curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
+        done
+

 data.redis:
   image: nanobox/redis:3.0
+  cron:
+    - id: backup
+      schedule: '0 3 * * *'
+      command: |
+        curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).rdb --data-binary @/data/var/db/redis/dump.rdb &&
+        curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
+        json_pp |
+        grep ${HOSTNAME} |
+        sort |
+        head -n-${BACKUP_COUNT:-1} |
+        sed 's/.*: "\(.*\)".*/\1/' |
+        while read file
+        do
+          curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
+        done
+

 data.storage:
   image: nanobox/unfs:0.9
+
+  cron:
+    - id: backup
+      schedule: '0 3 * * *'
+      command: |
+        tar cz -C /data/var/db/unfs/ |
+        curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).tgz --data-binary @- &&
+        curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
+        json_pp |
+        grep ${HOSTNAME} |
+        sort |
+        head -n-${BACKUP_COUNT:-1} |
+        sed 's/.*: "\(.*\)".*/\1/' |
+        while read file
+        do
+          curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
+        done