From 978d5455a094f2894e1b1de56f5755488b7efb45 Mon Sep 17 00:00:00 2001
From: Daniel Hunsaker
Date: Thu, 6 Jul 2017 16:46:45 -0600
Subject: [PATCH] [nanobox] Add Automated Backups (#4023)

This PR adds automatic backups to Nanobox instances. The database, Redis, and
user files are backed up every day at 03:00 (server time) to the data
warehouse component, which comes with every Nanobox app. Old backups are
automatically cleared out; the number of backups to retain can be configured
by setting the `BACKUP_COUNT` environment variable to any integer greater
than 0 (the default is 1).

Also updated `.env.nanobox` to reflect the current `.env.production.sample`.
---
 .env.nanobox |  2 +-
 boxfile.yml  | 51 +++++++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 52 insertions(+), 1 deletion(-)

diff --git a/.env.nanobox b/.env.nanobox
index 73abefdc65..7920c47b95 100644
--- a/.env.nanobox
+++ b/.env.nanobox
@@ -69,7 +69,7 @@ SMTP_FROM_ADDRESS=notifications@${APP_NAME}.nanoapp.io
 # PAPERCLIP_ROOT_URL=/system
 
 # Optional asset host for multi-server setups
-# CDN_HOST=assets.example.com
+# CDN_HOST=https://assets.example.com
 
 # S3 (optional)
 # S3_ENABLED=true
diff --git a/boxfile.yml b/boxfile.yml
index ef847d4a03..3302231109 100644
--- a/boxfile.yml
+++ b/boxfile.yml
@@ -153,8 +153,59 @@ worker.sidekiq:
 data.db:
   image: nanobox/postgresql:9.5
 
+  cron:
+    - id: backup
+      schedule: '0 3 * * *'
+      command: |
+        PGPASSWORD=${DATA_POSTGRES_PASS} pg_dump -U ${DATA_POSTGRES_USER} -w -Fc -O gonano |
+          gzip |
+          curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).sql.gz --data-binary @- &&
+        curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
+          json_pp |
+          grep ${HOSTNAME} |
+          sort |
+          head -n-${BACKUP_COUNT:-1} |
+          sed 's/.*: "\(.*\)".*/\1/' |
+          while read file
+          do
+            curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
+          done
+
 data.redis:
   image: nanobox/redis:3.0
 
+  cron:
+    - id: backup
+      schedule: '0 3 * * *'
+      command: |
+        curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).rdb --data-binary @/data/var/db/redis/dump.rdb &&
+        curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
+          json_pp |
+          grep ${HOSTNAME} |
+          sort |
+          head -n-${BACKUP_COUNT:-1} |
+          sed 's/.*: "\(.*\)".*/\1/' |
+          while read file
+          do
+            curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
+          done
+
 data.storage:
   image: nanobox/unfs:0.9
+
+  cron:
+    - id: backup
+      schedule: '0 3 * * *'
+      command: |
+        tar cz -C /data/var/db/unfs/ . |
+          curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).tgz --data-binary @- &&
+        curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
+          json_pp |
+          grep ${HOSTNAME} |
+          sort |
+          head -n-${BACKUP_COUNT:-1} |
+          sed 's/.*: "\(.*\)".*/\1/' |
+          while read file
+          do
+            curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
+          done
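
The retention step in each cron job works because the blob names embed a zero-padded UTC
timestamp (backup-${HOSTNAME}-YYYY-MM-DD.HH-MM-SS.*): a plain lexical `sort` therefore orders
them oldest-first, and `head -n-${BACKUP_COUNT:-1}` (GNU head: all but the last N lines) passes
only the entries older than the N newest on to the delete loop, so the N newest survive. As a
minimal sketch of how an operator might raise the retention count, assuming the Nanobox CLI's
`evar` subcommand and an illustrative value of 7 (exact invocation may vary by CLI version and
remote alias):

    # keep the seven newest backups of each data component instead of one
    nanobox evar add BACKUP_COUNT=7

Assuming app evars are visible inside the data components (the cron commands above already rely
on this for BACKUP_COUNT), the data.db, data.redis, and data.storage jobs all pick the new value
up on their next 03:00 run.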