aescling's git repositories - mastodon.git/commitdiff
[nanobox] Add Automated Backups (#4023)
authorDaniel Hunsaker <danhunsaker@gmail.com>
Thu, 6 Jul 2017 22:46:45 +0000 (16:46 -0600)
committerEugen Rochko <eugen@zeonfederated.com>
Thu, 6 Jul 2017 22:46:45 +0000 (00:46 +0200)
This PR adds automatic backups to Nanobox instances. The database, Redis, and user files are backed up every day at 03:00 (server time) to the data warehouse component which comes with every Nanobox app. Old backups are automatically cleared out; the number of backups to retain can be configured by setting the `BACKUP_COUNT` environment variable to any integer value greater than 0 (the default is 1).

Also updated `.env.nanobox` to reflect the current `.env.production.sample`.

.env.nanobox
boxfile.yml

index 73abefdc657537d627e93f5506aec37004a1faa1..7920c47b95d8a36ee42892e05201831a0b9a4dda 100644 (file)
@@ -69,7 +69,7 @@ SMTP_FROM_ADDRESS=notifications@${APP_NAME}.nanoapp.io
 # PAPERCLIP_ROOT_URL=/system
 
 # Optional asset host for multi-server setups
-# CDN_HOST=assets.example.com
+# CDN_HOST=https://assets.example.com
 
 # S3 (optional)
 # S3_ENABLED=true
index ef847d4a036d214e8dd86538d332fa800b459b19..3302231109e1a6a2b026656c369bded530cffe8f 100644 (file)
@@ -153,8 +153,59 @@ worker.sidekiq:
 data.db:
   image: nanobox/postgresql:9.5
 
+  cron:
+    - id: backup
+      schedule: '0 3 * * *'
+      command: |
+        PGPASSWORD=${DATA_POSTGRES_PASS} pg_dump -U ${DATA_POSTGRES_USER} -w -Fc -O gonano |
+        gzip |
+        curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).sql.gz --data-binary @- &&
+        curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
+        json_pp |
+        grep ${HOSTNAME} |
+        sort |
+        head -n-${BACKUP_COUNT:-1} |
+        sed 's/.*: "\(.*\)".*/\1/' |
+        while read file
+        do
+          curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
+        done
+
 data.redis:
   image: nanobox/redis:3.0
 
+  cron:
+    - id: backup
+      schedule: '0 3 * * *'
+      command: |
+        curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).rdb --data-binary @/data/var/db/redis/dump.rdb &&
+        curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
+        json_pp |
+        grep ${HOSTNAME} |
+        sort |
+        head -n-${BACKUP_COUNT:-1} |
+        sed 's/.*: "\(.*\)".*/\1/' |
+        while read file
+        do
+          curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
+        done
+
 data.storage:
   image: nanobox/unfs:0.9
+
+  cron:
+    - id: backup
+      schedule: '0 3 * * *'
+      command: |
+        tar cz -C /data/var/db/unfs/ . |
+        curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).tgz --data-binary @- &&
+        curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
+        json_pp |
+        grep ${HOSTNAME} |
+        sort |
+        head -n-${BACKUP_COUNT:-1} |
+        sed 's/.*: "\(.*\)".*/\1/' |
+        while read file
+        do
+          curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
+        done