Skip to content

Commit

Permalink
Merge pull request #168 from ar-io/feature/PE-6332_prefixing
Browse files Browse the repository at this point in the history
feat(litestream): support path prefixing the backed up data in s3
  • Loading branch information
hlolli authored Jul 15, 2024
2 parents 3629e9c + 76c7a2c commit 6080b99
Show file tree
Hide file tree
Showing 4 changed files with 47 additions and 13 deletions.
1 change: 1 addition & 0 deletions docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,7 @@ services:
- TVAL_AR_IO_SQLITE_BACKUP_S3_BUCKET_REGION=${AR_IO_SQLITE_BACKUP_S3_BUCKET_REGION:-}
- TVAL_AR_IO_SQLITE_BACKUP_S3_BUCKET_ACCESS_KEY=${AR_IO_SQLITE_BACKUP_S3_BUCKET_ACCESS_KEY:-}
- TVAL_AR_IO_SQLITE_BACKUP_S3_BUCKET_SECRET_KEY=${AR_IO_SQLITE_BACKUP_S3_BUCKET_SECRET_KEY:-}
- TVAL_AR_IO_SQLITE_BACKUP_S3_BUCKET_PREFIX=${AR_IO_SQLITE_BACKUP_S3_BUCKET_PREFIX:-}

upload-service:
image: ghcr.io/ardriveapp/turbo-upload-service:${UPLOAD_SERVICE_IMAGE_TAG:-fd1032e3012c0179577b08bbfe932f4ad7d805cd}
Expand Down
1 change: 1 addition & 0 deletions docs/envs.md
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ This document describes the environment variables that can be used to configure
| AR_IO_SQLITE_BACKUP_S3_BUCKET_REGION | String | "" | S3-compatible bucket region, used by the Litestream backup service |
| AR_IO_SQLITE_BACKUP_S3_BUCKET_ACCESS_KEY | String | "" | S3-compatible bucket access_key credential, used by Litestream backup service, omit if using resource-based IAM role |
| AR_IO_SQLITE_BACKUP_S3_BUCKET_SECRET_KEY | String | "" | S3-compatible bucket access_secret_key credential, used by Litestream backup service, omit if using resource-based IAM role |
| AR_IO_SQLITE_BACKUP_S3_BUCKET_PREFIX | String | "" | Path prefix prepended to the backup object paths within the S3 bucket where SQLite backups are stored. |
| AWS_ACCESS_KEY_ID | String | undefined | AWS access key ID for accessing AWS services |
| AWS_SECRET_ACCESS_KEY | String | undefined | AWS secret access key for accessing AWS services |
| AWS_REGION | String | undefined | AWS region where the resources are located |
Expand Down
9 changes: 9 additions & 0 deletions litestream/docker-entrypoint.sh
Original file line number Diff line number Diff line change
Expand Up @@ -7,4 +7,13 @@ ytt -f /etc/litestream.template.yaml --data-values-env TVAL > /etc/litestream.y

chmod go+r /etc/litestream.yml

# Optionally restore the SQLite databases from their S3 replicas before
# starting replication. Enabled by setting AR_IO_SQLITE_RESTORE_FROM_BACKUP
# to any non-empty value.
if [ -n "$AR_IO_SQLITE_RESTORE_FROM_BACKUP" ]; then
  echo "Attempting to restore from backup if exists..."
  # -if-db-not-exists: no-op when the local database already exists.
  # -if-replica-exists: no-op when no replica has been uploaded yet.
  # TODO: add core.db to this list once its upload issue is resolved
  for db in data moderation bundles; do
    /usr/local/bin/litestream restore -config /etc/litestream.yml -if-db-not-exists -if-replica-exists "/app/data/sqlite/${db}.db"
  done
fi

# Run continuous replication in the foreground; this is the container's
# long-lived main process.
/usr/local/bin/litestream replicate -config /etc/litestream.yml
49 changes: 36 additions & 13 deletions litestream/litestream.template.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6,20 +6,33 @@ secret-access-key: #@ data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_SECRET_KEY
#@ end

dbs:
- path: /app/data/sqlite/core.db
replicas:
- type: s3
name: ar_io_node_core.backup.db
bucket: #@ data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_NAME
region: #@ data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_REGION
forcePathStyle: true
sync-interval: 1s
snapshot-interval: 24h
retention: 168h
# Hlöðver: currently we are dealing with a bug uploading core.db
# due to its size. A fix is underway, this is temporary.
#
# - path: /app/data/sqlite/core.db
# replicas:
# - type: s3
# #@ if data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_PREFIX != "":
# path: #@ data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_PREFIX + "/ar_io_node_core.backup.db"
# #@ end
# #@ if data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_PREFIX == "":
# path: ar_io_node_core.backup.db
# #@ end
# bucket: #@ data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_NAME
# region: #@ data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_REGION
# forcePathStyle: true
# sync-interval: 1s
# snapshot-interval: 24h
# retention: 168h
- path: /app/data/sqlite/data.db
replicas:
- type: s3
name: ar_io_node_data.backup.db
#@ if data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_PREFIX != "":
path: #@ data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_PREFIX + "/ar_io_node_data.backup.db"
#@ end
#@ if data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_PREFIX == "":
path: ar_io_node_data.backup.db
#@ end
bucket: #@ data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_NAME
region: #@ data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_REGION
forcePathStyle: true
Expand All @@ -29,7 +42,12 @@ dbs:
- path: /app/data/sqlite/moderation.db
replicas:
- type: s3
name: ar_io_node_moderation.backup.db
#@ if data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_PREFIX != "":
path: #@ data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_PREFIX + "/ar_io_node_moderation.backup.db"
#@ end
#@ if data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_PREFIX == "":
path: ar_io_node_moderation.backup.db
#@ end
bucket: #@ data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_NAME
region: #@ data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_REGION
forcePathStyle: true
Expand All @@ -39,7 +57,12 @@ dbs:
- path: /app/data/sqlite/bundles.db
replicas:
- type: s3
name: ar_io_node_bundles.backup.db
#@ if data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_PREFIX != "":
path: #@ data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_PREFIX + "/ar_io_node_bundles.backup.db"
#@ end
#@ if data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_PREFIX == "":
path: ar_io_node_bundles.backup.db
#@ end
bucket: #@ data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_NAME
region: #@ data.values.AR_IO_SQLITE_BACKUP_S3_BUCKET_REGION
forcePathStyle: true
Expand Down

0 comments on commit 6080b99

Please sign in to comment.