From 50cf3bf4f75bd40aaed6d0a7cc733cc4ab64fe1d Mon Sep 17 00:00:00 2001
From: Slach
Date: Fri, 1 Dec 2023 22:45:42 +0400
Subject: [PATCH] fix list remote command date parsing for all
 `remote_storage: custom` integration examples

---
 ChangeLog.md                      | 4 ++++
 pkg/storage/general.go            | 6 +++---
 test/integration/install_delve.sh | 1 +
 test/integration/kopia/list.sh    | 2 +-
 test/integration/restic/list.sh   | 4 ++--
 test/integration/rsync/list.sh    | 6 +++---
 6 files changed, 14 insertions(+), 9 deletions(-)

diff --git a/ChangeLog.md b/ChangeLog.md
index efc28390..e9333143 100644
--- a/ChangeLog.md
+++ b/ChangeLog.md
@@ -1,3 +1,7 @@
+# v2.4.9
+BUG FIXES
+- fix list remote command date parsing for all `remote_storage: custom` integration examples
+
 # v2.4.8
 BUG FIXES
 - fix Object Disks path parsing from config, remove unnecessary "/"
diff --git a/pkg/storage/general.go b/pkg/storage/general.go
index 5f5fd051..608d58d3 100644
--- a/pkg/storage/general.go
+++ b/pkg/storage/general.go
@@ -44,7 +44,7 @@ type Backup struct {
 	Legacy        bool
 	FileExtension string
 	Broken        string
-	UploadDate    time.Time
+	UploadDate    time.Time `json:"upload_date"`
 }
 
 type BackupDestination struct {
@@ -633,7 +633,7 @@ func NewBackupDestination(ctx context.Context, cfg *config.Config, ch *clickhous
 	// https://github.com/Altinity/clickhouse-backup/issues/317
 	if bufferSize <= 0 {
 		bufferSize = int(cfg.General.MaxFileSize) / cfg.AzureBlob.MaxPartsCount
-		if int(cfg.General.MaxFileSize) % cfg.AzureBlob.MaxPartsCount > 0 {
+		if int(cfg.General.MaxFileSize)%cfg.AzureBlob.MaxPartsCount > 0 {
 			bufferSize++
 		}
 		if bufferSize < 2*1024*1024 {
@@ -655,7 +655,7 @@ func NewBackupDestination(ctx context.Context, cfg *config.Config, ch *clickhous
 		partSize := cfg.S3.PartSize
 		if cfg.S3.PartSize <= 0 {
 			partSize = cfg.General.MaxFileSize / cfg.S3.MaxPartsCount
-			if cfg.General.MaxFileSize % cfg.S3.MaxPartsCount > 0 {
+			if cfg.General.MaxFileSize%cfg.S3.MaxPartsCount > 0 {
 				partSize++
 			}
 			if partSize < 5*1024*1024 {
diff --git a/test/integration/install_delve.sh b/test/integration/install_delve.sh
index 5934fd07..ded0c69f 100755
--- a/test/integration/install_delve.sh
+++ b/test/integration/install_delve.sh
@@ -27,6 +27,7 @@ CGO_ENABLED=0 GO111MODULE=on go install -ldflags "-s -w -extldflags '-static'" g
 # LOG_LEVEL=debug /root/go/bin/dlv --listen=:40001 --headless=true --api-version=2 --accept-multiclient exec /usr/bin/clickhouse-backup -- restore --data --restore-database-mapping database1:database2 --tables database1.* test_restore_database_mapping
 # S3_DEBUG=true /root/go/bin/dlv --listen=:40001 --headless=true --api-version=2 --accept-multiclient exec /usr/bin/clickhouse-backup -- upload test_rbac_backup
 # S3_DEBUG=true /root/go/bin/dlv --listen=:40001 --headless=true --api-version=2 --accept-multiclient exec /usr/bin/clickhouse-backup -- -c /etc/clickhouse-backup/config-s3.yml delete remote full_backup_339504125792808941
+# /root/go/bin/dlv --listen=:40001 --headless=true --api-version=2 --accept-multiclient exec /usr/bin/clickhouse-backup -- -c /etc/clickhouse-backup/config-custom-kopia.yml list remote
 # run integration_test.go under debug, run from host OS not inside docker
 # go test -timeout 30m -failfast -tags=integration -run "TestIntegrationEmbedded" -v ./test/integration/integration_test.go -c -o ./test/integration/integration_test
 # sudo -H bash -c 'export CLICKHOUSE_IMAGE=clickhouse/clickhouse-server; export COMPOSE_FILE=docker-compose_advanced.yml; export CLICKHOUSE_VERSION=head; cd ./test/integration/; /root/go/bin/dlv --listen=127.0.0.1:40002 --headless=true --api-version=2 --accept-multiclient exec ./integration_test -- -test.timeout 30m -test.failfast -test.run "TestIntegrationEmbedded"'
diff --git a/test/integration/kopia/list.sh b/test/integration/kopia/list.sh
index ac02e0a0..4bb4ce2c 100755
--- a/test/integration/kopia/list.sh
+++ b/test/integration/kopia/list.sh
@@ -3,4 +3,4 @@ set +x
 set -euo pipefail
 CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
 source "${CUR_DIR}/init.sh" &>/dev/null
-kopia snapshot list --storage-stats --json | jq -c -M '.[] | {"snapshot_id": .id, "snapshot_path": .source.path, "backup_name": .tags["tag:backup_name"], "creation_date": .endTime, "data_size": .storageStats.newData.packedContentBytes, "metadata_size": 0 }'
\ No newline at end of file
+kopia snapshot list --storage-stats --json | jq -c -M '.[] | {"snapshot_id": .id, "snapshot_path": .source.path, "backup_name": .tags["tag:backup_name"], "creation_date": .startTime, "upload_date": .endTime, "data_size": .storageStats.newData.packedContentBytes, "metadata_size": 0 }'
diff --git a/test/integration/restic/list.sh b/test/integration/restic/list.sh
index c4893bb1..af79ce5e 100755
--- a/test/integration/restic/list.sh
+++ b/test/integration/restic/list.sh
@@ -4,9 +4,9 @@ set -euo pipefail
 CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
 source "${CUR_DIR}/init.sh"
 rm -rf /tmp/restic_list_full.json
-restic snapshots --json | jq -c -M '.[] | {"snapshot_id": .short_id, "backup_name": .tags[0], "creation_date": .time }' > /tmp/restic_list.json
+restic snapshots --json | jq -c -M '.[] | {"snapshot_id": .short_id, "backup_name": .tags[0], "creation_date": .time, "upload_date": .time }' > /tmp/restic_list.json
 jq -c -r -M --slurp '.[].snapshot_id' /tmp/restic_list.json | while IFS= read -r snapshot_id ; do
   jq -c -M -s 'add' <(grep ${snapshot_id} /tmp/restic_list.json) <(restic stats --json ${snapshot_id}) >> /tmp/restic_list_full.json
 done
 cat /tmp/restic_list_full.json | jq -c -M --slurp '.[] | .data_size = .total_size | .metadata_size = 0'
-set -x
\ No newline at end of file
+set -x
diff --git a/test/integration/rsync/list.sh b/test/integration/rsync/list.sh
index 5ba42433..26dca677 100755
--- a/test/integration/rsync/list.sh
+++ b/test/integration/rsync/list.sh
@@ -3,8 +3,8 @@ set +x
 set -euo pipefail
 CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
 source "${CUR_DIR}/init.sh"
-ssh -i "${BACKUP_SSH_KEY}" -o "StrictHostKeyChecking no" "${BACKUP_REMOTE_SERVER}" ls -d -1 "${BACKUP_REMOTE_DIR}/*" | while IFS= read -r backup_name ; do
+ssh -i "${BACKUP_SSH_KEY}" -o "StrictHostKeyChecking no" "${BACKUP_REMOTE_SERVER}" ls -d -1 "${BACKUP_REMOTE_DIR}/*" 2>/dev/null | while IFS= read -r backup_name ; do
   backup_name=${backup_name#"$BACKUP_REMOTE_DIR"}
-  ssh -i "${BACKUP_SSH_KEY}" -o "StrictHostKeyChecking no" "${BACKUP_REMOTE_SERVER}" cat "${BACKUP_REMOTE_DIR}/${backup_name}/default/metadata.json" | jq -c -r -M '.'
+  ssh -i "${BACKUP_SSH_KEY}" -o "StrictHostKeyChecking no" "${BACKUP_REMOTE_SERVER}" cat "${BACKUP_REMOTE_DIR}/${backup_name}/default/metadata.json" 2>/dev/null | jq -c -r -M '. + {upload_date: .creation_date}'
 done
-set -x
\ No newline at end of file
+set -x
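
A note on why the `json:"upload_date"` tag matters: each `remote_storage: custom` list command is expected to print one JSON object per backup, and without the tag an `upload_date` key in that output would not be mapped onto the `UploadDate` field when the line is decoded. The following is a minimal sketch of that mapping, assuming plain encoding/json decoding and using a trimmed-down, hypothetical stand-in for storage.Backup; the field set and sample values are illustrative only and this is not the project's actual parsing code.

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// backupListItem is a trimmed-down stand-in for the patched storage.Backup
// struct; only the fields relevant to date parsing are shown here.
type backupListItem struct {
	BackupName   string    `json:"backup_name"`
	CreationDate time.Time `json:"creation_date"`
	UploadDate   time.Time `json:"upload_date"`
}

func main() {
	// One JSON line shaped like the output of the patched kopia/restic/rsync
	// list.sh examples; the values here are made up for illustration.
	line := `{"backup_name":"test_backup","creation_date":"2023-12-01T18:45:42Z","upload_date":"2023-12-01T18:46:10Z"}`

	var b backupListItem
	if err := json.Unmarshal([]byte(line), &b); err != nil {
		panic(err)
	}
	// With the `json:"upload_date"` tag present, UploadDate is populated from
	// the RFC3339 timestamp instead of staying at the zero time.
	fmt.Println(b.BackupName, b.CreationDate.Format(time.RFC3339), b.UploadDate.Format(time.RFC3339))
}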