Skip to content

Commit

Permalink
fix `list remote` command date parsing for all `remote_storage: custom`
Browse files Browse the repository at this point in the history
… integration examples
  • Loading branch information
Slach committed Dec 1, 2023
1 parent 54d5617 commit 50cf3bf
Show file tree
Hide file tree
Showing 6 changed files with 14 additions and 9 deletions.
4 changes: 4 additions & 0 deletions ChangeLog.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# v2.4.9
BUG FIXES
- fix list remote command date parsing for all `remote_storage: custom` integration examples

# v2.4.8
BUG FIXES
- fix Object Disks path parsing from config, remove unnecessary "/"
Expand Down
6 changes: 3 additions & 3 deletions pkg/storage/general.go
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ type Backup struct {
Legacy bool
FileExtension string
Broken string
UploadDate time.Time
UploadDate time.Time `json:"upload_date"`
}

type BackupDestination struct {
Expand Down Expand Up @@ -633,7 +633,7 @@ func NewBackupDestination(ctx context.Context, cfg *config.Config, ch *clickhous
// https://github.com/Altinity/clickhouse-backup/issues/317
if bufferSize <= 0 {
bufferSize = int(cfg.General.MaxFileSize) / cfg.AzureBlob.MaxPartsCount
if int(cfg.General.MaxFileSize) % cfg.AzureBlob.MaxPartsCount > 0 {
if int(cfg.General.MaxFileSize)%cfg.AzureBlob.MaxPartsCount > 0 {
bufferSize++
}
if bufferSize < 2*1024*1024 {
Expand All @@ -655,7 +655,7 @@ func NewBackupDestination(ctx context.Context, cfg *config.Config, ch *clickhous
partSize := cfg.S3.PartSize
if cfg.S3.PartSize <= 0 {
partSize = cfg.General.MaxFileSize / cfg.S3.MaxPartsCount
if cfg.General.MaxFileSize % cfg.S3.MaxPartsCount > 0 {
if cfg.General.MaxFileSize%cfg.S3.MaxPartsCount > 0 {
partSize++
}
if partSize < 5*1024*1024 {
Expand Down
1 change: 1 addition & 0 deletions test/integration/install_delve.sh
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ CGO_ENABLED=0 GO111MODULE=on go install -ldflags "-s -w -extldflags '-static'" g
# LOG_LEVEL=debug /root/go/bin/dlv --listen=:40001 --headless=true --api-version=2 --accept-multiclient exec /usr/bin/clickhouse-backup -- restore --data --restore-database-mapping database1:database2 --tables database1.* test_restore_database_mapping
# S3_DEBUG=true /root/go/bin/dlv --listen=:40001 --headless=true --api-version=2 --accept-multiclient exec /usr/bin/clickhouse-backup -- upload test_rbac_backup
# S3_DEBUG=true /root/go/bin/dlv --listen=:40001 --headless=true --api-version=2 --accept-multiclient exec /usr/bin/clickhouse-backup -- -c /etc/clickhouse-backup/config-s3.yml delete remote full_backup_339504125792808941
# /root/go/bin/dlv --listen=:40001 --headless=true --api-version=2 --accept-multiclient exec /usr/bin/clickhouse-backup -- -c /etc/clickhouse-backup/config-custom-kopia.yml list remote
# run integration_test.go under debug, run from host OS not inside docker
# go test -timeout 30m -failfast -tags=integration -run "TestIntegrationEmbedded" -v ./test/integration/integration_test.go -c -o ./test/integration/integration_test
# sudo -H bash -c 'export CLICKHOUSE_IMAGE=clickhouse/clickhouse-server; export COMPOSE_FILE=docker-compose_advanced.yml; export CLICKHOUSE_VERSION=head; cd ./test/integration/; /root/go/bin/dlv --listen=127.0.0.1:40002 --headless=true --api-version=2 --accept-multiclient exec ./integration_test -- -test.timeout 30m -test.failfast -test.run "TestIntegrationEmbedded"'
Expand Down
2 changes: 1 addition & 1 deletion test/integration/kopia/list.sh
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,4 @@ set +x
set -euo pipefail
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
source "${CUR_DIR}/init.sh" &>/dev/null
kopia snapshot list --storage-stats --json | jq -c -M '.[] | {"snapshot_id": .id, "snapshot_path": .source.path, "backup_name": .tags["tag:backup_name"], "creation_date": .endTime, "data_size": .storageStats.newData.packedContentBytes, "metadata_size": 0 }'
kopia snapshot list --storage-stats --json | jq -c -M '.[] | {"snapshot_id": .id, "snapshot_path": .source.path, "backup_name": .tags["tag:backup_name"], "creation_date": .startTime, "upload_date": .endTime, "data_size": .storageStats.newData.packedContentBytes, "metadata_size": 0 }'
4 changes: 2 additions & 2 deletions test/integration/restic/list.sh
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,9 @@ set -euo pipefail
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
source "${CUR_DIR}/init.sh"
rm -rf /tmp/restic_list_full.json
restic snapshots --json | jq -c -M '.[] | {"snapshot_id": .short_id, "backup_name": .tags[0], "creation_date": .time }' > /tmp/restic_list.json
restic snapshots --json | jq -c -M '.[] | {"snapshot_id": .short_id, "backup_name": .tags[0], "creation_date": .time, "upload_date": .time }' > /tmp/restic_list.json
jq -c -r -M --slurp '.[].snapshot_id' /tmp/restic_list.json | while IFS= read -r snapshot_id ; do
jq -c -M -s 'add' <(grep ${snapshot_id} /tmp/restic_list.json) <(restic stats --json ${snapshot_id}) >> /tmp/restic_list_full.json
done
cat /tmp/restic_list_full.json | jq -c -M --slurp '.[] | .data_size = .total_size | .metadata_size = 0'
set -x
set -x
6 changes: 3 additions & 3 deletions test/integration/rsync/list.sh
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@ set +x
set -euo pipefail
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
source "${CUR_DIR}/init.sh"
ssh -i "${BACKUP_SSH_KEY}" -o "StrictHostKeyChecking no" "${BACKUP_REMOTE_SERVER}" ls -d -1 "${BACKUP_REMOTE_DIR}/*" | while IFS= read -r backup_name ; do
ssh -i "${BACKUP_SSH_KEY}" -o "StrictHostKeyChecking no" "${BACKUP_REMOTE_SERVER}" ls -d -1 "${BACKUP_REMOTE_DIR}/*" 2>/dev/null | while IFS= read -r backup_name ; do
backup_name=${backup_name#"$BACKUP_REMOTE_DIR"}
ssh -i "${BACKUP_SSH_KEY}" -o "StrictHostKeyChecking no" "${BACKUP_REMOTE_SERVER}" cat "${BACKUP_REMOTE_DIR}/${backup_name}/default/metadata.json" | jq -c -r -M '.'
ssh -i "${BACKUP_SSH_KEY}" -o "StrictHostKeyChecking no" "${BACKUP_REMOTE_SERVER}" cat "${BACKUP_REMOTE_DIR}/${backup_name}/default/metadata.json" 2>/dev/null | jq -c -r -M '. + {upload_date: .creation_date}'
done
set -x
set -x

0 comments on commit 50cf3bf

Please sign in to comment.