change test order
Slach committed Aug 29, 2024
1 parent 6a02bca commit e19019f
Showing 1 changed file with 60 additions and 59 deletions.
119 changes: 60 additions & 59 deletions test/integration/integration_test.go
@@ -554,6 +554,49 @@ func TestLongListRemote(t *testing.T) {
    env.Cleanup(t, r)
}

func TestIntegrationEmbedded(t *testing.T) {
    version := os.Getenv("CLICKHOUSE_VERSION")
    if compareVersion(version, "23.3") < 0 {
        t.Skipf("Test skipped, BACKUP/RESTORE is not production ready for version %s, see https://github.com/ClickHouse/ClickHouse/issues/39416 for details", version)
    }
    t.Logf("@TODO RESTORE Ordinary with old syntax still doesn't work for version %s, see https://github.com/ClickHouse/ClickHouse/issues/43971", version)
    env, r := NewTestEnvironment(t)

    // === AZURE ===
    // CUSTOM backup creates a folder on each disk, need to clear it
    env.DockerExecNoError(r, "clickhouse", "rm", "-rf", "/var/lib/clickhouse/disks/backups_azure/backup/")
    if compareVersion(version, "24.8") >= 0 {
        env.runMainIntegrationScenario(t, "EMBEDDED_AZURE_URL", "config-azblob-embedded-url.yml")
    }
    env.runMainIntegrationScenario(t, "EMBEDDED_AZURE", "config-azblob-embedded.yml")

    // === GCS over S3 ===
    if compareVersion(version, "24.3") >= 0 && os.Getenv("QA_GCS_OVER_S3_BUCKET") != "" {
        //@todo consider named collections to avoid showing credentials in logs, see https://github.com/fsouza/fake-gcs-server/issues/1330, https://github.com/fsouza/fake-gcs-server/pull/1164
        env.InstallDebIfNotExists(r, "clickhouse-backup", "ca-certificates", "gettext-base")
        env.DockerExecNoError(r, "clickhouse-backup", "bash", "-xec", "cat /etc/clickhouse-backup/config-gcs-embedded-url.yml.template | envsubst > /etc/clickhouse-backup/config-gcs-embedded-url.yml")
        env.runMainIntegrationScenario(t, "EMBEDDED_GCS_URL", "config-gcs-embedded-url.yml")
    }

    // === S3 ===
    // CUSTOM backup creates a folder on each disk, need to clear it
    env.DockerExecNoError(r, "clickhouse", "rm", "-rfv", "/var/lib/clickhouse/disks/backups_s3/backup/")
    env.runMainIntegrationScenario(t, "EMBEDDED_S3", "config-s3-embedded.yml")

    if compareVersion(version, "23.8") >= 0 {
        // CUSTOM backup creates a folder on each disk, need to clear it
        env.DockerExecNoError(r, "clickhouse", "rm", "-rfv", "/var/lib/clickhouse/disks/backups_local/backup/")
        env.runMainIntegrationScenario(t, "EMBEDDED_LOCAL", "config-s3-embedded-local.yml")
    }
    if compareVersion(version, "24.3") >= 0 {
        env.runMainIntegrationScenario(t, "EMBEDDED_S3_URL", "config-s3-embedded-url.yml")
    }
    //@TODO think about how to implement embedded backup for s3_plain disks
    //env.DockerExecNoError(r, "clickhouse", "rm", "-rf", "/var/lib/clickhouse/disks/backups_s3_plain/backup/")
    //runMainIntegrationScenario(t, "EMBEDDED_S3_PLAIN", "config-s3-plain-embedded.yml")
    env.Cleanup(t, r)
}

func TestIntegrationAzure(t *testing.T) {
    if isTestShouldSkip("AZURE_TESTS") {
        t.Skip("Skipping Azure integration tests...")
@@ -574,6 +617,23 @@ func TestIntegrationGCSWithCustomEndpoint(t *testing.T) {
    env.Cleanup(t, r)
}

func TestIntegrationS3(t *testing.T) {
    env, r := NewTestEnvironment(t)
    env.checkObjectStorageIsEmpty(t, r, "S3")
    env.runMainIntegrationScenario(t, "S3", "config-s3.yml")
    env.Cleanup(t, r)
}

func TestIntegrationGCS(t *testing.T) {
    if isTestShouldSkip("GCS_TESTS") {
        t.Skip("Skipping GCS integration tests...")
        return
    }
    env, r := NewTestEnvironment(t)
    env.runMainIntegrationScenario(t, "GCS", "config-gcs.yml")
    env.Cleanup(t, r)
}

func TestIntegrationSFTPAuthKey(t *testing.T) {
    env, r := NewTestEnvironment(t)
    env.uploadSSHKeys(r, "clickhouse-backup")
@@ -613,65 +673,6 @@ func TestIntegrationS3Glacier(t *testing.T) {
    env.Cleanup(t, r)
}

func TestIntegrationS3(t *testing.T) {
    env, r := NewTestEnvironment(t)
    env.checkObjectStorageIsEmpty(t, r, "S3")
    env.runMainIntegrationScenario(t, "S3", "config-s3.yml")
    env.Cleanup(t, r)
}

func TestIntegrationGCS(t *testing.T) {
    if isTestShouldSkip("GCS_TESTS") {
        t.Skip("Skipping GCS integration tests...")
        return
    }
    env, r := NewTestEnvironment(t)
    env.runMainIntegrationScenario(t, "GCS", "config-gcs.yml")
    env.Cleanup(t, r)
}

func TestIntegrationEmbedded(t *testing.T) {
    version := os.Getenv("CLICKHOUSE_VERSION")
    if compareVersion(version, "23.3") < 0 {
        t.Skipf("Test skipped, BACKUP/RESTORE is not production ready for version %s, see https://github.com/ClickHouse/ClickHouse/issues/39416 for details", version)
    }
    t.Logf("@TODO RESTORE Ordinary with old syntax still doesn't work for version %s, see https://github.com/ClickHouse/ClickHouse/issues/43971", version)
    env, r := NewTestEnvironment(t)

    // === AZURE ===
    // CUSTOM backup creates a folder on each disk, need to clear it
    env.DockerExecNoError(r, "clickhouse", "rm", "-rf", "/var/lib/clickhouse/disks/backups_azure/backup/")
    if compareVersion(version, "24.8") >= 0 {
        env.runMainIntegrationScenario(t, "EMBEDDED_AZURE_URL", "config-azblob-embedded-url.yml")
    }
    env.runMainIntegrationScenario(t, "EMBEDDED_AZURE", "config-azblob-embedded.yml")

    // === GCS over S3 ===
    if compareVersion(version, "24.3") >= 0 && os.Getenv("QA_GCS_OVER_S3_BUCKET") != "" {
        //@todo consider named collections to avoid showing credentials in logs, see https://github.com/fsouza/fake-gcs-server/issues/1330, https://github.com/fsouza/fake-gcs-server/pull/1164
        env.InstallDebIfNotExists(r, "clickhouse-backup", "ca-certificates", "gettext-base")
        env.DockerExecNoError(r, "clickhouse-backup", "bash", "-xec", "cat /etc/clickhouse-backup/config-gcs-embedded-url.yml.template | envsubst > /etc/clickhouse-backup/config-gcs-embedded-url.yml")
        env.runMainIntegrationScenario(t, "EMBEDDED_GCS_URL", "config-gcs-embedded-url.yml")
    }

    // === S3 ===
    // CUSTOM backup creates a folder on each disk, need to clear it
    env.DockerExecNoError(r, "clickhouse", "rm", "-rfv", "/var/lib/clickhouse/disks/backups_s3/backup/")
    env.runMainIntegrationScenario(t, "EMBEDDED_S3", "config-s3-embedded.yml")

    if compareVersion(version, "23.8") >= 0 {
        // CUSTOM backup creates a folder on each disk, need to clear it
        env.DockerExecNoError(r, "clickhouse", "rm", "-rfv", "/var/lib/clickhouse/disks/backups_local/backup/")
        env.runMainIntegrationScenario(t, "EMBEDDED_LOCAL", "config-s3-embedded-local.yml")
    }
    if compareVersion(version, "24.3") >= 0 {
        env.runMainIntegrationScenario(t, "EMBEDDED_S3_URL", "config-s3-embedded-url.yml")
    }
    //@TODO think about how to implement embedded backup for s3_plain disks
    //env.DockerExecNoError(r, "clickhouse", "rm", "-rf", "/var/lib/clickhouse/disks/backups_s3_plain/backup/")
    //runMainIntegrationScenario(t, "EMBEDDED_S3_PLAIN", "config-s3-plain-embedded.yml")
    env.Cleanup(t, r)
}

func TestIntegrationCustomKopia(t *testing.T) {
    env, r := NewTestEnvironment(t)