diff --git a/airbyte-cdk/java/airbyte-cdk/airbyte-commons/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java b/airbyte-cdk/java/airbyte-cdk/airbyte-commons/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java index 9b64edc1f99c..06caa5bd6387 100644 --- a/airbyte-cdk/java/airbyte-cdk/airbyte-commons/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java +++ b/airbyte-cdk/java/airbyte-cdk/airbyte-commons/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java @@ -22,6 +22,7 @@ public class EnvVariableFeatureFlags implements FeatureFlags { public static final String CONCURRENT_SOURCE_STREAM_READ = "CONCURRENT_SOURCE_STREAM_READ"; public static final String STRICT_COMPARISON_NORMALIZATION_WORKSPACES = "STRICT_COMPARISON_NORMALIZATION_WORKSPACES"; public static final String STRICT_COMPARISON_NORMALIZATION_TAG = "STRICT_COMPARISON_NORMALIZATION_TAG"; + public static final String DEPLOYMENT_MODE = "DEPLOYMENT_MODE"; @Override public boolean useStreamCapableState() { @@ -63,6 +64,11 @@ public String strictComparisonNormalizationTag() { return getEnvOrDefault(STRICT_COMPARISON_NORMALIZATION_TAG, "strict_comparison2", (arg) -> arg); } + @Override + public String deploymentMode() { + return getEnvOrDefault(DEPLOYMENT_MODE, "", (arg) -> arg); + } + // TODO: refactor in order to use the same method than the ones in EnvConfigs.java public T getEnvOrDefault(final String key, final T defaultValue, final Function parser) { final String value = System.getenv(key); diff --git a/airbyte-cdk/java/airbyte-cdk/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlags.java b/airbyte-cdk/java/airbyte-cdk/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlags.java index b3da9ac764bb..cf35d83a4ff6 100644 --- a/airbyte-cdk/java/airbyte-cdk/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlags.java +++ 
b/airbyte-cdk/java/airbyte-cdk/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlags.java @@ -49,4 +49,11 @@ public interface FeatureFlags { */ String strictComparisonNormalizationTag(); + /** + * Get the deployment mode used to deploy a connector. + * + * @return empty string for the default deployment mode, "CLOUD" for cloud deployment mode. + */ + String deploymentMode(); + } diff --git a/airbyte-cdk/java/airbyte-cdk/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlagsWrapper.java b/airbyte-cdk/java/airbyte-cdk/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlagsWrapper.java new file mode 100644 index 000000000000..624783f2104a --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlagsWrapper.java @@ -0,0 +1,92 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.features; + +public class FeatureFlagsWrapper implements FeatureFlags { + + /** + * Overrides the {@link FeatureFlags#useStreamCapableState} method in the feature flags. + */ + static public FeatureFlags overridingUseStreamCapableState( + final FeatureFlags wrapped, + final boolean useStreamCapableState) { + return new FeatureFlagsWrapper(wrapped) { + + @Override + public boolean useStreamCapableState() { + return useStreamCapableState; + } + + }; + } + + /** + * Overrides the {@link FeatureFlags#deploymentMode} method in the feature flags. 
+ */ + static public FeatureFlags overridingDeploymentMode( + final FeatureFlags wrapped, + final String deploymentMode) { + return new FeatureFlagsWrapper(wrapped) { + + @Override + public String deploymentMode() { + return deploymentMode; + } + + }; + } + + private final FeatureFlags wrapped; + + public FeatureFlagsWrapper(FeatureFlags wrapped) { + this.wrapped = wrapped; + } + + @Override + public boolean useStreamCapableState() { + return wrapped.useStreamCapableState(); + } + + @Override + public boolean autoDetectSchema() { + return wrapped.autoDetectSchema(); + } + + @Override + public boolean logConnectorMessages() { + return wrapped.logConnectorMessages(); + } + + @Override + public boolean concurrentSourceStreamRead() { + return wrapped.concurrentSourceStreamRead(); + } + + @Override + public boolean applyFieldSelection() { + return wrapped.applyFieldSelection(); + } + + @Override + public String fieldSelectionWorkspaces() { + return wrapped.fieldSelectionWorkspaces(); + } + + @Override + public String strictComparisonNormalizationWorkspaces() { + return wrapped.strictComparisonNormalizationWorkspaces(); + } + + @Override + public String strictComparisonNormalizationTag() { + return wrapped.strictComparisonNormalizationTag(); + } + + @Override + public String deploymentMode() { + return wrapped.deploymentMode(); + } + +} diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/adaptive/AdaptiveDestinationRunner.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/adaptive/AdaptiveDestinationRunner.java index c1d0cc568dd4..878eef089be0 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/adaptive/AdaptiveDestinationRunner.java +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/adaptive/AdaptiveDestinationRunner.java @@ -10,6 +10,7 @@ import io.airbyte.cdk.integrations.base.IntegrationCliParser; import 
io.airbyte.cdk.integrations.base.IntegrationConfig; import io.airbyte.cdk.integrations.base.IntegrationRunner; +import io.airbyte.commons.features.EnvVariableFeatureFlags; import io.airbyte.commons.json.Jsons; import java.util.function.Supplier; import org.slf4j.Logger; @@ -23,7 +24,7 @@ public class AdaptiveDestinationRunner { private static final Logger LOGGER = LoggerFactory.getLogger(AdaptiveDestinationRunner.class); - private static final String DEPLOYMENT_MODE_KEY = "DEPLOYMENT_MODE"; + private static final String DEPLOYMENT_MODE_KEY = EnvVariableFeatureFlags.DEPLOYMENT_MODE; private static final String CLOUD_MODE = "CLOUD"; public static OssDestinationBuilder baseOnEnv() { diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/adaptive/AdaptiveSourceRunner.java b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/adaptive/AdaptiveSourceRunner.java index 2de525a66c93..4bb7f021db50 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/adaptive/AdaptiveSourceRunner.java +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/java/io/airbyte/cdk/integrations/base/adaptive/AdaptiveSourceRunner.java @@ -6,6 +6,7 @@ import io.airbyte.cdk.integrations.base.IntegrationRunner; import io.airbyte.cdk.integrations.base.Source; +import io.airbyte.commons.features.EnvVariableFeatureFlags; import java.util.function.Supplier; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -17,7 +18,7 @@ public class AdaptiveSourceRunner { private static final Logger LOGGER = LoggerFactory.getLogger(AdaptiveSourceRunner.class); - public static final String DEPLOYMENT_MODE_KEY = "DEPLOYMENT_MODE"; + public static final String DEPLOYMENT_MODE_KEY = EnvVariableFeatureFlags.DEPLOYMENT_MODE; public static final String CLOUD_MODE = "CLOUD"; public static OssSourceBuilder baseOnEnv() { diff --git a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties 
b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties index 742fc25a9875..3eecc3f2524c 100644 --- a/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties +++ b/airbyte-cdk/java/airbyte-cdk/core/src/main/resources/version.properties @@ -1 +1 @@ -version=0.4.5 +version=0.4.6 diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/internals/postgres/PostgresReplicationConnection.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/internals/postgres/PostgresReplicationConnection.java index 3a6c1881edf2..85f10313db41 100644 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/internals/postgres/PostgresReplicationConnection.java +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/debezium/internals/postgres/PostgresReplicationConnection.java @@ -48,7 +48,7 @@ public static Connection createConnection(final JsonNode jdbcConfig) throws SQLE validateReplicationConnection(connection); return connection; } catch (final PSQLException exception) { - if (exception.getMessage().equals("FATAL: must be superuser or replication role to start walsender")) { + if ("42501".equals(exception.getSQLState())) { // insufficient_privilege throw new ConfigErrorException(String.format(REPLICATION_PRIVILEGE_ERROR_MESSAGE, jdbcConfig.get(JdbcUtils.USERNAME_KEY).asText())); } throw exception; diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/AbstractDbSource.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/AbstractDbSource.java index 43105d8d13fd..0d604dce7518 100644 --- a/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/AbstractDbSource.java +++ 
b/airbyte-cdk/java/airbyte-cdk/db-sources/src/main/java/io/airbyte/cdk/integrations/source/relationaldb/AbstractDbSource.java @@ -7,6 +7,7 @@ import static io.airbyte.cdk.integrations.base.errors.messages.ErrorMessage.getErrorMessage; import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import datadog.trace.api.Trace; import io.airbyte.cdk.db.AbstractDatabase; @@ -75,8 +76,14 @@ public abstract class AbstractDbSource(new TestEnvConfigs().getJobDefaultEnvMap()); + envMap.put(EnvVariableFeatureFlags.DEPLOYMENT_MODE, featureFlags().deploymentMode()); processFactory = new DockerProcessFactory( workspaceRoot, workspaceRoot.toString(), localRoot.toString(), "host", - new TestEnvConfigs().getJobDefaultEnvMap()); + envMap); postSetup(); } @@ -163,10 +167,14 @@ public void tearDownInternal() throws Exception { tearDown(environment); } + protected FeatureFlags featureFlags() { + return new EnvVariableFeatureFlags(); + } + protected ConnectorSpecification runSpec() throws TestHarnessException { final io.airbyte.protocol.models.ConnectorSpecification spec = new DefaultGetSpecTestHarness( new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, null, null, false, - new EnvVariableFeatureFlags())) + featureFlags())) .run(new JobGetSpecConfig().withDockerImage(getImageName()), jobRoot).getSpec(); return convertProtocolObject(spec, ConnectorSpecification.class); } @@ -174,7 +182,7 @@ protected ConnectorSpecification runSpec() throws TestHarnessException { protected StandardCheckConnectionOutput runCheck() throws Exception { return new DefaultCheckConnectionTestHarness( new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, null, null, false, - new EnvVariableFeatureFlags()), + featureFlags()), mConnectorConfigUpdater) .run(new StandardCheckConnectionInput().withConnectionConfiguration(getConfig()), jobRoot).getCheckConnection(); } 
@@ -182,7 +190,7 @@ protected StandardCheckConnectionOutput runCheck() throws Exception { protected String runCheckAndGetStatusAsString(final JsonNode config) throws Exception { return new DefaultCheckConnectionTestHarness( new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, null, null, false, - new EnvVariableFeatureFlags()), + featureFlags()), mConnectorConfigUpdater) .run(new StandardCheckConnectionInput().withConnectionConfiguration(config), jobRoot).getCheckConnection().getStatus().toString(); } @@ -191,7 +199,7 @@ protected UUID runDiscover() throws Exception { final UUID toReturn = new DefaultDiscoverCatalogTestHarness( mAirbyteApiClient, new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, null, null, false, - new EnvVariableFeatureFlags()), + featureFlags()), mConnectorConfigUpdater) .run(new StandardDiscoverCatalogInput().withSourceId(SOURCE_ID.toString()).withConnectionConfiguration(getConfig()), jobRoot) .getDiscoverCatalogId(); @@ -222,12 +230,10 @@ protected List runRead(final ConfiguredAirbyteCatalog catalog, f .withState(state == null ? 
null : new State().withState(state)) .withCatalog(convertProtocolObject(catalog, io.airbyte.protocol.models.ConfiguredAirbyteCatalog.class)); - final var featureFlags = new EnvVariableFeatureFlags(); - final AirbyteSource source = new DefaultAirbyteSource( new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, null, null, false, - featureFlags), - featureFlags); + featureFlags()), + featureFlags()); final List messages = new ArrayList<>(); source.start(sourceConfig, jobRoot); while (!source.isFinished()) { @@ -266,7 +272,6 @@ protected Map runReadVerifyNumberOfReceivedMsgs(final Configure } private AirbyteSource prepareAirbyteSource() { - final var featureFlags = new EnvVariableFeatureFlags(); final var integrationLauncher = new AirbyteIntegrationLauncher( JOB_ID, JOB_ATTEMPT, @@ -275,8 +280,8 @@ private AirbyteSource prepareAirbyteSource() { null, null, false, - featureFlags); - return new DefaultAirbyteSource(integrationLauncher, featureFlags); + featureFlags()); + return new DefaultAirbyteSource(integrationLauncher, featureFlags()); } private static V0 convertProtocolObject(final V1 v1, final Class klass) { diff --git a/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/testutils/PostgresTestDatabase.java b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/testutils/PostgresTestDatabase.java new file mode 100644 index 000000000000..facb334c50dc --- /dev/null +++ b/airbyte-cdk/java/airbyte-cdk/db-sources/src/testFixtures/java/io/airbyte/cdk/testutils/PostgresTestDatabase.java @@ -0,0 +1,292 @@ +/* + * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.cdk.testutils; + +import com.google.common.collect.ImmutableMap; +import io.airbyte.cdk.db.Database; +import io.airbyte.cdk.db.PostgresUtils; +import io.airbyte.cdk.db.factory.DSLContextFactory; +import io.airbyte.cdk.db.factory.DatabaseDriver; +import io.airbyte.cdk.db.jdbc.JdbcUtils; +import io.airbyte.cdk.integrations.util.HostPortResolver; +import io.airbyte.commons.io.IOs; +import io.airbyte.commons.string.Strings; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.apache.commons.io.FileUtils; +import org.jooq.DSLContext; +import org.jooq.SQLDialect; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.testcontainers.containers.Network; +import org.testcontainers.containers.PostgreSQLContainer; +import org.testcontainers.utility.DockerImageName; +import org.testcontainers.utility.MountableFile; + +/** + * {@link PostgresTestDatabase} is a convenience object which allows for efficient use of + * {@link PostgreSQLContainer} instances in tests. Each test container is shared throughout the + * whole JVM. Isolation is performed by creating a new database and a new user for each + * {@link PostgresTestDatabase} instance. These are dropped when the instance is closed. + */ +public class PostgresTestDatabase implements AutoCloseable { + + /** + * Create a new {@link PostgresTestDatabase} instance. + * + * @param imageName base image to use for the underlying {@link PostgreSQLContainer}. + * @param methods {@link ContainerFactory} methods that need to be called. + * @return a new {@link PostgresTestDatabase} instance which may reuse a shared + * {@link PostgreSQLContainer}. 
+ */ + static public PostgresTestDatabase make(String imageName, String... methods) { + final String imageNamePlusMethods = Stream.concat( + Stream.of(imageName), + Stream.of(methods)) + .collect(Collectors.joining("+")); + final ContainerFactory factory = ContainerFactory.LAZY.computeIfAbsent(imageNamePlusMethods, ContainerFactory::new); + return new PostgresTestDatabase(factory.getOrCreateSharedContainer()); + } + + private PostgresTestDatabase(PostgreSQLContainer sharedContainer) { + this.container = sharedContainer; + + this.suffix = Strings.addRandomSuffix("", "_", 10); + try { + this.tmpDir = Files.createTempDirectory("dir" + suffix); + } catch (final IOException e) { + throw new UncheckedIOException(e); + } + final var dir = this.tmpDir.toFile(); + Runtime.getRuntime().addShutdownHook(new Thread(() -> FileUtils.deleteQuietly(dir))); + + this.dbName = "db" + suffix; + this.userName = "test_user" + suffix; + this.password = "test_password" + suffix; + + final Path script = this.tmpDir.resolve("create" + suffix + ".sql"); + IOs.writeFile(script, String.format(""" + CREATE DATABASE %s; + CREATE USER %s PASSWORD '%s'; + GRANT ALL PRIVILEGES ON DATABASE %s TO %s; + ALTER USER %s WITH SUPERUSER; + """, + dbName, + userName, password, + dbName, userName, + userName)); + PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(script), sharedContainer); + + this.jdbcUrl = String.format( + DatabaseDriver.POSTGRESQL.getUrlFormatString(), + sharedContainer.getHost(), + sharedContainer.getFirstMappedPort(), + dbName); + this.dslContext = DSLContextFactory.create( + userName, + password, + DatabaseDriver.POSTGRESQL.getDriverClassName(), + jdbcUrl, + SQLDialect.POSTGRES); + this.database = new Database(dslContext); + } + + public final PostgreSQLContainer container; + public final String dbName, userName, password, jdbcUrl; + public final DSLContext dslContext; + public final Database database; + + private final Path tmpDir; + private final String suffix; + + /** + 
* Convenience method for building identifiers which are unique to this instance. + */ + public String withSuffix(String str) { + return str + suffix; + } + + /** + * Convenience method for initializing a config builder for use in integration tests. + */ + public ImmutableMap.Builder makeConfigBuilder() { + return ImmutableMap.builder() + .put(JdbcUtils.HOST_KEY, HostPortResolver.resolveHost(container)) + .put(JdbcUtils.PORT_KEY, HostPortResolver.resolvePort(container)) + .put(JdbcUtils.DATABASE_KEY, dbName) + .put(JdbcUtils.USERNAME_KEY, userName) + .put(JdbcUtils.PASSWORD_KEY, password); + } + + /** + * @return the {@link PostgresUtils.Certificate} for this instance; requires + * {@link ContainerFactory#withCert} call. + */ + public PostgresUtils.Certificate getCertificate() { + final String caCert, clientKey, clientCert; + try { + caCert = container.execInContainer("su", "-c", "cat ca.crt").getStdout().trim(); + clientKey = container.execInContainer("su", "-c", "cat client.key").getStdout().trim(); + clientCert = container.execInContainer("su", "-c", "cat client.crt").getStdout().trim(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + return new PostgresUtils.Certificate(caCert, clientCert, clientKey); + } + + @Override + public void close() { + dslContext.close(); + final Path script = this.tmpDir.resolve("drop" + suffix + ".sql"); + IOs.writeFile(script, String.format(""" + DROP USER %s; + DROP DATABASE %s; + """, + userName, + dbName)); + PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(script), container); + } + + static private class ContainerFactory { + + static private final Logger LOGGER = LoggerFactory.getLogger(ContainerFactory.class); + static private final ConcurrentHashMap LAZY = new ConcurrentHashMap<>(); + + final private String imageName; + final private List methods; + private PostgreSQLContainer sharedContainer; + + private 
ContainerFactory(String imageNamePlusMethods) { + final String[] parts = imageNamePlusMethods.split("\\+"); + this.imageName = parts[0]; + this.methods = Arrays.stream(parts).skip(1).map(methodName -> { + try { + return ContainerFactory.class.getMethod(methodName); + } catch (NoSuchMethodException e) { + throw new RuntimeException(e); + } + }).toList(); + } + + private synchronized PostgreSQLContainer getOrCreateSharedContainer() { + if (sharedContainer == null) { + LOGGER.info("Creating new shared container based on {} with {}.", imageName, methods.stream().map(Method::getName).toList()); + final var parsed = DockerImageName.parse(imageName).asCompatibleSubstituteFor("postgres"); + sharedContainer = new PostgreSQLContainer<>(parsed); + for (Method method : methods) { + LOGGER.info("Calling {} on new shared container based on {}.", method.getName(), imageName); + try { + method.invoke(this); + } catch (IllegalAccessException | InvocationTargetException e) { + throw new RuntimeException(e); + } + } + sharedContainer.start(); + } + return sharedContainer; + } + + /** + * Apply the postgresql.conf file that we've packaged as a resource. + */ + public void withConf() { + sharedContainer + .withCopyFileToContainer( + MountableFile.forClasspathResource("postgresql.conf"), + "/etc/postgresql/postgresql.conf") + .withCommand("postgres -c config_file=/etc/postgresql/postgresql.conf"); + } + + /** + * Create a new network and bind it to the container. + */ + public void withNetwork() { + sharedContainer.withNetwork(Network.newNetwork()); + } + + /** + * Configure postgres with wal_level=logical. + */ + public void withWalLevelLogical() { + sharedContainer.withCommand("postgres -c wal_level=logical"); + } + + /** + * Generate SSL certificates and tell postgres to enable SSL and use them. 
+ */ + public void withCert() { + sharedContainer.start(); + String[] commands = { + "psql -U test -c \"CREATE USER postgres WITH PASSWORD 'postgres';\"", + "psql -U test -c \"GRANT CONNECT ON DATABASE \"test\" TO postgres;\"", + "psql -U test -c \"ALTER USER postgres WITH SUPERUSER;\"", + "openssl ecparam -name prime256v1 -genkey -noout -out ca.key", + "openssl req -new -x509 -sha256 -key ca.key -out ca.crt -subj \"/CN=127.0.0.1\"", + "openssl ecparam -name prime256v1 -genkey -noout -out server.key", + "openssl req -new -sha256 -key server.key -out server.csr -subj \"/CN=localhost\"", + "openssl x509 -req -in server.csr -CA ca.crt -CAkey ca.key -CAcreateserial -out server.crt -days 365 -sha256", + "cp server.key /etc/ssl/private/", + "cp server.crt /etc/ssl/private/", + "cp ca.crt /etc/ssl/private/", + "chmod og-rwx /etc/ssl/private/server.* /etc/ssl/private/ca.*", + "chown postgres:postgres /etc/ssl/private/server.crt /etc/ssl/private/server.key /etc/ssl/private/ca.crt", + "echo \"ssl = on\" >> /var/lib/postgresql/data/postgresql.conf", + "echo \"ssl_cert_file = '/etc/ssl/private/server.crt'\" >> /var/lib/postgresql/data/postgresql.conf", + "echo \"ssl_key_file = '/etc/ssl/private/server.key'\" >> /var/lib/postgresql/data/postgresql.conf", + "echo \"ssl_ca_file = '/etc/ssl/private/ca.crt'\" >> /var/lib/postgresql/data/postgresql.conf", + "mkdir root/.postgresql", + "echo \"hostssl all all 127.0.0.1/32 cert clientcert=verify-full\" >> /var/lib/postgresql/data/pg_hba.conf", + "openssl ecparam -name prime256v1 -genkey -noout -out client.key", + "openssl req -new -sha256 -key client.key -out client.csr -subj \"/CN=postgres\"", + "openssl x509 -req -in client.csr -CA ca.crt -CAkey ca.key -CAcreateserial -out client.crt -days 365 -sha256", + "cp client.crt ~/.postgresql/postgresql.crt", + "cp client.key ~/.postgresql/postgresql.key", + "chmod 0600 ~/.postgresql/postgresql.crt ~/.postgresql/postgresql.key", + "cp ca.crt root/.postgresql/ca.crt", + "chown 
postgres:postgres ~/.postgresql/ca.crt", + "psql -U test -c \"SELECT pg_reload_conf();\"", + }; + for (String cmd : commands) { + try { + sharedContainer.execInContainer("su", "-c", cmd); + } catch (IOException e) { + throw new UncheckedIOException(e); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + } + + /** + * Tell postgres to enable SSL. + */ + public void withSSL() { + sharedContainer.withCommand("postgres " + + "-c ssl=on " + + "-c ssl_cert_file=/var/lib/postgresql/server.crt " + + "-c ssl_key_file=/var/lib/postgresql/server.key"); + } + + /** + * Configure postgres with client_encoding=sql_ascii. + */ + public void withASCII() { + sharedContainer.withCommand("postgres -c client_encoding=sql_ascii"); + } + + } + +} diff --git a/airbyte-integrations/connectors/source-postgres/build.gradle b/airbyte-integrations/connectors/source-postgres/build.gradle index 7f924025f46f..7a94eedafb03 100644 --- a/airbyte-integrations/connectors/source-postgres/build.gradle +++ b/airbyte-integrations/connectors/source-postgres/build.gradle @@ -13,7 +13,7 @@ java { } airbyteJavaConnector { - cdkVersionRequired = '0.4.1' + cdkVersionRequired = '0.4.6' features = ['db-sources'] useLocalCdk = false } diff --git a/airbyte-integrations/connectors/source-postgres/gradle.properties b/airbyte-integrations/connectors/source-postgres/gradle.properties new file mode 100644 index 000000000000..8ef098d20b92 --- /dev/null +++ b/airbyte-integrations/connectors/source-postgres/gradle.properties @@ -0,0 +1 @@ +testExecutionConcurrency=-1 \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java index 2591e6e3d2c8..1e07cea42090 100644 --- 
a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java @@ -63,8 +63,6 @@ import io.airbyte.cdk.integrations.source.relationaldb.state.StateManager; import io.airbyte.cdk.integrations.util.HostPortResolver; import io.airbyte.commons.exceptions.ConfigErrorException; -import io.airbyte.commons.features.EnvVariableFeatureFlags; -import io.airbyte.commons.features.FeatureFlags; import io.airbyte.commons.functional.CheckedConsumer; import io.airbyte.commons.functional.CheckedFunction; import io.airbyte.commons.json.Jsons; @@ -123,8 +121,6 @@ public class PostgresSource extends AbstractJdbcSource implements Source { - // This is not a static because we want it to be initialized at instance creation time - private final String DEPLOYMENT_MODE = System.getenv(AdaptiveSourceRunner.DEPLOYMENT_MODE_KEY); private static final Logger LOGGER = LoggerFactory.getLogger(PostgresSource.class); private static final int INTERMEDIATE_STATE_EMISSION_FREQUENCY = 10_000; public static final String PARAM_SSLMODE = "sslmode"; @@ -147,24 +143,22 @@ public class PostgresSource extends AbstractJdbcSource implements private List schemas; private Set publicizedTablesInCdc; - private final FeatureFlags featureFlags; private static final Set INVALID_CDC_SSL_MODES = ImmutableSet.of("allow", "prefer"); private int stateEmissionFrequency; private XminStatus xminStatus; - public static Source sshWrappedSource() { - return new SshWrappedSource(new PostgresSource(), JdbcUtils.HOST_LIST_KEY, JdbcUtils.PORT_LIST_KEY, "security"); + public static Source sshWrappedSource(PostgresSource source) { + return new SshWrappedSource(source, JdbcUtils.HOST_LIST_KEY, JdbcUtils.PORT_LIST_KEY, "security"); } PostgresSource() { super(DRIVER_CLASS, AdaptiveStreamingQueryConfig::new, new PostgresSourceOperations()); - 
this.featureFlags = new EnvVariableFeatureFlags(); this.stateEmissionFrequency = INTERMEDIATE_STATE_EMISSION_FREQUENCY; } @Override public ConnectorSpecification spec() throws Exception { - if (DEPLOYMENT_MODE != null && DEPLOYMENT_MODE.equalsIgnoreCase(AdaptiveSourceRunner.CLOUD_MODE)) { + if (cloudDeploymentMode()) { final ConnectorSpecification spec = Jsons.clone(super.spec()); final ObjectNode properties = (ObjectNode) spec.getConnectionSpecification().get("properties"); ((ObjectNode) properties.get(SSL_MODE)).put("default", SSL_MODE_REQUIRE); @@ -711,7 +705,7 @@ protected void setStateEmissionFrequencyForDebug(final int stateEmissionFrequenc } public static void main(final String[] args) throws Exception { - final Source source = PostgresSource.sshWrappedSource(); + final Source source = PostgresSource.sshWrappedSource(new PostgresSource()); LOGGER.info("starting source: {}", PostgresSource.class); new IntegrationRunner(source).run(args); LOGGER.info("completed source: {}", PostgresSource.class); @@ -722,7 +716,7 @@ public static void main(final String[] args) throws Exception { public AirbyteConnectionStatus check(final JsonNode config) throws Exception { // #15808 Disallow connecting to db with disable, prefer or allow SSL mode when connecting directly // and not over SSH tunnel - if (AdaptiveSourceRunner.CLOUD_MODE.equalsIgnoreCase(DEPLOYMENT_MODE)) { + if (cloudDeploymentMode()) { LOGGER.info("Source configured as in Cloud Deployment mode"); if (config.has(TUNNEL_METHOD) && config.get(TUNNEL_METHOD).has(TUNNEL_METHOD) @@ -845,4 +839,8 @@ private List getFullTableEstimate(final JdbcDatabase database, return jsonNodes; } + private boolean cloudDeploymentMode() { + return AdaptiveSourceRunner.CLOUD_MODE.equalsIgnoreCase(featureFlags.deploymentMode()); + } + } diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractCdcPostgresSourceSslAcceptanceTest.java 
b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractCdcPostgresSourceSslAcceptanceTest.java index 77613a968034..ea68ae74436b 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractCdcPostgresSourceSslAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractCdcPostgresSourceSslAcceptanceTest.java @@ -4,23 +4,14 @@ package io.airbyte.integrations.io.airbyte.integration_tests.sources; -import static io.airbyte.cdk.db.PostgresUtils.getCertificate; - import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.Database; import io.airbyte.cdk.db.PostgresUtils; -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; import io.airbyte.cdk.db.jdbc.JdbcUtils; import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv; -import io.airbyte.cdk.integrations.util.HostPortResolver; +import io.airbyte.cdk.testutils.PostgresTestDatabase; import io.airbyte.commons.json.Jsons; import java.util.List; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.DockerImageName; public abstract class AbstractCdcPostgresSourceSslAcceptanceTest extends CdcPostgresSourceAcceptanceTest { @@ -29,52 +20,33 @@ public abstract class AbstractCdcPostgresSourceSslAcceptanceTest extends CdcPost @Override protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { - container = new PostgreSQLContainer<>(DockerImageName.parse(getServerImageName()) - .asCompatibleSubstituteFor("postgres")) - .withCommand("postgres -c wal_level=logical"); - container.start(); - - 
certs = getCertificate(container); + testdb = PostgresTestDatabase.make(getServerImageName(), "withWalLevelLogical", "withCert"); + certs = testdb.getCertificate(); + slotName = testdb.withSuffix("debezium_slot"); + publication = testdb.withSuffix("publication"); final JsonNode replicationMethod = Jsons.jsonNode(ImmutableMap.builder() .put("method", "CDC") - .put("replication_slot", SLOT_NAME_BASE) - .put("publication", PUBLICATION) + .put("replication_slot", slotName) + .put("publication", publication) .put("initial_waiting_seconds", INITIAL_WAITING_SECONDS) .build()); - config = Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, HostPortResolver.resolveHost(container)) - .put(JdbcUtils.PORT_KEY, HostPortResolver.resolvePort(container)) - .put(JdbcUtils.DATABASE_KEY, container.getDatabaseName()) + config = Jsons.jsonNode(testdb.makeConfigBuilder() .put(JdbcUtils.SCHEMAS_KEY, List.of(NAMESPACE)) - .put(JdbcUtils.USERNAME_KEY, container.getUsername()) - .put(JdbcUtils.PASSWORD_KEY, container.getPassword()) .put("replication_method", replicationMethod) .put(JdbcUtils.SSL_KEY, true) .put("ssl_mode", getCertificateConfiguration()) .put("is_test", true) .build()); - try (final DSLContext dslContext = DSLContextFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.POSTGRESQL.getDriverClassName(), - String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - container.getHost(), - container.getFirstMappedPort(), - config.get(JdbcUtils.DATABASE_KEY).asText()), - SQLDialect.POSTGRES)) { - final Database database = new Database(dslContext); - - database.query(ctx -> { - ctx.execute("CREATE TABLE id_and_name(id INTEGER primary key, name VARCHAR(200));"); - ctx.execute("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); - ctx.execute("CREATE TABLE starships(id INTEGER primary key, name VARCHAR(200));"); - ctx.execute("INSERT INTO starships (id, name) 
VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); - ctx.execute("SELECT pg_create_logical_replication_slot('" + SLOT_NAME_BASE + "', 'pgoutput');"); - ctx.execute("CREATE PUBLICATION " + PUBLICATION + " FOR ALL TABLES;"); - return null; - }); - } + testdb.database.query(ctx -> { + ctx.execute("CREATE TABLE id_and_name(id INTEGER primary key, name VARCHAR(200));"); + ctx.execute("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); + ctx.execute("CREATE TABLE starships(id INTEGER primary key, name VARCHAR(200));"); + ctx.execute("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); + ctx.execute("SELECT pg_create_logical_replication_slot('" + slotName + "', 'pgoutput');"); + ctx.execute("CREATE PUBLICATION " + publication + " FOR ALL TABLES;"); + return null; + }); } protected abstract String getServerImageName(); diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractPostgresSourceDatatypeTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractPostgresSourceDatatypeTest.java index 3c314ebd4552..f958c8cf3360 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractPostgresSourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractPostgresSourceDatatypeTest.java @@ -13,17 +13,18 @@ import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.cdk.integrations.standardtest.source.AbstractSourceDatabaseTypeTest; import io.airbyte.cdk.integrations.standardtest.source.TestDataHolder; +import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv; +import 
io.airbyte.cdk.testutils.PostgresTestDatabase; import io.airbyte.protocol.models.JsonSchemaPrimitiveUtil.JsonSchemaPrimitive; import io.airbyte.protocol.models.JsonSchemaType; +import java.sql.SQLException; import java.util.Set; -import org.jooq.DSLContext; -import org.testcontainers.containers.PostgreSQLContainer; public abstract class AbstractPostgresSourceDatatypeTest extends AbstractSourceDatabaseTypeTest { - protected PostgreSQLContainer container; + protected PostgresTestDatabase testdb; protected JsonNode config; - protected DSLContext dslContext; + protected static final String SCHEMA_NAME = "test"; @Override @@ -41,6 +42,11 @@ protected JsonNode getConfig() { return config; } + @Override + protected void tearDown(final TestDestinationEnv testEnv) throws SQLException { + testdb.close(); + } + @Override public boolean testCatalog() { return true; diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractPostgresSourceSSLCertificateAcceptanceTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractPostgresSourceSSLCertificateAcceptanceTest.java index 81bc5847b99a..6014ca946500 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractPostgresSourceSSLCertificateAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractPostgresSourceSSLCertificateAcceptanceTest.java @@ -4,18 +4,14 @@ package io.airbyte.integrations.io.airbyte.integration_tests.sources; -import static io.airbyte.cdk.db.PostgresUtils.getCertificate; - import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import 
io.airbyte.cdk.db.Database; import io.airbyte.cdk.db.PostgresUtils; -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv; -import io.airbyte.cdk.integrations.util.HostPortResolver; -import io.airbyte.commons.features.EnvVariableFeatureFlags; +import io.airbyte.cdk.testutils.PostgresTestDatabase; +import io.airbyte.commons.features.FeatureFlags; +import io.airbyte.commons.features.FeatureFlagsWrapper; import io.airbyte.commons.json.Jsons; import io.airbyte.protocol.models.Field; import io.airbyte.protocol.models.JsonSchemaType; @@ -26,80 +22,53 @@ import io.airbyte.protocol.models.v0.SyncMode; import java.util.HashMap; import java.util.List; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.extension.ExtendWith; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.DockerImageName; -import uk.org.webcompere.systemstubs.environment.EnvironmentVariables; -import uk.org.webcompere.systemstubs.jupiter.SystemStub; -import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension; -@ExtendWith(SystemStubsExtension.class) public abstract class AbstractPostgresSourceSSLCertificateAcceptanceTest extends AbstractPostgresSourceAcceptanceTest { private static final String STREAM_NAME = "id_and_name"; private static final String STREAM_NAME2 = "starships"; private static final String STREAM_NAME_MATERIALIZED_VIEW = "testview"; private static final String SCHEMA_NAME = "public"; - @SystemStub - private EnvironmentVariables environmentVariables; - private PostgreSQLContainer container; + + private PostgresTestDatabase testdb; private JsonNode config; protected static final String PASSWORD = "Passw0rd"; protected static PostgresUtils.Certificate certs; @Override - protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { - 
environmentVariables.set(EnvVariableFeatureFlags.USE_STREAM_CAPABLE_STATE, "true"); + protected FeatureFlags featureFlags() { + return FeatureFlagsWrapper.overridingUseStreamCapableState(super.featureFlags(), true); + } - container = new PostgreSQLContainer<>(DockerImageName.parse("postgres:bullseye") - .asCompatibleSubstituteFor("postgres")); - container.start(); - certs = getCertificate(container); + @Override + protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { + testdb = PostgresTestDatabase.make("postgres:16-bullseye", "withCert"); + certs = testdb.getCertificate(); final JsonNode replicationMethod = Jsons.jsonNode(ImmutableMap.builder() .put("method", "Standard") .build()); - config = Jsons.jsonNode(ImmutableMap.builder() - .put("host", HostPortResolver.resolveHost(container)) - .put("port", HostPortResolver.resolvePort(container)) - .put("database", container.getDatabaseName()) + config = Jsons.jsonNode(testdb.makeConfigBuilder() .put("schemas", Jsons.jsonNode(List.of("public"))) - .put("username", "postgres") - .put("password", "postgres") .put("ssl", true) .put("replication_method", replicationMethod) .put("ssl_mode", getCertificateConfiguration()) .build()); - - try (final DSLContext dslContext = DSLContextFactory.create( - config.get("username").asText(), - config.get("password").asText(), - DatabaseDriver.POSTGRESQL.getDriverClassName(), - String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - container.getHost(), - container.getFirstMappedPort(), - config.get("database").asText()), - SQLDialect.POSTGRES)) { - final Database database = new Database(dslContext); - - database.query(ctx -> { - ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); - ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 
'defiant'), (3, 'yamato');"); - ctx.fetch("CREATE MATERIALIZED VIEW testview AS select * from id_and_name where id = '2';"); - return null; - }); - } + testdb.database.query(ctx -> { + ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); + ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); + ctx.fetch("CREATE MATERIALIZED VIEW testview AS select * from id_and_name where id = '2';"); + return null; + }); } public abstract ImmutableMap getCertificateConfiguration(); @Override protected void tearDown(final TestDestinationEnv testEnv) { - container.close(); + testdb.close(); } @Override diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshPostgresSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshPostgresSourceAcceptanceTest.java index fc318747408d..11e848d94275 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshPostgresSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/AbstractSshPostgresSourceAcceptanceTest.java @@ -13,7 +13,9 @@ import io.airbyte.cdk.integrations.base.ssh.SshBastionContainer; import io.airbyte.cdk.integrations.base.ssh.SshTunnel; import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv; -import io.airbyte.commons.features.EnvVariableFeatureFlags; +import io.airbyte.cdk.testutils.PostgresTestDatabase; +import io.airbyte.commons.features.FeatureFlags; 
+import io.airbyte.commons.features.FeatureFlagsWrapper; import io.airbyte.commons.functional.CheckedFunction; import io.airbyte.commons.json.Jsons; import io.airbyte.protocol.models.Field; @@ -26,28 +28,22 @@ import java.util.HashMap; import java.util.List; import org.jooq.SQLDialect; -import org.junit.jupiter.api.extension.ExtendWith; -import org.testcontainers.containers.Network; -import org.testcontainers.containers.PostgreSQLContainer; -import uk.org.webcompere.systemstubs.environment.EnvironmentVariables; -import uk.org.webcompere.systemstubs.jupiter.SystemStub; -import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension; - -@ExtendWith(SystemStubsExtension.class) + public abstract class AbstractSshPostgresSourceAcceptanceTest extends AbstractPostgresSourceAcceptanceTest { private static final String STREAM_NAME = "id_and_name"; private static final String STREAM_NAME2 = "starships"; private static final String SCHEMA_NAME = "public"; - @SystemStub - private EnvironmentVariables environmentVariables; - private static final Network network = Network.newNetwork(); - private static JsonNode config; + private final SshBastionContainer bastion = new SshBastionContainer(); - private PostgreSQLContainer db; + private PostgresTestDatabase testdb; + private JsonNode config; private void populateDatabaseTestData() throws Exception { - final var outerConfig = bastion.getTunnelConfig(getTunnelMethod(), bastion.getBasicDbConfigBuider(db, List.of("public")), false); + final var builder = testdb.makeConfigBuilder() + .put("schemas", List.of("public")) + .put("ssl", false); + final var outerConfig = bastion.getTunnelConfig(getTunnelMethod(), builder, false); SshTunnel.sshWrap( outerConfig, JdbcUtils.HOST_LIST_KEY, @@ -77,30 +73,27 @@ private static Database getDatabaseFromConfig(final JsonNode config) { public abstract SshTunnel.TunnelMethod getTunnelMethod(); + @Override + protected FeatureFlags featureFlags() { + return 
FeatureFlagsWrapper.overridingUseStreamCapableState(super.featureFlags(), true); + } + // todo (cgardens) - dynamically create data by generating a database with a random name instead of // requiring data to already be in place. @Override protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { - environmentVariables.set(EnvVariableFeatureFlags.USE_STREAM_CAPABLE_STATE, "true"); - startTestContainers(); - config = bastion.getTunnelConfig(getTunnelMethod(), bastion.getBasicDbConfigBuider(db, List.of("public")), true); + testdb = PostgresTestDatabase.make("postgres:16-bullseye", "withNetwork"); + bastion.initAndStartBastion(testdb.container.getNetwork()); + final var builder = testdb.makeConfigBuilder() + .put("schemas", List.of("public")) + .put("ssl", false); + config = bastion.getTunnelConfig(getTunnelMethod(), builder, true); populateDatabaseTestData(); - - } - - private void startTestContainers() { - bastion.initAndStartBastion(network); - initAndStartJdbcContainer(); - } - - private void initAndStartJdbcContainer() { - db = new PostgreSQLContainer<>("postgres:13-alpine").withNetwork(network); - db.start(); } @Override protected void tearDown(final TestDestinationEnv testEnv) { - bastion.stopAndCloseContainers(db); + bastion.stopAndClose(); } @Override diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CDCPostgresSourceCaCertificateSslAcceptanceTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CDCPostgresSourceCaCertificateSslAcceptanceTest.java index 1e052877768c..5c4eb2ddf57f 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CDCPostgresSourceCaCertificateSslAcceptanceTest.java +++ 
b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CDCPostgresSourceCaCertificateSslAcceptanceTest.java @@ -18,7 +18,7 @@ public ImmutableMap getCertificateConfiguration() { @Override protected String getServerImageName() { - return "postgres:15-bullseye"; + return "postgres:16-bullseye"; } } diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CDCPostgresSourceFullCertificateSslAcceptanceTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CDCPostgresSourceFullCertificateSslAcceptanceTest.java index 20b264e3a0cf..15a2fca44d68 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CDCPostgresSourceFullCertificateSslAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CDCPostgresSourceFullCertificateSslAcceptanceTest.java @@ -21,7 +21,7 @@ public ImmutableMap getCertificateConfiguration() { @Override protected String getServerImageName() { - return "postgres:15-bullseye"; + return "postgres:16-bullseye"; } } diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcInitialSnapshotPostgresSourceDatatypeTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcInitialSnapshotPostgresSourceDatatypeTest.java index d04417e3aa59..6968f340210d 100644 --- 
a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcInitialSnapshotPostgresSourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcInitialSnapshotPostgresSourceDatatypeTest.java @@ -7,43 +7,35 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import io.airbyte.cdk.db.Database; -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; import io.airbyte.cdk.db.jdbc.JdbcUtils; import io.airbyte.cdk.integrations.standardtest.source.TestDataHolder; import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv; -import io.airbyte.cdk.integrations.util.HostPortResolver; -import io.airbyte.commons.features.EnvVariableFeatureFlags; +import io.airbyte.cdk.testutils.PostgresTestDatabase; +import io.airbyte.commons.features.FeatureFlags; +import io.airbyte.commons.features.FeatureFlagsWrapper; import io.airbyte.commons.json.Jsons; import io.airbyte.protocol.models.JsonSchemaType; +import java.sql.SQLException; import java.util.List; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.extension.ExtendWith; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.MountableFile; -import uk.org.webcompere.systemstubs.environment.EnvironmentVariables; -import uk.org.webcompere.systemstubs.jupiter.SystemStub; -import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension; -@ExtendWith(SystemStubsExtension.class) public class CdcInitialSnapshotPostgresSourceDatatypeTest extends AbstractPostgresSourceDatatypeTest { private static final String SCHEMA_NAME = "test"; - private static final String SLOT_NAME_BASE = "debezium_slot"; - private static final String PUBLICATION = "publication"; private static final int INITIAL_WAITING_SECONDS = 30; 
- @SystemStub - private EnvironmentVariables environmentVariables; + private String slotName; + private String publication; + + @Override + protected FeatureFlags featureFlags() { + return FeatureFlagsWrapper.overridingUseStreamCapableState(super.featureFlags(), true); + } @Override protected Database setupDatabase() throws Exception { - environmentVariables.set(EnvVariableFeatureFlags.USE_STREAM_CAPABLE_STATE, "true"); - container = new PostgreSQLContainer<>("postgres:14-alpine") - .withCopyFileToContainer(MountableFile.forClasspathResource("postgresql.conf"), - "/etc/postgresql/postgresql.conf") - .withCommand("postgres -c config_file=/etc/postgresql/postgresql.conf"); - container.start(); + testdb = PostgresTestDatabase.make("postgres:16-bullseye", "withConf"); + slotName = testdb.withSuffix("debezium_slot"); + publication = testdb.withSuffix("publication"); /** * The publication is not being set as part of the config and because of it @@ -52,57 +44,45 @@ protected Database setupDatabase() throws Exception { */ final JsonNode replicationMethod = Jsons.jsonNode(ImmutableMap.builder() .put("method", "CDC") - .put("replication_slot", SLOT_NAME_BASE) - .put("publication", PUBLICATION) + .put("replication_slot", slotName) + .put("publication", publication) .put("initial_waiting_seconds", INITIAL_WAITING_SECONDS) .build()); - config = Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, HostPortResolver.resolveHost(container)) - .put(JdbcUtils.PORT_KEY, HostPortResolver.resolvePort(container)) - .put(JdbcUtils.DATABASE_KEY, container.getDatabaseName()) + config = Jsons.jsonNode(testdb.makeConfigBuilder() .put(JdbcUtils.SCHEMAS_KEY, List.of(SCHEMA_NAME)) - .put(JdbcUtils.USERNAME_KEY, container.getUsername()) - .put(JdbcUtils.PASSWORD_KEY, container.getPassword()) .put("replication_method", replicationMethod) .put("is_test", true) .put(JdbcUtils.SSL_KEY, false) .build()); - dslContext = DSLContextFactory.create( - 
config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.POSTGRESQL.getDriverClassName(), - String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - container.getHost(), - container.getFirstMappedPort(), - config.get(JdbcUtils.DATABASE_KEY).asText()), - SQLDialect.POSTGRES); - final Database database = new Database(dslContext); - - database.query(ctx -> { + testdb.database.query(ctx -> { ctx.execute( - "SELECT pg_create_logical_replication_slot('" + SLOT_NAME_BASE + "', 'pgoutput');"); - ctx.execute("CREATE PUBLICATION " + PUBLICATION + " FOR ALL TABLES;"); + "SELECT pg_create_logical_replication_slot('" + slotName + "', 'pgoutput');"); + ctx.execute("CREATE PUBLICATION " + publication + " FOR ALL TABLES;"); ctx.execute("CREATE EXTENSION hstore;"); return null; }); - database.query(ctx -> ctx.fetch("CREATE SCHEMA TEST;")); - database.query(ctx -> ctx.fetch("CREATE TYPE mood AS ENUM ('sad', 'ok', 'happy');")); - database.query(ctx -> ctx.fetch("CREATE TYPE inventory_item AS (\n" + testdb.database.query(ctx -> ctx.fetch("CREATE SCHEMA TEST;")); + testdb.database.query(ctx -> ctx.fetch("CREATE TYPE mood AS ENUM ('sad', 'ok', 'happy');")); + testdb.database.query(ctx -> ctx.fetch("CREATE TYPE inventory_item AS (\n" + " name text,\n" + " supplier_id integer,\n" + " price numeric\n" + ");")); - database.query(ctx -> ctx.fetch("SET TIMEZONE TO 'MST'")); - return database; + testdb.database.query(ctx -> ctx.fetch("SET TIMEZONE TO 'MST'")); + return testdb.database; } @Override - protected void tearDown(final TestDestinationEnv testEnv) { - dslContext.close(); - container.close(); + protected void tearDown(TestDestinationEnv testEnv) throws SQLException { + testdb.database.query(ctx -> { + ctx.execute("SELECT pg_drop_replication_slot('" + slotName + "');"); + ctx.execute("DROP PUBLICATION " + publication + " CASCADE;"); + return null; + }); + super.tearDown(testEnv); } public boolean testCatalog() { diff --git 
a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcPostgresSourceAcceptanceLegacyCtidTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcPostgresSourceAcceptanceLegacyCtidTest.java index 7671d7d29d01..3701e512a319 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcPostgresSourceAcceptanceLegacyCtidTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcPostgresSourceAcceptanceLegacyCtidTest.java @@ -8,7 +8,7 @@ public class CdcPostgresSourceAcceptanceLegacyCtidTest extends CdcPostgresSource @Override protected String getServerImageName() { - return "postgres:13-alpine"; + return "postgres:12-bullseye"; } } diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcPostgresSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcPostgresSourceAcceptanceTest.java index 14fb85dcaea8..4fb133959f82 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcPostgresSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcPostgresSourceAcceptanceTest.java @@ -10,13 +10,11 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import io.airbyte.cdk.db.Database; -import io.airbyte.cdk.db.factory.DSLContextFactory; 
-import io.airbyte.cdk.db.factory.DatabaseDriver; import io.airbyte.cdk.db.jdbc.JdbcUtils; import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv; -import io.airbyte.cdk.integrations.util.HostPortResolver; -import io.airbyte.commons.features.EnvVariableFeatureFlags; +import io.airbyte.cdk.testutils.PostgresTestDatabase; +import io.airbyte.commons.features.FeatureFlags; +import io.airbyte.commons.features.FeatureFlagsWrapper; import io.airbyte.commons.json.Jsons; import io.airbyte.protocol.models.Field; import io.airbyte.protocol.models.JsonSchemaType; @@ -27,96 +25,70 @@ import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; import io.airbyte.protocol.models.v0.DestinationSyncMode; import io.airbyte.protocol.models.v0.SyncMode; +import java.sql.SQLException; import java.util.HashMap; import java.util.List; import java.util.stream.Collectors; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.MountableFile; -import uk.org.webcompere.systemstubs.environment.EnvironmentVariables; -import uk.org.webcompere.systemstubs.jupiter.SystemStub; -import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension; // todo (cgardens) - Sanity check that when configured for CDC that postgres performs like any other // incremental source. As we have more sources support CDC we will find a more reusable way of doing // this, but for now this is a solid sanity check. 
-@ExtendWith(SystemStubsExtension.class) public class CdcPostgresSourceAcceptanceTest extends AbstractPostgresSourceAcceptanceTest { - protected static final String SLOT_NAME_BASE = "debezium_slot"; protected static final String NAMESPACE = "public"; private static final String STREAM_NAME = "id_and_name"; private static final String STREAM_NAME2 = "starships"; - protected static final String PUBLICATION = "publication"; protected static final int INITIAL_WAITING_SECONDS = 30; - protected PostgreSQLContainer container; + protected PostgresTestDatabase testdb; protected JsonNode config; + protected String slotName; + protected String publication; - @SystemStub - private EnvironmentVariables environmentVariables; - - @BeforeEach - void setup() { - environmentVariables.set(EnvVariableFeatureFlags.USE_STREAM_CAPABLE_STATE, "true"); + @Override + protected FeatureFlags featureFlags() { + return FeatureFlagsWrapper.overridingUseStreamCapableState(super.featureFlags(), true); } @Override protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { - container = new PostgreSQLContainer<>(getServerImageName()) - .withCopyFileToContainer(MountableFile.forClasspathResource("postgresql.conf"), "/etc/postgresql/postgresql.conf") - .withCommand("postgres -c config_file=/etc/postgresql/postgresql.conf"); - container.start(); - + testdb = PostgresTestDatabase.make(getServerImageName(), "withConf"); + slotName = testdb.withSuffix("debezium_slot"); + publication = testdb.withSuffix("publication"); final JsonNode replicationMethod = Jsons.jsonNode(ImmutableMap.builder() .put("method", "CDC") - .put("replication_slot", SLOT_NAME_BASE) - .put("publication", PUBLICATION) + .put("replication_slot", slotName) + .put("publication", publication) .put("initial_waiting_seconds", INITIAL_WAITING_SECONDS) .build()); - config = Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, HostPortResolver.resolveHost(container)) - .put(JdbcUtils.PORT_KEY, 
HostPortResolver.resolvePort(container)) - .put(JdbcUtils.DATABASE_KEY, container.getDatabaseName()) + config = Jsons.jsonNode(testdb.makeConfigBuilder() .put(JdbcUtils.SCHEMAS_KEY, List.of(NAMESPACE)) - .put(JdbcUtils.USERNAME_KEY, container.getUsername()) - .put(JdbcUtils.PASSWORD_KEY, container.getPassword()) .put("replication_method", replicationMethod) .put(JdbcUtils.SSL_KEY, false) .put("is_test", true) .build()); - try (final DSLContext dslContext = DSLContextFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.POSTGRESQL.getDriverClassName(), - String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - container.getHost(), - container.getFirstMappedPort(), - config.get(JdbcUtils.DATABASE_KEY).asText()), - SQLDialect.POSTGRES)) { - final Database database = new Database(dslContext); - - database.query(ctx -> { - ctx.execute("CREATE TABLE id_and_name(id INTEGER primary key, name VARCHAR(200));"); - ctx.execute("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); - ctx.execute("CREATE TABLE starships(id INTEGER primary key, name VARCHAR(200));"); - ctx.execute("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); - ctx.execute("SELECT pg_create_logical_replication_slot('" + SLOT_NAME_BASE + "', 'pgoutput');"); - ctx.execute("CREATE PUBLICATION " + PUBLICATION + " FOR ALL TABLES;"); - return null; - }); - } + testdb.database.query(ctx -> { + ctx.execute("CREATE TABLE id_and_name(id INTEGER primary key, name VARCHAR(200));"); + ctx.execute("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); + ctx.execute("CREATE TABLE starships(id INTEGER primary key, name VARCHAR(200));"); + ctx.execute("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); + ctx.execute("SELECT pg_create_logical_replication_slot('" + slotName + "', 'pgoutput');"); 
+ ctx.execute("CREATE PUBLICATION " + publication + " FOR ALL TABLES;"); + return null; + }); } @Override - protected void tearDown(final TestDestinationEnv testEnv) { - container.close(); + protected void tearDown(final TestDestinationEnv testEnv) throws SQLException { + testdb.database.query(ctx -> { + ctx.execute("SELECT pg_drop_replication_slot('" + slotName + "');"); + ctx.execute("DROP PUBLICATION " + publication + " CASCADE;"); + return null; + }); + testdb.close(); } @Override @@ -223,7 +195,7 @@ private void verifyFieldNotExist(final List records, final } protected String getServerImageName() { - return "postgres:15-alpine"; + return "postgres:16-bullseye"; } } diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcWalLogsPostgresSourceDatatypeTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcWalLogsPostgresSourceDatatypeTest.java index 8a28044a0035..8268af8ce4f1 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcWalLogsPostgresSourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcWalLogsPostgresSourceDatatypeTest.java @@ -7,41 +7,30 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import io.airbyte.cdk.db.Database; -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; import io.airbyte.cdk.db.jdbc.JdbcUtils; import io.airbyte.cdk.integrations.standardtest.source.TestDataHolder; import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv; -import io.airbyte.cdk.integrations.util.HostPortResolver; -import 
io.airbyte.commons.features.EnvVariableFeatureFlags; +import io.airbyte.cdk.testutils.PostgresTestDatabase; +import io.airbyte.commons.features.FeatureFlags; +import io.airbyte.commons.features.FeatureFlagsWrapper; import io.airbyte.commons.json.Jsons; import io.airbyte.protocol.models.JsonSchemaType; import io.airbyte.protocol.models.v0.AirbyteMessage; import io.airbyte.protocol.models.v0.AirbyteStateMessage; import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.v0.ConfiguredAirbyteStream; +import java.sql.SQLException; import java.util.Collections; import java.util.List; import java.util.Set; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.extension.ExtendWith; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.MountableFile; -import uk.org.webcompere.systemstubs.environment.EnvironmentVariables; -import uk.org.webcompere.systemstubs.jupiter.SystemStub; -import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension; - -@ExtendWith(SystemStubsExtension.class) + public class CdcWalLogsPostgresSourceDatatypeTest extends AbstractPostgresSourceDatatypeTest { private static final String SCHEMA_NAME = "test"; - private static final String SLOT_NAME_BASE = "debezium_slot"; - private static final String PUBLICATION = "publication"; private static final int INITIAL_WAITING_SECONDS = 30; private JsonNode stateAfterFirstSync; - - @SystemStub - private EnvironmentVariables environmentVariables; + private String slotName; + private String publication; @Override protected List runRead(final ConfiguredAirbyteCatalog configuredCatalog) throws Exception { @@ -83,14 +72,16 @@ protected void postSetup() throws Exception { } } + @Override + protected FeatureFlags featureFlags() { + return FeatureFlagsWrapper.overridingUseStreamCapableState(super.featureFlags(), true); + } + @Override protected Database setupDatabase() throws Exception { - 
environmentVariables.set(EnvVariableFeatureFlags.USE_STREAM_CAPABLE_STATE, "true"); - container = new PostgreSQLContainer<>("postgres:15-alpine") - .withCopyFileToContainer(MountableFile.forClasspathResource("postgresql.conf"), - "/etc/postgresql/postgresql.conf") - .withCommand("postgres -c config_file=/etc/postgresql/postgresql.conf"); - container.start(); + testdb = PostgresTestDatabase.make("postgres:16-bullseye", "withConf"); + slotName = testdb.withSuffix("debezium_slot"); + publication = testdb.withSuffix("publication"); /** * The publication is not being set as part of the config and because of it @@ -99,57 +90,45 @@ protected Database setupDatabase() throws Exception { */ final JsonNode replicationMethod = Jsons.jsonNode(ImmutableMap.builder() .put("method", "CDC") - .put("replication_slot", SLOT_NAME_BASE) - .put("publication", PUBLICATION) + .put("replication_slot", slotName) + .put("publication", publication) .put("initial_waiting_seconds", INITIAL_WAITING_SECONDS) .build()); - config = Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, HostPortResolver.resolveHost(container)) - .put(JdbcUtils.PORT_KEY, HostPortResolver.resolvePort(container)) - .put(JdbcUtils.DATABASE_KEY, container.getDatabaseName()) + config = Jsons.jsonNode(testdb.makeConfigBuilder() .put(JdbcUtils.SCHEMAS_KEY, List.of(SCHEMA_NAME)) - .put(JdbcUtils.USERNAME_KEY, container.getUsername()) - .put(JdbcUtils.PASSWORD_KEY, container.getPassword()) .put("replication_method", replicationMethod) .put("is_test", true) .put(JdbcUtils.SSL_KEY, false) .build()); - dslContext = DSLContextFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.POSTGRESQL.getDriverClassName(), - String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - container.getHost(), - container.getFirstMappedPort(), - config.get(JdbcUtils.DATABASE_KEY).asText()), - SQLDialect.POSTGRES); - final Database database = new 
Database(dslContext); - - database.query(ctx -> { + testdb.database.query(ctx -> { ctx.execute( - "SELECT pg_create_logical_replication_slot('" + SLOT_NAME_BASE + "', 'pgoutput');"); - ctx.execute("CREATE PUBLICATION " + PUBLICATION + " FOR ALL TABLES;"); + "SELECT pg_create_logical_replication_slot('" + slotName + "', 'pgoutput');"); + ctx.execute("CREATE PUBLICATION " + publication + " FOR ALL TABLES;"); ctx.execute("CREATE EXTENSION hstore;"); return null; }); - database.query(ctx -> ctx.fetch("CREATE SCHEMA TEST;")); - database.query(ctx -> ctx.fetch("CREATE TYPE mood AS ENUM ('sad', 'ok', 'happy');")); - database.query(ctx -> ctx.fetch("CREATE TYPE inventory_item AS (\n" + testdb.database.query(ctx -> ctx.fetch("CREATE SCHEMA TEST;")); + testdb.database.query(ctx -> ctx.fetch("CREATE TYPE mood AS ENUM ('sad', 'ok', 'happy');")); + testdb.database.query(ctx -> ctx.fetch("CREATE TYPE inventory_item AS (\n" + " name text,\n" + " supplier_id integer,\n" + " price numeric\n" + ");")); - database.query(ctx -> ctx.fetch("SET TIMEZONE TO 'MST'")); - return database; + testdb.database.query(ctx -> ctx.fetch("SET TIMEZONE TO 'MST'")); + return testdb.database; } @Override - protected void tearDown(final TestDestinationEnv testEnv) { - dslContext.close(); - container.close(); + protected void tearDown(TestDestinationEnv testEnv) throws SQLException { + testdb.database.query(ctx -> { + ctx.execute("SELECT pg_drop_replication_slot('" + slotName + "');"); + ctx.execute("DROP PUBLICATION " + publication + " CASCADE;"); + return null; + }); + super.tearDown(testEnv); } public boolean testCatalog() { diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CloudDeploymentPostgresSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CloudDeploymentPostgresSourceAcceptanceTest.java 
index 1f4de7d633a2..c6a7c9cf6465 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CloudDeploymentPostgresSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CloudDeploymentPostgresSourceAcceptanceTest.java @@ -4,21 +4,17 @@ package io.airbyte.integrations.io.airbyte.integration_tests.sources; -import static io.airbyte.cdk.db.PostgresUtils.getCertificate; - import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import io.airbyte.cdk.db.Database; import io.airbyte.cdk.db.PostgresUtils; -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; -import io.airbyte.cdk.db.jdbc.JdbcUtils; import io.airbyte.cdk.integrations.base.adaptive.AdaptiveSourceRunner; import io.airbyte.cdk.integrations.base.ssh.SshHelpers; import io.airbyte.cdk.integrations.standardtest.source.SourceAcceptanceTest; import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv; -import io.airbyte.commons.features.EnvVariableFeatureFlags; +import io.airbyte.cdk.testutils.PostgresTestDatabase; +import io.airbyte.commons.features.FeatureFlags; +import io.airbyte.commons.features.FeatureFlagsWrapper; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; import io.airbyte.protocol.models.Field; @@ -32,48 +28,36 @@ import java.util.HashMap; import java.util.List; import java.util.Optional; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.extension.ExtendWith; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.DockerImageName; -import uk.org.webcompere.systemstubs.environment.EnvironmentVariables; -import 
uk.org.webcompere.systemstubs.jupiter.SystemStub; -import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension; - -@ExtendWith(SystemStubsExtension.class) + public class CloudDeploymentPostgresSourceAcceptanceTest extends SourceAcceptanceTest { private static final String STREAM_NAME = "id_and_name"; private static final String STREAM_NAME2 = "starships"; private static final String SCHEMA_NAME = "public"; - @SystemStub - private EnvironmentVariables environmentVariables; - private PostgreSQLContainer container; + + private PostgresTestDatabase testdb; private JsonNode config; protected static final String PASSWORD = "Passw0rd"; protected static PostgresUtils.Certificate certs; + @Override + protected FeatureFlags featureFlags() { + return FeatureFlagsWrapper.overridingDeploymentMode( + FeatureFlagsWrapper.overridingUseStreamCapableState( + super.featureFlags(), + true), + AdaptiveSourceRunner.CLOUD_MODE); + } + @Override protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { - environmentVariables.set(AdaptiveSourceRunner.DEPLOYMENT_MODE_KEY, AdaptiveSourceRunner.CLOUD_MODE); - environmentVariables.set(EnvVariableFeatureFlags.USE_STREAM_CAPABLE_STATE, "true"); - container = new PostgreSQLContainer<>(DockerImageName.parse("postgres:bullseye") - .asCompatibleSubstituteFor("postgres")); - container.start(); - certs = getCertificate(container); + testdb = PostgresTestDatabase.make("postgres:16-bullseye", "withCert"); + certs = testdb.getCertificate(); final JsonNode replicationMethod = Jsons.jsonNode(ImmutableMap.builder() .put("method", "Standard") .build()); - final var containerOuterAddress = SshHelpers.getOuterContainerAddress(container); - final var containerInnerAddress = SshHelpers.getInnerContainerAddress(container); - config = Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, containerInnerAddress.left) - .put(JdbcUtils.PORT_KEY, containerInnerAddress.right) - .put(JdbcUtils.DATABASE_KEY, 
container.getDatabaseName()) - .put(JdbcUtils.USERNAME_KEY, container.getUsername()) - .put(JdbcUtils.PASSWORD_KEY, container.getPassword()) + config = Jsons.jsonNode(testdb.makeConfigBuilder() .put("replication_method", replicationMethod) .put("ssl_mode", ImmutableMap.builder() .put("mode", "verify-ca") @@ -84,30 +68,18 @@ protected void setupEnvironment(final TestDestinationEnv environment) throws Exc .build()) .build()); - try (final DSLContext dslContext = DSLContextFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.POSTGRESQL.getDriverClassName(), - String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - containerOuterAddress.left, - containerOuterAddress.right, - config.get(JdbcUtils.DATABASE_KEY).asText()), - SQLDialect.POSTGRES)) { - final Database database = new Database(dslContext); - - database.query(ctx -> { - ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); - ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); - return null; - }); - } + testdb.database.query(ctx -> { + ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); + ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); + return null; + }); } @Override protected void tearDown(final TestDestinationEnv testEnv) { - container.close(); + testdb.close(); } @Override diff --git 
a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceAcceptanceLegacyCtidTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceAcceptanceLegacyCtidTest.java index f0fce358a3c0..a0c99b2110f8 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceAcceptanceLegacyCtidTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceAcceptanceLegacyCtidTest.java @@ -8,7 +8,7 @@ public class PostgresSourceAcceptanceLegacyCtidTest extends PostgresSourceAccept @Override protected String getServerImageName() { - return "postgres:13-alpine"; + return "postgres:12-bullseye"; } } diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceAcceptanceTest.java index ceab718a5522..2c7c8cf13612 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceAcceptanceTest.java @@ -11,12 +11,12 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import io.airbyte.cdk.db.Database; -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; import 
io.airbyte.cdk.db.jdbc.JdbcUtils; import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv; import io.airbyte.cdk.integrations.util.HostPortResolver; -import io.airbyte.commons.features.EnvVariableFeatureFlags; +import io.airbyte.cdk.testutils.PostgresTestDatabase; +import io.airbyte.commons.features.FeatureFlags; +import io.airbyte.commons.features.FeatureFlagsWrapper; import io.airbyte.commons.json.Jsons; import io.airbyte.protocol.models.Field; import io.airbyte.protocol.models.JsonSchemaType; @@ -30,79 +30,57 @@ import java.sql.SQLException; import java.util.HashMap; import java.util.List; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.testcontainers.containers.PostgreSQLContainer; -import uk.org.webcompere.systemstubs.environment.EnvironmentVariables; -import uk.org.webcompere.systemstubs.jupiter.SystemStub; -import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension; -@ExtendWith(SystemStubsExtension.class) public class PostgresSourceAcceptanceTest extends AbstractPostgresSourceAcceptanceTest { private static final String STREAM_NAME = "id_and_name"; private static final String STREAM_NAME2 = "starships"; private static final String STREAM_NAME_MATERIALIZED_VIEW = "testview"; private static final String SCHEMA_NAME = "public"; - @SystemStub - private EnvironmentVariables environmentVariables; public static final String LIMIT_PERMISSION_SCHEMA = "limit_perm_schema"; - public static final String LIMIT_PERMISSION_ROLE = "limit_perm_role"; - public static final String LIMIT_PERMISSION_ROLE_PASSWORD = "test"; - private PostgreSQLContainer container; + public final String LIMIT_PERMISSION_ROLE_PASSWORD = "test"; + + private PostgresTestDatabase testdb; private JsonNode config; - private Database database; private ConfiguredAirbyteCatalog configCatalog; + private String limitPermissionRole; @Override - protected void 
setupEnvironment(final TestDestinationEnv environment) throws Exception { - environmentVariables.set(EnvVariableFeatureFlags.USE_STREAM_CAPABLE_STATE, "true"); + protected FeatureFlags featureFlags() { + return FeatureFlagsWrapper.overridingUseStreamCapableState(super.featureFlags(), true); + } - container = new PostgreSQLContainer<>(getServerImageName()); - container.start(); + @Override + protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { + testdb = PostgresTestDatabase.make(getServerImageName()); + limitPermissionRole = testdb.withSuffix("limit_perm_role"); - final String username = container.getUsername(); - final String password = container.getPassword(); final List schemas = List.of("public"); - config = getConfig(username, password, schemas); - try (final DSLContext dslContext = DSLContextFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.POSTGRESQL.getDriverClassName(), - String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - container.getHost(), - container.getFirstMappedPort(), - config.get(JdbcUtils.DATABASE_KEY).asText()), - SQLDialect.POSTGRES)) { - database = new Database(dslContext); - - database.query(ctx -> { - ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); - ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); - ctx.fetch("CREATE MATERIALIZED VIEW testview AS select * from id_and_name where id = '2';"); - return null; - }); - configCatalog = getCommonConfigCatalog(); - } + config = getConfig(testdb.userName, testdb.password, schemas); + testdb.database.query(ctx -> { + ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO id_and_name (id, 
name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); + ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); + ctx.fetch("CREATE MATERIALIZED VIEW testview AS select * from id_and_name where id = '2';"); + return null; + }); + configCatalog = getCommonConfigCatalog(); } private JsonNode getConfig(final String username, final String password, final List schemas) { final JsonNode replicationMethod = Jsons.jsonNode(ImmutableMap.builder() .put("method", "Standard") .build()); - return Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, HostPortResolver.resolveHost(container)) - .put(JdbcUtils.PORT_KEY, HostPortResolver.resolvePort(container)) - .put(JdbcUtils.DATABASE_KEY, container.getDatabaseName()) - .put(JdbcUtils.SCHEMAS_KEY, Jsons.jsonNode(schemas)) + .put(JdbcUtils.HOST_KEY, HostPortResolver.resolveHost(testdb.container)) + .put(JdbcUtils.PORT_KEY, HostPortResolver.resolvePort(testdb.container)) + .put(JdbcUtils.DATABASE_KEY, testdb.dbName) .put(JdbcUtils.USERNAME_KEY, username) .put(JdbcUtils.PASSWORD_KEY, password) + .put(JdbcUtils.SCHEMAS_KEY, Jsons.jsonNode(schemas)) .put(JdbcUtils.SSL_KEY, false) .put("replication_method", replicationMethod) .build()); @@ -110,7 +88,7 @@ private JsonNode getConfig(final String username, final String password, final L @Override protected void tearDown(final TestDestinationEnv testEnv) { - container.close(); + testdb.close(); } @Override @@ -135,16 +113,16 @@ protected boolean supportsPerStream() { @Test public void testFullRefreshWithRevokingSchemaPermissions() throws Exception { - prepareEnvForUserWithoutPermissions(database); + prepareEnvForUserWithoutPermissions(testdb.database); - config = getConfig(LIMIT_PERMISSION_ROLE, LIMIT_PERMISSION_ROLE_PASSWORD, List.of(LIMIT_PERMISSION_SCHEMA)); + config = getConfig(limitPermissionRole, LIMIT_PERMISSION_ROLE_PASSWORD, 
List.of(LIMIT_PERMISSION_SCHEMA)); final ConfiguredAirbyteCatalog configuredCatalog = getLimitPermissionConfiguredCatalog(); final List fullRefreshRecords = filterRecords(runRead(configuredCatalog)); final String assertionMessage = "Expected records after full refresh sync for user with schema permission"; assertFalse(fullRefreshRecords.isEmpty(), assertionMessage); - revokeSchemaPermissions(database); + revokeSchemaPermissions(testdb.database); final List lessPermFullRefreshRecords = filterRecords(runRead(configuredCatalog)); final String assertionMessageWithoutPermission = "Expected no records after full refresh sync for user without schema permission"; @@ -154,9 +132,9 @@ public void testFullRefreshWithRevokingSchemaPermissions() throws Exception { @Test public void testDiscoverWithRevokingSchemaPermissions() throws Exception { - prepareEnvForUserWithoutPermissions(database); - revokeSchemaPermissions(database); - config = getConfig(LIMIT_PERMISSION_ROLE, LIMIT_PERMISSION_ROLE_PASSWORD, List.of(LIMIT_PERMISSION_SCHEMA)); + prepareEnvForUserWithoutPermissions(testdb.database); + revokeSchemaPermissions(testdb.database); + config = getConfig(limitPermissionRole, LIMIT_PERMISSION_ROLE_PASSWORD, List.of(LIMIT_PERMISSION_SCHEMA)); runDiscover(); final AirbyteCatalog lastPersistedCatalogSecond = getLastPersistedCatalog(); @@ -166,20 +144,20 @@ public void testDiscoverWithRevokingSchemaPermissions() throws Exception { private void revokeSchemaPermissions(final Database database) throws SQLException { database.query(ctx -> { - ctx.fetch(String.format("REVOKE USAGE ON schema %s FROM %s;", LIMIT_PERMISSION_SCHEMA, LIMIT_PERMISSION_ROLE)); + ctx.fetch(String.format("REVOKE USAGE ON schema %s FROM %s;", LIMIT_PERMISSION_SCHEMA, limitPermissionRole)); return null; }); } private void prepareEnvForUserWithoutPermissions(final Database database) throws SQLException { database.query(ctx -> { - ctx.fetch(String.format("CREATE ROLE %s WITH LOGIN PASSWORD '%s';", 
LIMIT_PERMISSION_ROLE, LIMIT_PERMISSION_ROLE_PASSWORD)); + ctx.fetch(String.format("CREATE ROLE %s WITH LOGIN PASSWORD '%s';", limitPermissionRole, LIMIT_PERMISSION_ROLE_PASSWORD)); ctx.fetch(String.format("CREATE SCHEMA %s;", LIMIT_PERMISSION_SCHEMA)); - ctx.fetch(String.format("GRANT CONNECT ON DATABASE test TO %s;", LIMIT_PERMISSION_ROLE)); - ctx.fetch(String.format("GRANT USAGE ON schema %s TO %s;", LIMIT_PERMISSION_SCHEMA, LIMIT_PERMISSION_ROLE)); + ctx.fetch(String.format("GRANT CONNECT ON DATABASE %s TO %s;", testdb.dbName, limitPermissionRole)); + ctx.fetch(String.format("GRANT USAGE ON schema %s TO %s;", LIMIT_PERMISSION_SCHEMA, limitPermissionRole)); ctx.fetch(String.format("CREATE TABLE %s.id_and_name(id INTEGER, name VARCHAR(200));", LIMIT_PERMISSION_SCHEMA)); ctx.fetch(String.format("INSERT INTO %s.id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');", LIMIT_PERMISSION_SCHEMA)); - ctx.fetch(String.format("GRANT SELECT ON table %s.id_and_name TO %s;", LIMIT_PERMISSION_SCHEMA, LIMIT_PERMISSION_ROLE)); + ctx.fetch(String.format("GRANT SELECT ON table %s.id_and_name TO %s;", LIMIT_PERMISSION_SCHEMA, limitPermissionRole)); return null; }); } @@ -232,7 +210,7 @@ private ConfiguredAirbyteCatalog getLimitPermissionConfiguredCatalog() { } protected String getServerImageName() { - return "postgres:15-alpine"; + return "postgres:16-bullseye"; } } diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceDatatypeTest.java b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceDatatypeTest.java index f9d62ed64fb9..116ecb38767b 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceDatatypeTest.java +++ 
b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/PostgresSourceDatatypeTest.java @@ -7,61 +7,31 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import io.airbyte.cdk.db.Database; -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv; -import io.airbyte.cdk.integrations.util.HostPortResolver; -import io.airbyte.commons.features.EnvVariableFeatureFlags; +import io.airbyte.cdk.testutils.PostgresTestDatabase; +import io.airbyte.commons.features.FeatureFlags; +import io.airbyte.commons.features.FeatureFlagsWrapper; import io.airbyte.commons.json.Jsons; import java.sql.SQLException; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.extension.ExtendWith; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.MountableFile; -import uk.org.webcompere.systemstubs.environment.EnvironmentVariables; -import uk.org.webcompere.systemstubs.jupiter.SystemStub; -import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension; -@ExtendWith(SystemStubsExtension.class) public class PostgresSourceDatatypeTest extends AbstractPostgresSourceDatatypeTest { - @SystemStub - private EnvironmentVariables environmentVariables; + @Override + protected FeatureFlags featureFlags() { + return FeatureFlagsWrapper.overridingUseStreamCapableState(super.featureFlags(), true); + } @Override protected Database setupDatabase() throws SQLException { - environmentVariables.set(EnvVariableFeatureFlags.USE_STREAM_CAPABLE_STATE, "true"); - container = new PostgreSQLContainer<>("postgres:14-alpine") - .withCopyFileToContainer(MountableFile.forClasspathResource("postgresql.conf"), - "/etc/postgresql/postgresql.conf") - .withCommand("postgres -c 
config_file=/etc/postgresql/postgresql.conf"); - container.start(); + testdb = PostgresTestDatabase.make("postgres:16-bullseye", "withConf"); final JsonNode replicationMethod = Jsons.jsonNode(ImmutableMap.builder() .put("method", "Standard") .build()); - config = Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, HostPortResolver.resolveHost(container)) - .put(JdbcUtils.PORT_KEY, HostPortResolver.resolvePort(container)) - .put(JdbcUtils.DATABASE_KEY, container.getDatabaseName()) - .put(JdbcUtils.USERNAME_KEY, container.getUsername()) - .put(JdbcUtils.PASSWORD_KEY, container.getPassword()) + config = Jsons.jsonNode(testdb.makeConfigBuilder() .put(JdbcUtils.SSL_KEY, false) .put("replication_method", replicationMethod) .build()); - - dslContext = DSLContextFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.POSTGRESQL.getDriverClassName(), - String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - container.getHost(), - container.getFirstMappedPort(), - config.get(JdbcUtils.DATABASE_KEY).asText()), - SQLDialect.POSTGRES); - final Database database = new Database(dslContext); - - database.query(ctx -> { + testdb.database.query(ctx -> { ctx.execute(String.format("CREATE SCHEMA %S;", SCHEMA_NAME)); ctx.execute("CREATE TYPE mood AS ENUM ('sad', 'ok', 'happy');"); ctx.execute("CREATE TYPE inventory_item AS (name text, supplier_id integer, price numeric);"); @@ -78,13 +48,7 @@ protected Database setupDatabase() throws SQLException { return null; }); - return database; - } - - @Override - protected void tearDown(final TestDestinationEnv testEnv) { - dslContext.close(); - container.close(); + return testdb.database; } @Override diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/XminPostgresSourceAcceptanceTest.java 
b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/XminPostgresSourceAcceptanceTest.java index 333771ce0093..2a2a7be36c44 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/XminPostgresSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/XminPostgresSourceAcceptanceTest.java @@ -7,13 +7,11 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import io.airbyte.cdk.db.Database; -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; import io.airbyte.cdk.db.jdbc.JdbcUtils; import io.airbyte.cdk.integrations.standardtest.source.TestDestinationEnv; -import io.airbyte.cdk.integrations.util.HostPortResolver; -import io.airbyte.commons.features.EnvVariableFeatureFlags; +import io.airbyte.cdk.testutils.PostgresTestDatabase; +import io.airbyte.commons.features.FeatureFlags; +import io.airbyte.commons.features.FeatureFlagsWrapper; import io.airbyte.commons.json.Jsons; import io.airbyte.protocol.models.Field; import io.airbyte.protocol.models.JsonSchemaType; @@ -24,27 +22,16 @@ import io.airbyte.protocol.models.v0.SyncMode; import java.util.HashMap; import java.util.List; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.extension.ExtendWith; -import org.testcontainers.containers.PostgreSQLContainer; -import uk.org.webcompere.systemstubs.environment.EnvironmentVariables; -import uk.org.webcompere.systemstubs.jupiter.SystemStub; -import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension; -@ExtendWith(SystemStubsExtension.class) public class XminPostgresSourceAcceptanceTest extends 
AbstractPostgresSourceAcceptanceTest { private static final String STREAM_NAME = "id_and_name"; private static final String STREAM_NAME2 = "starships"; private static final String STREAM_NAME_MATERIALIZED_VIEW = "testview"; private static final String SCHEMA_NAME = "public"; - @SystemStub - private EnvironmentVariables environmentVariables; - private PostgreSQLContainer container; + private PostgresTestDatabase testdb; private JsonNode config; - private Database database; private ConfiguredAirbyteCatalog configCatalog; @Override @@ -53,57 +40,36 @@ protected JsonNode getConfig() throws Exception { } @Override - protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { - environmentVariables.set(EnvVariableFeatureFlags.USE_STREAM_CAPABLE_STATE, "true"); - - container = new PostgreSQLContainer<>("postgres:13-alpine"); - container.start(); - final String username = container.getUsername(); - final String password = container.getPassword(); - final List schemas = List.of("public"); - config = getXminConfig(username, password, schemas); - try (final DSLContext dslContext = DSLContextFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.POSTGRESQL.getDriverClassName(), - String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - container.getHost(), - container.getFirstMappedPort(), - config.get(JdbcUtils.DATABASE_KEY).asText()), - SQLDialect.POSTGRES)) { - database = new Database(dslContext); - - database.query(ctx -> { - ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); - ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); - ctx.fetch("CREATE MATERIALIZED VIEW testview AS select * from id_and_name where id = '2';"); - 
return null; - }); - configCatalog = getXminCatalog(); - } + protected FeatureFlags featureFlags() { + return FeatureFlagsWrapper.overridingUseStreamCapableState(super.featureFlags(), true); } - private JsonNode getXminConfig(final String username, final String password, final List schemas) { + @Override + protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { + testdb = PostgresTestDatabase.make("postgres:12-bullseye"); final JsonNode replicationMethod = Jsons.jsonNode(ImmutableMap.builder() .put("method", "Xmin") .build()); - return Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, HostPortResolver.resolveHost(container)) - .put(JdbcUtils.PORT_KEY, HostPortResolver.resolvePort(container)) - .put(JdbcUtils.DATABASE_KEY, container.getDatabaseName()) - .put(JdbcUtils.SCHEMAS_KEY, Jsons.jsonNode(schemas)) - .put(JdbcUtils.USERNAME_KEY, username) - .put(JdbcUtils.PASSWORD_KEY, password) + config = Jsons.jsonNode(testdb.makeConfigBuilder() + .put(JdbcUtils.SCHEMAS_KEY, Jsons.jsonNode(List.of("public"))) .put(JdbcUtils.SSL_KEY, false) .put("replication_method", replicationMethod) .build()); + + testdb.database.query(ctx -> { + ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); + ctx.fetch("CREATE TABLE starships(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); + ctx.fetch("CREATE MATERIALIZED VIEW testview AS select * from id_and_name where id = '2';"); + return null; + }); + configCatalog = getXminCatalog(); } @Override protected void tearDown(final TestDestinationEnv testEnv) throws Exception { - container.close(); + testdb.close(); } @Override diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceLegacyCtidTest.java 
b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceLegacyCtidTest.java index 2461331a5ca1..3ba91d1c4656 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceLegacyCtidTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceLegacyCtidTest.java @@ -6,9 +6,8 @@ public class CdcPostgresSourceLegacyCtidTest extends CdcPostgresSourceTest { - @Override - protected String getServerImageName() { - return "debezium/postgres:13-alpine"; + protected static String getServerImageName() { + return "debezium/postgres:13-bullseye"; } } diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java index 0cce4b29c604..06b058ad87e2 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java @@ -26,7 +26,6 @@ import com.google.common.collect.Streams; import io.airbyte.cdk.db.Database; import io.airbyte.cdk.db.PgLsn; -import io.airbyte.cdk.db.factory.DSLContextFactory; import io.airbyte.cdk.db.factory.DataSourceFactory; import io.airbyte.cdk.db.factory.DatabaseDriver; import io.airbyte.cdk.db.jdbc.DefaultJdbcDatabase; @@ -38,11 +37,10 @@ import io.airbyte.cdk.integrations.debezium.internals.postgres.PostgresCdcTargetPosition; import io.airbyte.cdk.integrations.debezium.internals.postgres.PostgresReplicationConnection; import io.airbyte.cdk.integrations.util.ConnectorExceptionUtil; -import io.airbyte.cdk.testutils.PostgreSQLContainerHelper; 
+import io.airbyte.cdk.testutils.PostgresTestDatabase; import io.airbyte.commons.features.EnvVariableFeatureFlags; -import io.airbyte.commons.io.IOs; +import io.airbyte.commons.features.FeatureFlagsWrapper; import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.string.Strings; import io.airbyte.commons.util.AutoCloseableIterator; import io.airbyte.commons.util.AutoCloseableIterators; import io.airbyte.protocol.models.Field; @@ -70,83 +68,69 @@ import java.util.Set; import java.util.stream.Collectors; import javax.sql.DataSource; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.DockerImageName; -import org.testcontainers.utility.MountableFile; -import uk.org.webcompere.systemstubs.environment.EnvironmentVariables; -import uk.org.webcompere.systemstubs.jupiter.SystemStub; -import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension; - -@ExtendWith(SystemStubsExtension.class) -public class CdcPostgresSourceTest extends CdcSourceTest { - @SystemStub - private EnvironmentVariables environmentVariables; +public class CdcPostgresSourceTest extends CdcSourceTest { - protected static final String SLOT_NAME_BASE = "debezium_slot"; - protected static final String PUBLICATION = "publication"; + protected String publication; protected static final int INITIAL_WAITING_SECONDS = 15; - private PostgreSQLContainer container; - - protected String dbName; - protected Database database; - private DSLContext dslContext; private PostgresSource source; + + private PostgresTestDatabase testdb; + private JsonNode config; private String fullReplicationSlot; - private final String cleanUserName = "airbyte_test"; + private String cleanUserVanillaName, cleanUserReplicationName, cleanUserSuperName; private final 
String cleanUserPassword = "password"; protected String getPluginName() { return "pgoutput"; } - @AfterEach - void tearDown() { - dslContext.close(); - container.close(); - } - @BeforeEach protected void setup() throws SQLException { - final DockerImageName myImage = DockerImageName.parse(getServerImageName()).asCompatibleSubstituteFor("postgres"); - container = new PostgreSQLContainer<>(myImage) - .withCopyFileToContainer(MountableFile.forClasspathResource("postgresql.conf"), "/etc/postgresql/postgresql.conf") - .withCommand("postgres -c config_file=/etc/postgresql/postgresql.conf"); - container.start(); - environmentVariables.set(EnvVariableFeatureFlags.USE_STREAM_CAPABLE_STATE, "true"); source = new PostgresSource(); - dbName = Strings.addRandomSuffix("db", "_", 10).toLowerCase(); - - final String initScriptName = "init_" + dbName.concat(".sql"); - final String tmpFilePath = IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE " + dbName + ";"); - PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), container); - - config = getConfig(dbName, container.getUsername(), container.getPassword()); - fullReplicationSlot = SLOT_NAME_BASE + "_" + dbName; - dslContext = getDslContext(config); - database = getDatabase(dslContext); + source.setFeatureFlags(FeatureFlagsWrapper.overridingUseStreamCapableState(new EnvVariableFeatureFlags(), true)); + testdb = PostgresTestDatabase.make(getServerImageName(), "withConf"); + fullReplicationSlot = testdb.withSuffix("debezium_slot"); + publication = testdb.withSuffix("publication"); + config = getConfig(testdb.dbName, testdb.userName, testdb.password); + cleanUserSuperName = testdb.withSuffix("super_user"); + cleanUserReplicationName = testdb.withSuffix("replication_user"); + cleanUserVanillaName = testdb.withSuffix("vanilla_user"); super.setup(); - database.query(ctx -> { + testdb.database.query(ctx -> { ctx.execute("SELECT pg_create_logical_replication_slot('" + fullReplicationSlot + "', '" + 
getPluginName() + "');"); - ctx.execute("CREATE PUBLICATION " + PUBLICATION + " FOR ALL TABLES;"); - + ctx.execute("CREATE PUBLICATION " + publication + " FOR ALL TABLES;"); + ctx.execute("CREATE USER " + cleanUserSuperName + " PASSWORD '" + cleanUserPassword + "';"); + ctx.execute("ALTER USER " + cleanUserSuperName + " SUPERUSER;"); + ctx.execute("CREATE USER " + cleanUserReplicationName + " PASSWORD '" + cleanUserPassword + "';"); + ctx.execute("ALTER USER " + cleanUserReplicationName + " REPLICATION;"); + ctx.execute("CREATE USER " + cleanUserVanillaName + " PASSWORD '" + cleanUserPassword + "';"); return null; }); + } + @AfterEach + protected void tearDown() throws SQLException { + testdb.database.query(ctx -> { + ctx.execute("DROP USER " + cleanUserVanillaName + ";"); + ctx.execute("DROP USER " + cleanUserReplicationName + ";"); + ctx.execute("DROP USER " + cleanUserSuperName + ";"); + ctx.execute("DROP PUBLICATION " + publication + " CASCADE;"); + ctx.execute("SELECT pg_drop_replication_slot('" + fullReplicationSlot + "');"); + return null; + }); + testdb.close(); } private JsonNode getConfig(final String dbName, final String userName, final String userPassword) { final JsonNode replicationMethod = getReplicationMethod(dbName); return Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, container.getHost()) - .put(JdbcUtils.PORT_KEY, container.getFirstMappedPort()) + .put(JdbcUtils.HOST_KEY, testdb.container.getHost()) + .put(JdbcUtils.PORT_KEY, testdb.container.getFirstMappedPort()) .put(JdbcUtils.DATABASE_KEY, dbName) .put(JdbcUtils.SCHEMAS_KEY, List.of(MODELS_SCHEMA, MODELS_SCHEMA + "_random")) .put(JdbcUtils.USERNAME_KEY, userName) @@ -161,66 +145,31 @@ private JsonNode getConfig(final String dbName, final String userName, final Str private JsonNode getReplicationMethod(final String dbName) { return Jsons.jsonNode(ImmutableMap.builder() .put("method", "CDC") - .put("replication_slot", SLOT_NAME_BASE + "_" + dbName) - .put("publication", 
PUBLICATION) + .put("replication_slot", fullReplicationSlot) + .put("publication", publication) .put("plugin", getPluginName()) .put("initial_waiting_seconds", INITIAL_WAITING_SECONDS) .put("lsn_commit_behaviour", "After loading Data in the destination") .build()); } - private static Database getDatabase(final DSLContext dslContext) { - return new Database(dslContext); - } - - private static DSLContext getDslContext(final JsonNode config) { - return DSLContextFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.POSTGRESQL.getDriverClassName(), - String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - config.get(JdbcUtils.HOST_KEY).asText(), - config.get(JdbcUtils.PORT_KEY).asInt(), - config.get(JdbcUtils.DATABASE_KEY).asText()), - SQLDialect.POSTGRES); - } - - /** - * Creates a new user without privileges for the access tests - */ - private void createCleanUser() { - executeQuery("CREATE USER " + cleanUserName + " PASSWORD '" + cleanUserPassword + "';"); - } - - /** - * Grants privilege to a user (SUPERUSER, REPLICATION, ...) 
- */ - private void grantUserPrivilege(final String userName, final String postgresPrivilege) { - executeQuery("ALTER USER " + userName + " " + postgresPrivilege + ";"); - } - @Test void testCheckReplicationAccessSuperUserPrivilege() throws Exception { - createCleanUser(); - final JsonNode test_config = getConfig(dbName, cleanUserName, cleanUserPassword); - grantUserPrivilege(cleanUserName, "SUPERUSER"); + final JsonNode test_config = getConfig(testdb.dbName, cleanUserSuperName, cleanUserPassword); final AirbyteConnectionStatus status = source.check(test_config); assertEquals(AirbyteConnectionStatus.Status.SUCCEEDED, status.getStatus()); } @Test void testCheckReplicationAccessReplicationPrivilege() throws Exception { - createCleanUser(); - final JsonNode test_config = getConfig(dbName, cleanUserName, cleanUserPassword); - grantUserPrivilege(cleanUserName, "REPLICATION"); + final JsonNode test_config = getConfig(testdb.dbName, cleanUserReplicationName, cleanUserPassword); final AirbyteConnectionStatus status = source.check(test_config); assertEquals(AirbyteConnectionStatus.Status.SUCCEEDED, status.getStatus()); } @Test void testCheckWithoutReplicationPermission() throws Exception { - createCleanUser(); - final JsonNode test_config = getConfig(dbName, cleanUserName, cleanUserPassword); + final JsonNode test_config = getConfig(testdb.dbName, cleanUserVanillaName, cleanUserPassword); final AirbyteConnectionStatus status = source.check(test_config); assertEquals(AirbyteConnectionStatus.Status.FAILED, status.getStatus()); assertEquals(String.format(ConnectorExceptionUtil.COMMON_EXCEPTION_MESSAGE_TEMPLATE, @@ -230,18 +179,18 @@ void testCheckWithoutReplicationPermission() throws Exception { @Test void testCheckWithoutPublication() throws Exception { - database.query(ctx -> ctx.execute("DROP PUBLICATION " + PUBLICATION + ";")); + testdb.database.query(ctx -> ctx.execute("DROP PUBLICATION " + publication + ";")); final AirbyteConnectionStatus status = 
source.check(getConfig()); assertEquals(status.getStatus(), AirbyteConnectionStatus.Status.FAILED); + testdb.database.query(ctx -> ctx.execute("CREATE PUBLICATION " + publication + " FOR ALL TABLES;")); } @Test void testCheckWithoutReplicationSlot() throws Exception { - final String fullReplicationSlot = SLOT_NAME_BASE + "_" + dbName; - database.query(ctx -> ctx.execute("SELECT pg_drop_replication_slot('" + fullReplicationSlot + "');")); - + testdb.database.query(ctx -> ctx.execute("SELECT pg_drop_replication_slot('" + fullReplicationSlot + "');")); final AirbyteConnectionStatus status = source.check(getConfig()); assertEquals(status.getStatus(), AirbyteConnectionStatus.Status.FAILED); + testdb.database.query(ctx -> ctx.execute("SELECT pg_create_logical_replication_slot('" + fullReplicationSlot + "', '" + getPluginName() + "');")); } @Override @@ -573,7 +522,7 @@ protected JsonNode getConfig() { @Override protected Database getDatabase() { - return database; + return testdb.database; } @Override @@ -595,8 +544,8 @@ void testDiscoverFiltersNonPublication() throws Exception { // for one of the tests and assert that both streams end up in the catalog. However, the stream that // is not associated with // a publication should only have SyncMode.FULL_REFRESH as a supported sync mode. 
- database.query(ctx -> ctx.execute("DROP PUBLICATION " + PUBLICATION + ";")); - database.query(ctx -> ctx.execute(String.format("CREATE PUBLICATION " + PUBLICATION + " FOR TABLE %s.%s", MODELS_SCHEMA, "models"))); + testdb.database.query(ctx -> ctx.execute("DROP PUBLICATION " + publication + ";")); + testdb.database.query(ctx -> ctx.execute(String.format("CREATE PUBLICATION " + publication + " FOR TABLE %s.%s", MODELS_SCHEMA, "models"))); final AirbyteCatalog catalog = source.discover(getConfig()); assertEquals(catalog.getStreams().size(), 2); @@ -616,6 +565,8 @@ void testDiscoverFiltersNonPublication() throws Exception { assertEquals(streamNotInPublication.getSupportedSyncModes(), List.of(SyncMode.FULL_REFRESH)); assertTrue(streamNotInPublication.getSourceDefinedPrimaryKey().isEmpty()); assertFalse(streamNotInPublication.getSourceDefinedCursor()); + testdb.database.query(ctx -> ctx.execute("DROP PUBLICATION " + publication + ";")); + testdb.database.query(ctx -> ctx.execute("CREATE PUBLICATION " + publication + " FOR ALL TABLES")); } @Test @@ -679,8 +630,8 @@ protected void syncShouldHandlePurgedLogsGracefully() throws Exception { final int recordsToCreate = 20; final JsonNode config = getConfig(); - final JsonNode replicationMethod = ((ObjectNode) getReplicationMethod(config.get(JdbcUtils.DATABASE_KEY).asText())).put("lsn_commit_behaviour", - "While reading Data"); + final JsonNode replicationMethod = ((ObjectNode) getReplicationMethod(config.get(JdbcUtils.DATABASE_KEY).asText())) + .put("lsn_commit_behaviour", "While reading Data"); ((ObjectNode) config).put("replication_method", replicationMethod); final AutoCloseableIterator firstBatchIterator = getSource() @@ -690,13 +641,7 @@ protected void syncShouldHandlePurgedLogsGracefully() throws Exception { final List stateAfterFirstBatch = extractStateMessages(dataFromFirstBatch); assertExpectedStateMessages(stateAfterFirstBatch); // second batch of records again 20 being created - for (int recordsCreated = 0; 
recordsCreated < recordsToCreate; recordsCreated++) { - final JsonNode record = - Jsons.jsonNode(ImmutableMap - .of(COL_ID, 200 + recordsCreated, COL_MAKE_ID, 1, COL_MODEL, - "F-" + recordsCreated)); - writeModelRecord(record); - } + bulkInsertRecords(recordsToCreate); // Extract the last state message final JsonNode state = Jsons.jsonNode(Collections.singletonList(stateAfterFirstBatch.get(stateAfterFirstBatch.size() - 1))); @@ -777,13 +722,7 @@ protected void syncShouldIncrementLSN() throws Exception { assertLsnPositionForSyncShouldIncrementLSN(replicationSlotAtTheBeginning, replicationSlotAfterFirstSync, 1); // second batch of records again 20 being created - for (int recordsCreated = 0; recordsCreated < recordsToCreate; recordsCreated++) { - final JsonNode record = - Jsons.jsonNode(ImmutableMap - .of(COL_ID, 200 + recordsCreated, COL_MAKE_ID, 1, COL_MODEL, - "F-" + recordsCreated)); - writeModelRecord(record); - } + bulkInsertRecords(recordsToCreate); final JsonNode stateAfterFirstSync = Jsons.jsonNode(Collections.singletonList(stateAfterFirstBatch.get(stateAfterFirstBatch.size() - 1))); final AutoCloseableIterator secondBatchIterator = getSource() @@ -885,14 +824,7 @@ protected void verifyCheckpointStatesByRecords() throws Exception { // As first `read` operation is from snapshot, it would generate only one state message at the end // of the process. 
assertExpectedStateMessages(stateMessages); - - for (int recordsCreated = 0; recordsCreated < recordsToCreate; recordsCreated++) { - final JsonNode record = - Jsons.jsonNode(ImmutableMap - .of(COL_ID, 200 + recordsCreated, COL_MAKE_ID, 1, COL_MODEL, - "F-" + recordsCreated)); - writeModelRecord(record); - } + bulkInsertRecords(recordsToCreate); final JsonNode stateAfterFirstSync = Jsons.jsonNode(Collections.singletonList(stateMessages.get(stateMessages.size() - 1))); final AutoCloseableIterator secondBatchIterator = getSource() @@ -926,14 +858,8 @@ protected void verifyCheckpointStatesBySeconds() throws Exception { // As first `read` operation is from snapshot, it would generate only one state message at the end // of the process. assertExpectedStateMessages(stateMessages); + bulkInsertRecords(recordsToCreate); - for (int recordsCreated = 0; recordsCreated < recordsToCreate; recordsCreated++) { - final JsonNode record = - Jsons.jsonNode(ImmutableMap - .of(COL_ID, 200 + recordsCreated, COL_MAKE_ID, 1, COL_MODEL, - "F-" + recordsCreated)); - writeModelRecord(record); - } final JsonNode config = getConfig(); ((ObjectNode) config).put(SYNC_CHECKPOINT_DURATION_PROPERTY, 1); ((ObjectNode) config).put(SYNC_CHECKPOINT_RECORDS_PROPERTY, 100_000); @@ -961,13 +887,9 @@ protected void ctidIteratorPageSizeTest() throws Exception { final Set expectedIds = new HashSet<>(); MODEL_RECORDS.forEach(c -> expectedIds.add(c.get(COL_ID).asInt())); + bulkInsertRecords(recordsToCreate); for (int recordsCreated = 0; recordsCreated < recordsToCreate; recordsCreated++) { final int id = 200 + recordsCreated; - final JsonNode record = - Jsons.jsonNode(ImmutableMap - .of(COL_ID, id, COL_MAKE_ID, 1, COL_MODEL, - "F-" + recordsCreated)); - writeModelRecord(record); expectedIds.add(id); } @@ -993,6 +915,21 @@ protected void ctidIteratorPageSizeTest() throws Exception { }); } + private void bulkInsertRecords(int recordsToCreate) { + final var bulkInsertQuery = String.format(""" + INSERT INTO %s.%s 
(%s, %s, %s) + SELECT + 200 + generate_series AS id, + 1 AS make_id, + 'F-' || generate_series AS model + FROM generate_series(0, %d - 1); + """, + MODELS_SCHEMA, MODELS_STREAM_NAME, + COL_ID, COL_MAKE_ID, COL_MODEL, + recordsToCreate); + executeQuery(bulkInsertQuery); + } + @Override protected void compareTargetPositionFromTheRecordsWithTargetPostionGeneratedBeforeSync(final CdcTargetPosition targetPosition, final AirbyteRecordMessage record) { @@ -1008,8 +945,8 @@ protected void compareTargetPositionFromTheRecordsWithTargetPostionGeneratedBefo assertTrue(extractPosition(record.getData()).targetLsn.compareTo(((PostgresCdcTargetPosition) targetPosition).targetLsn) >= 0); } - protected String getServerImageName() { - return "debezium/postgres:15-alpine"; + protected static String getServerImageName() { + return "postgres:16-bullseye"; } } diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CloudDeploymentPostgresSourceTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CloudDeploymentPostgresSourceTest.java index d22f34cbd478..5f245d7690ef 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CloudDeploymentPostgresSourceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CloudDeploymentPostgresSourceTest.java @@ -11,153 +11,130 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableMap; import io.airbyte.cdk.db.jdbc.JdbcUtils; +import io.airbyte.cdk.integrations.base.Source; import io.airbyte.cdk.integrations.base.adaptive.AdaptiveSourceRunner; import io.airbyte.cdk.integrations.base.ssh.SshBastionContainer; import io.airbyte.cdk.integrations.base.ssh.SshHelpers; import io.airbyte.cdk.integrations.base.ssh.SshTunnel; +import io.airbyte.cdk.testutils.PostgresTestDatabase; 
+import io.airbyte.commons.features.EnvVariableFeatureFlags; +import io.airbyte.commons.features.FeatureFlagsWrapper; import io.airbyte.commons.json.Jsons; import io.airbyte.protocol.models.v0.AirbyteConnectionStatus; import java.util.List; import java.util.Map; import java.util.Objects; -import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; import org.testcontainers.containers.Network; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.DockerImageName; -import uk.org.webcompere.systemstubs.environment.EnvironmentVariables; -import uk.org.webcompere.systemstubs.jupiter.SystemStub; -import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension; -@ExtendWith(SystemStubsExtension.class) public class CloudDeploymentPostgresSourceTest { - @SystemStub - private EnvironmentVariables environmentVariables; + static PostgresTestDatabase DB_NO_SSL_WITH_NETWORK, DB_WITH_SSL, DB_WITH_SSL_WITH_NETWORK; + static SshBastionContainer BASTION_NO_SSL, BASTION_WITH_SSL; + static Network NETWORK_NO_SSL, NETWORK_WITH_SSL; - private final PostgreSQLContainer postgreSQLContainerNoSSL = new PostgreSQLContainer<>("postgres:13-alpine"); - private final PostgreSQLContainer postgreSQLContainerWithSSL = - new PostgreSQLContainer<>(DockerImageName.parse("marcosmarxm/postgres-ssl:dev").asCompatibleSubstituteFor("postgres")) - .withCommand("postgres -c ssl=on -c ssl_cert_file=/var/lib/postgresql/server.crt -c ssl_key_file=/var/lib/postgresql/server.key"); - private static final List NON_STRICT_SSL_MODES = List.of("disable", "allow", "prefer"); - private static final String SSL_MODE_REQUIRE = "require"; + @BeforeAll + static void setupContainers() { + DB_NO_SSL_WITH_NETWORK = PostgresTestDatabase.make("postgres:16-bullseye", "withNetwork"); + NETWORK_NO_SSL = 
DB_NO_SSL_WITH_NETWORK.container.getNetwork(); + BASTION_NO_SSL = new SshBastionContainer(); + BASTION_NO_SSL.initAndStartBastion(NETWORK_NO_SSL); - private static final SshBastionContainer bastion = new SshBastionContainer(); - private static final Network network = Network.newNetwork(); + DB_WITH_SSL = PostgresTestDatabase.make("marcosmarxm/postgres-ssl:dev", "withSSL"); - @BeforeEach - void setup() { - environmentVariables.set(AdaptiveSourceRunner.DEPLOYMENT_MODE_KEY, AdaptiveSourceRunner.CLOUD_MODE); + DB_WITH_SSL_WITH_NETWORK = PostgresTestDatabase.make("marcosmarxm/postgres-ssl:dev", "withSSL", "withNetwork"); + NETWORK_WITH_SSL = DB_WITH_SSL_WITH_NETWORK.container.getNetwork(); + BASTION_WITH_SSL = new SshBastionContainer(); + BASTION_WITH_SSL.initAndStartBastion(NETWORK_WITH_SSL); } - @Test - void testSSlModesDisableAllowPreferWithTunnelIfServerDoesNotSupportSSL() throws Exception { + @AfterAll + static void tearDownContainers() { + BASTION_NO_SSL.stopAndClose(); + BASTION_WITH_SSL.stopAndClose(); + DB_NO_SSL_WITH_NETWORK.close(); + DB_WITH_SSL_WITH_NETWORK.close(); + DB_WITH_SSL.close(); + } - try (final PostgreSQLContainer db = postgreSQLContainerNoSSL.withNetwork(network)) { - bastion.initAndStartBastion(network); - db.start(); + private static final List NON_STRICT_SSL_MODES = List.of("disable", "allow", "prefer"); + private static final String SSL_MODE_REQUIRE = "require"; - for (final String sslmode : NON_STRICT_SSL_MODES) { - final AirbyteConnectionStatus connectionStatus = checkWithTunnel(db, sslmode, false); - assertEquals(AirbyteConnectionStatus.Status.SUCCEEDED, connectionStatus.getStatus()); - } + private Source source() { + PostgresSource source = new PostgresSource(); + source.setFeatureFlags( + FeatureFlagsWrapper.overridingDeploymentMode( + FeatureFlagsWrapper.overridingUseStreamCapableState( + new EnvVariableFeatureFlags(), + true), + AdaptiveSourceRunner.CLOUD_MODE)); + return PostgresSource.sshWrappedSource(source); + } - } finally { - 
bastion.stopAndClose(); + @Test + void testSSlModesDisableAllowPreferWithTunnelIfServerDoesNotSupportSSL() throws Exception { + for (final String sslmode : NON_STRICT_SSL_MODES) { + final AirbyteConnectionStatus connectionStatus = checkWithTunnel(DB_NO_SSL_WITH_NETWORK, BASTION_NO_SSL, sslmode); + assertEquals(AirbyteConnectionStatus.Status.SUCCEEDED, connectionStatus.getStatus()); } } @Test void testSSlModesDisableAllowPreferWithTunnelIfServerSupportSSL() throws Exception { - try (final PostgreSQLContainer db = postgreSQLContainerWithSSL.withNetwork(network)) { - - bastion.initAndStartBastion(network); - db.start(); - for (final String sslmode : NON_STRICT_SSL_MODES) { - - final AirbyteConnectionStatus connectionStatus = checkWithTunnel(db, sslmode, false); - assertEquals(AirbyteConnectionStatus.Status.SUCCEEDED, connectionStatus.getStatus()); - } - } finally { - bastion.stopAndClose(); + for (final String sslmode : NON_STRICT_SSL_MODES) { + final AirbyteConnectionStatus connectionStatus = checkWithTunnel(DB_WITH_SSL_WITH_NETWORK, BASTION_WITH_SSL, sslmode); + assertEquals(AirbyteConnectionStatus.Status.SUCCEEDED, connectionStatus.getStatus()); } } @Test void testSSlModesDisableAllowPreferWithFailedTunnelIfServerSupportSSL() throws Exception { - try (final PostgreSQLContainer db = postgreSQLContainerWithSSL) { - - bastion.initAndStartBastion(network); - db.start(); - for (final String sslmode : NON_STRICT_SSL_MODES) { - - final AirbyteConnectionStatus connectionStatus = checkWithTunnel(db, sslmode, false); - assertEquals(AirbyteConnectionStatus.Status.FAILED, connectionStatus.getStatus()); - final String msg = connectionStatus.getMessage(); - assertTrue(msg.matches(".*Connection is not available.*|.*The connection attempt failed.*"), msg); - } - } finally { - bastion.stopAndClose(); + for (final String sslmode : NON_STRICT_SSL_MODES) { + final AirbyteConnectionStatus connectionStatus = checkWithTunnel(DB_WITH_SSL, BASTION_WITH_SSL, sslmode); + 
assertEquals(AirbyteConnectionStatus.Status.FAILED, connectionStatus.getStatus()); + final String msg = connectionStatus.getMessage(); + assertTrue(msg.matches(".*Connection is not available.*|.*The connection attempt failed.*"), msg); } } @Test void testSSlRequiredWithTunnelIfServerDoesNotSupportSSL() throws Exception { - - try (final PostgreSQLContainer db = postgreSQLContainerNoSSL.withNetwork(network)) { - bastion.initAndStartBastion(network); - db.start(); - final AirbyteConnectionStatus connectionStatus = checkWithTunnel(db, SSL_MODE_REQUIRE, false); - assertEquals(AirbyteConnectionStatus.Status.FAILED, connectionStatus.getStatus()); - assertEquals("State code: 08004; Message: The server does not support SSL.", connectionStatus.getMessage()); - - } finally { - bastion.stopAndClose(); - } + final AirbyteConnectionStatus connectionStatus = checkWithTunnel(DB_NO_SSL_WITH_NETWORK, BASTION_NO_SSL, SSL_MODE_REQUIRE); + assertEquals(AirbyteConnectionStatus.Status.FAILED, connectionStatus.getStatus()); + assertEquals("State code: 08004; Message: The server does not support SSL.", connectionStatus.getMessage()); } @Test void testSSlRequiredNoTunnelIfServerSupportSSL() throws Exception { - - try (final PostgreSQLContainer db = postgreSQLContainerWithSSL) { - db.start(); - - final ImmutableMap configBuilderWithSSLMode = getDatabaseConfigBuilderWithSSLMode(db, SSL_MODE_REQUIRE, false).build(); - final JsonNode config = Jsons.jsonNode(configBuilderWithSSLMode); - addNoTunnel((ObjectNode) config); - final AirbyteConnectionStatus connectionStatus = PostgresSource.sshWrappedSource().check(config); - assertEquals(AirbyteConnectionStatus.Status.SUCCEEDED, connectionStatus.getStatus()); - } + final ImmutableMap configBuilderWithSSLMode = getDatabaseConfigBuilderWithSSLMode( + DB_WITH_SSL, SSL_MODE_REQUIRE, false).build(); + final JsonNode config = Jsons.jsonNode(configBuilderWithSSLMode); + addNoTunnel((ObjectNode) config); + final AirbyteConnectionStatus connectionStatus = 
source().check(config); + assertEquals(AirbyteConnectionStatus.Status.SUCCEEDED, connectionStatus.getStatus()); } @Test void testStrictSSLSecuredWithTunnel() throws Exception { - - try (final PostgreSQLContainer db = postgreSQLContainerWithSSL.withNetwork(network)) { - - bastion.initAndStartBastion(network); - db.start(); - - final AirbyteConnectionStatus connectionStatus = checkWithTunnel(db, SSL_MODE_REQUIRE, false); - assertEquals(AirbyteConnectionStatus.Status.SUCCEEDED, connectionStatus.getStatus()); - } finally { - bastion.stopAndClose(); - } + final AirbyteConnectionStatus connectionStatus = checkWithTunnel(DB_WITH_SSL_WITH_NETWORK, BASTION_WITH_SSL, SSL_MODE_REQUIRE); + assertEquals(AirbyteConnectionStatus.Status.SUCCEEDED, connectionStatus.getStatus()); } - private ImmutableMap.Builder getDatabaseConfigBuilderWithSSLMode(final PostgreSQLContainer db, + private ImmutableMap.Builder getDatabaseConfigBuilderWithSSLMode(final PostgresTestDatabase db, final String sslMode, final boolean innerAddress) { - final var containerAddress = innerAddress ? SshHelpers.getInnerContainerAddress(db) : SshHelpers.getOuterContainerAddress(db); + final var containerAddress = innerAddress + ? 
SshHelpers.getInnerContainerAddress(db.container) + : SshHelpers.getOuterContainerAddress(db.container); return ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, Objects.requireNonNull( - containerAddress.left)) + .put(JdbcUtils.HOST_KEY, Objects.requireNonNull(containerAddress.left)) .put(JdbcUtils.PORT_KEY, containerAddress.right) - .put(JdbcUtils.DATABASE_KEY, db.getDatabaseName()) + .put(JdbcUtils.DATABASE_KEY, db.dbName) .put(JdbcUtils.SCHEMAS_KEY, List.of("public")) - .put(JdbcUtils.USERNAME_KEY, db.getUsername()) - .put(JdbcUtils.PASSWORD_KEY, db.getPassword()) + .put(JdbcUtils.USERNAME_KEY, db.userName) + .put(JdbcUtils.PASSWORD_KEY, db.password) .put(JdbcUtils.SSL_MODE_KEY, Map.of(JdbcUtils.MODE_KEY, sslMode)); } @@ -179,18 +156,18 @@ void testSslModesUnsecuredNoTunnel() throws Exception { final JsonNode config = getMockedSSLConfig(sslMode); addNoTunnel((ObjectNode) config); - final AirbyteConnectionStatus connectionStatus = PostgresSource.sshWrappedSource().check(config); + final AirbyteConnectionStatus connectionStatus = source().check(config); assertEquals(AirbyteConnectionStatus.Status.FAILED, connectionStatus.getStatus()); - assertTrue(connectionStatus.getMessage().contains("Unsecured connection not allowed")); + assertTrue(connectionStatus.getMessage().contains("Unsecured connection not allowed"), connectionStatus.getMessage()); } } - private AirbyteConnectionStatus checkWithTunnel(final PostgreSQLContainer db, final String sslmode, final boolean innerAddress) - throws Exception { - final ImmutableMap.Builder configBuilderWithSSLMode = getDatabaseConfigBuilderWithSSLMode(db, sslmode, true); + private AirbyteConnectionStatus checkWithTunnel(final PostgresTestDatabase db, SshBastionContainer bastion, final String sslmode) throws Exception { + final var configBuilderWithSSLMode = getDatabaseConfigBuilderWithSSLMode(db, sslmode, true); final JsonNode configWithSSLModeDisable = - bastion.getTunnelConfig(SshTunnel.TunnelMethod.SSH_PASSWORD_AUTH, 
configBuilderWithSSLMode, innerAddress); - return PostgresSource.sshWrappedSource().check(configWithSSLModeDisable); + bastion.getTunnelConfig(SshTunnel.TunnelMethod.SSH_PASSWORD_AUTH, configBuilderWithSSLMode, false); + ((ObjectNode) configWithSSLModeDisable).put(JdbcUtils.JDBC_URL_PARAMS_KEY, "connectTimeout=1"); + return source().check(configWithSSLModeDisable); } private static void addNoTunnel(final ObjectNode config) { diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresCdcGetPublicizedTablesTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresCdcGetPublicizedTablesTest.java index 0381656ab45b..85aaf6c87ce6 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresCdcGetPublicizedTablesTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresCdcGetPublicizedTablesTest.java @@ -9,29 +9,18 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.Database; -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; import io.airbyte.cdk.db.jdbc.DefaultJdbcDatabase; import io.airbyte.cdk.db.jdbc.JdbcDatabase; import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.testutils.PostgreSQLContainerHelper; -import io.airbyte.commons.io.IOs; +import io.airbyte.cdk.testutils.PostgresTestDatabase; import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.string.Strings; import io.airbyte.protocol.models.v0.AirbyteStreamNameNamespacePair; import java.sql.SQLException; import java.util.List; import java.util.Set; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.AfterAll; -import 
org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.DockerImageName; -import org.testcontainers.utility.MountableFile; /** * This class tests the {@link PostgresCatalogHelper#getPublicizedTables} method. @@ -39,103 +28,72 @@ class PostgresCdcGetPublicizedTablesTest { private static final String SCHEMA_NAME = "public"; - private static final String PUBLICATION = "publication_test_12"; - private static final String REPLICATION_SLOT = "replication_slot_test_12"; protected static final int INITIAL_WAITING_SECONDS = 30; - private static PostgreSQLContainer container; - private JsonNode config; - - @BeforeAll - static void init() { - final DockerImageName myImage = DockerImageName.parse("debezium/postgres:13-alpine").asCompatibleSubstituteFor("postgres"); - container = new PostgreSQLContainer<>(myImage) - .withCopyFileToContainer(MountableFile.forClasspathResource("postgresql.conf"), "/etc/postgresql/postgresql.conf") - .withCommand("postgres -c config_file=/etc/postgresql/postgresql.conf"); - container.start(); - } - - @AfterAll - static void cleanUp() { - container.close(); - } + private String publication; + private String replicationSlot; + private PostgresTestDatabase testdb; @BeforeEach void setup() throws Exception { - final String dbName = Strings.addRandomSuffix("db", "_", 10).toLowerCase(); - final String initScriptName = "init_" + dbName.concat(".sql"); - final String tmpFilePath = IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE " + dbName + ";"); - PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), container); - - this.config = getConfig(container, dbName); + testdb = PostgresTestDatabase.make("postgres:16-bullseye", "withConf"); + replicationSlot = testdb.withSuffix("replication_slot"); + publication = 
testdb.withSuffix("publication"); + testdb.database.query(ctx -> { + ctx.execute("create table table_1 (id serial primary key, text_column text);"); + ctx.execute("create table table_2 (id serial primary key, text_column text);"); + ctx.execute("create table table_irrelevant (id serial primary key, text_column text);"); + ctx.execute("SELECT pg_create_logical_replication_slot('" + replicationSlot + "', 'pgoutput');"); + // create a publication including table_1 and table_2, but not table_irrelevant + ctx.execute("CREATE PUBLICATION " + publication + " FOR TABLE table_1, table_2;"); + return null; + }); + } - try (final DSLContext dslContext = getDslContext(config)) { - final Database database = getDatabase(dslContext); - database.query(ctx -> { - ctx.execute("create table table_1 (id serial primary key, text_column text);"); - ctx.execute("create table table_2 (id serial primary key, text_column text);"); - ctx.execute("create table table_irrelevant (id serial primary key, text_column text);"); - ctx.execute("SELECT pg_create_logical_replication_slot('" + REPLICATION_SLOT + "', 'pgoutput');"); - // create a publication including table_1 and table_2, but not table_irrelevant - ctx.execute("CREATE PUBLICATION " + PUBLICATION + " FOR TABLE table_1, table_2;"); - return null; - }); - } + @AfterEach + void tearDown() throws SQLException { + testdb.database.query(ctx -> { + ctx.execute("DROP PUBLICATION " + publication + ";"); + ctx.execute("SELECT pg_drop_replication_slot('" + replicationSlot + "');"); + return null; + }); + testdb.close(); } - private JsonNode getConfig(final PostgreSQLContainer psqlDb, final String dbName) { + private JsonNode getConfig() { return Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, psqlDb.getHost()) - .put(JdbcUtils.PORT_KEY, psqlDb.getFirstMappedPort()) - .put(JdbcUtils.DATABASE_KEY, dbName) + .put(JdbcUtils.HOST_KEY, testdb.container.getHost()) + .put(JdbcUtils.PORT_KEY, testdb.container.getFirstMappedPort()) + 
.put(JdbcUtils.DATABASE_KEY, testdb.dbName) .put(JdbcUtils.SCHEMAS_KEY, List.of(SCHEMA_NAME)) - .put(JdbcUtils.USERNAME_KEY, psqlDb.getUsername()) - .put(JdbcUtils.PASSWORD_KEY, psqlDb.getPassword()) + .put(JdbcUtils.USERNAME_KEY, testdb.userName) + .put(JdbcUtils.PASSWORD_KEY, testdb.password) .put(JdbcUtils.SSL_KEY, false) .put("is_test", true) .build()); } - private static DSLContext getDslContext(final JsonNode config) { - return DSLContextFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.POSTGRESQL.getDriverClassName(), - String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - config.get(JdbcUtils.HOST_KEY).asText(), - config.get(JdbcUtils.PORT_KEY).asInt(), - config.get(JdbcUtils.DATABASE_KEY).asText()), - SQLDialect.POSTGRES); - } - - private static Database getDatabase(final DSLContext dslContext) { - return new Database(dslContext); - } - @Test - public void testGetPublicizedTables() { - try (final DSLContext dslContext = getDslContext(config)) { - final JdbcDatabase database = new DefaultJdbcDatabase(dslContext.diagnosticsDataSource()); - // when source config does not exist - assertEquals(0, PostgresCatalogHelper.getPublicizedTables(database).size()); - - // when config is not cdc - database.setSourceConfig(config); - assertEquals(0, PostgresCatalogHelper.getPublicizedTables(database).size()); - - // when config is cdc - ((ObjectNode) config).set("replication_method", Jsons.jsonNode(ImmutableMap.of( - "replication_slot", REPLICATION_SLOT, - "initial_waiting_seconds", INITIAL_WAITING_SECONDS, - "publication", PUBLICATION))); - database.setSourceConfig(config); - final Set expectedTables = Set.of( - new AirbyteStreamNameNamespacePair("table_1", SCHEMA_NAME), - new AirbyteStreamNameNamespacePair("table_2", SCHEMA_NAME)); - // table_irrelevant is not included because it is not part of the publication - assertEquals(expectedTables, 
PostgresCatalogHelper.getPublicizedTables(database)); - } catch (final SQLException e) { - throw new RuntimeException(e); - } + public void testGetPublicizedTables() throws SQLException { + final JdbcDatabase database = new DefaultJdbcDatabase(testdb.dslContext.diagnosticsDataSource()); + // when source config does not exist + assertEquals(0, PostgresCatalogHelper.getPublicizedTables(database).size()); + + // when config is not cdc + database.setSourceConfig(getConfig()); + assertEquals(0, PostgresCatalogHelper.getPublicizedTables(database).size()); + + // when config is cdc + final ObjectNode cdcConfig = ((ObjectNode) getConfig()); + cdcConfig.set("replication_method", Jsons.jsonNode(ImmutableMap.of( + "replication_slot", replicationSlot, + "initial_waiting_seconds", INITIAL_WAITING_SECONDS, + "publication", publication))); + database.setSourceConfig(cdcConfig); + final Set expectedTables = Set.of( + new AirbyteStreamNameNamespacePair("table_1", SCHEMA_NAME), + new AirbyteStreamNameNamespacePair("table_2", SCHEMA_NAME)); + // table_irrelevant is not included because it is not part of the publication + assertEquals(expectedTables, PostgresCatalogHelper.getPublicizedTables(database)); } } diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresJdbcSourceAcceptanceTest.java index ed044477ebfc..343e736cd629 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresJdbcSourceAcceptanceTest.java @@ -26,6 +26,7 @@ import io.airbyte.cdk.integrations.source.relationaldb.models.DbStreamState; import io.airbyte.cdk.testutils.PostgreSQLContainerHelper; import 
io.airbyte.commons.features.EnvVariableFeatureFlags; +import io.airbyte.commons.features.FeatureFlagsWrapper; import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; @@ -59,19 +60,11 @@ import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; import org.testcontainers.containers.PostgreSQLContainer; import org.testcontainers.utility.MountableFile; -import uk.org.webcompere.systemstubs.environment.EnvironmentVariables; -import uk.org.webcompere.systemstubs.jupiter.SystemStub; -import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension; -@ExtendWith(SystemStubsExtension.class) class PostgresJdbcSourceAcceptanceTest extends JdbcSourceAcceptanceTest { - @SystemStub - private EnvironmentVariables environmentVariables; - private static final String DATABASE = "new_db"; protected static final String USERNAME_WITHOUT_PERMISSION = "new_user"; protected static final String PASSWORD_WITHOUT_PERMISSION = "new_password"; @@ -89,7 +82,6 @@ static void init() { @Override @BeforeEach public void setup() throws Exception { - environmentVariables.set(EnvVariableFeatureFlags.USE_STREAM_CAPABLE_STATE, "true"); final String dbName = Strings.addRandomSuffix("db", "_", 10).toLowerCase(); COLUMN_CLAUSE_WITH_PK = "id INTEGER, name VARCHAR(200) NOT NULL, updated_at DATE NOT NULL, wakeup_at TIMETZ NOT NULL, last_visited_at TIMESTAMPTZ NOT NULL, last_comment_at TIMESTAMP NOT NULL"; @@ -187,6 +179,11 @@ public void setup() throws Exception { INSERT_TABLE_WITHOUT_CURSOR_TYPE_QUERY = "INSERT INTO %s VALUES(B'101');"; } + @Override + protected void maybeSetShorterConnectionTimeout() { + ((ObjectNode) config).put(JdbcUtils.JDBC_URL_PARAMS_KEY, "connectTimeout=1"); + } + @Override protected List getAirbyteMessagesReadOneColumn() { return getTestMessages().stream() @@ -289,7 +286,9 @@ public boolean supportsSchemas() 
{ @Override public AbstractJdbcSource getJdbcSource() { - return new PostgresSource(); + var source = new PostgresSource(); + source.setFeatureFlags(FeatureFlagsWrapper.overridingUseStreamCapableState(new EnvVariableFeatureFlags(), true)); + return source; } @Override @@ -455,6 +454,7 @@ protected boolean supportsPerStream() { */ @Test void testCheckIncorrectPasswordFailure() throws Exception { + maybeSetShorterConnectionTimeout(); ((ObjectNode) config).put(JdbcUtils.PASSWORD_KEY, "fake"); final AirbyteConnectionStatus status = source.check(config); assertEquals(AirbyteConnectionStatus.Status.FAILED, status.getStatus()); @@ -463,6 +463,7 @@ void testCheckIncorrectPasswordFailure() throws Exception { @Test public void testCheckIncorrectUsernameFailure() throws Exception { + maybeSetShorterConnectionTimeout(); ((ObjectNode) config).put(JdbcUtils.USERNAME_KEY, "fake"); final AirbyteConnectionStatus status = source.check(config); assertEquals(AirbyteConnectionStatus.Status.FAILED, status.getStatus()); @@ -471,6 +472,7 @@ public void testCheckIncorrectUsernameFailure() throws Exception { @Test public void testCheckIncorrectHostFailure() throws Exception { + maybeSetShorterConnectionTimeout(); ((ObjectNode) config).put(JdbcUtils.HOST_KEY, "localhost2"); final AirbyteConnectionStatus status = source.check(config); assertEquals(AirbyteConnectionStatus.Status.FAILED, status.getStatus()); @@ -479,6 +481,7 @@ public void testCheckIncorrectHostFailure() throws Exception { @Test public void testCheckIncorrectPortFailure() throws Exception { + maybeSetShorterConnectionTimeout(); ((ObjectNode) config).put(JdbcUtils.PORT_KEY, "30000"); final AirbyteConnectionStatus status = source.check(config); assertEquals(AirbyteConnectionStatus.Status.FAILED, status.getStatus()); @@ -487,6 +490,7 @@ public void testCheckIncorrectPortFailure() throws Exception { @Test public void testCheckIncorrectDataBaseFailure() throws Exception { + maybeSetShorterConnectionTimeout(); ((ObjectNode) 
config).put(JdbcUtils.DATABASE_KEY, "wrongdatabase"); final AirbyteConnectionStatus status = source.check(config); assertEquals(AirbyteConnectionStatus.Status.FAILED, status.getStatus()); @@ -495,6 +499,7 @@ public void testCheckIncorrectDataBaseFailure() throws Exception { @Test public void testUserHasNoPermissionToDataBase() throws Exception { + maybeSetShorterConnectionTimeout(); database.execute(connection -> connection.createStatement() .execute(String.format("create user %s with password '%s';", USERNAME_WITHOUT_PERMISSION, PASSWORD_WITHOUT_PERMISSION))); database.execute(connection -> connection.createStatement() diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceOperationsTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceOperationsTest.java index fafa31906417..7eee25049e55 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceOperationsTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceOperationsTest.java @@ -10,13 +10,8 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.cdk.db.Database; -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; import io.airbyte.cdk.db.jdbc.DateTimeConverter; -import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.integrations.util.HostPortResolver; +import io.airbyte.cdk.testutils.PostgresTestDatabase; import io.airbyte.commons.json.Jsons; import java.sql.Connection; import java.sql.PreparedStatement; @@ -26,71 +21,30 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; -import org.jooq.DSLContext; -import 
org.jooq.SQLDialect; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.MountableFile; class PostgresSourceOperationsTest { private final PostgresSourceOperations postgresSourceOperations = new PostgresSourceOperations(); - private PostgreSQLContainer container; - private Database database; + private PostgresTestDatabase testdb; private final String cursorColumn = "cursor_column"; @BeforeEach - public void init() throws SQLException { - container = new PostgreSQLContainer<>("postgres:14-alpine") - .withCopyFileToContainer(MountableFile.forClasspathResource("postgresql.conf"), - "/etc/postgresql/postgresql.conf") - .withCommand("postgres -c config_file=/etc/postgresql/postgresql.conf"); - container.start(); - final JsonNode replicationMethod = Jsons.jsonNode(ImmutableMap.builder() - .put("method", "Standard") - .build()); - final JsonNode config = Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, HostPortResolver.resolveHost(container)) - .put(JdbcUtils.PORT_KEY, HostPortResolver.resolvePort(container)) - .put(JdbcUtils.DATABASE_KEY, container.getDatabaseName()) - .put(JdbcUtils.USERNAME_KEY, container.getUsername()) - .put(JdbcUtils.PASSWORD_KEY, container.getPassword()) - .put(JdbcUtils.SSL_KEY, false) - .put("replication_method", replicationMethod) - .build()); - - final DSLContext dslContext = DSLContextFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.POSTGRESQL.getDriverClassName(), - String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - container.getHost(), - container.getFirstMappedPort(), - config.get(JdbcUtils.DATABASE_KEY).asText()), - SQLDialect.POSTGRES); - database = new Database(dslContext); - database.query(ctx -> { - ctx.execute(String.format("CREATE SCHEMA %S;", container.getDatabaseName())); 
- return null; - }); + public void init() { + testdb = PostgresTestDatabase.make("postgres:16-bullseye", "withConf"); } @AfterEach public void tearDown() { - try { - - container.close(); - } catch (final Exception e) { - throw new RuntimeException(e); - } + testdb.close(); } @Test public void numericColumnAsCursor() throws SQLException { - final String tableName = container.getDatabaseName() + ".numeric_table"; + final String tableName = "numeric_table"; final String createTableQuery = String.format("CREATE TABLE %s(id INTEGER PRIMARY KEY, %s NUMERIC(38, 0));", tableName, cursorColumn); @@ -110,9 +64,9 @@ public void numericColumnAsCursor() throws SQLException { } final List actualRecords = new ArrayList<>(); - try (final Connection connection = container.createConnection("")) { + try (final Connection connection = testdb.container.createConnection("")) { final PreparedStatement preparedStatement = connection.prepareStatement( - "SELECT * from " + tableName + " WHERE " + cursorColumn + " > ?"); + "SELECT * FROM " + tableName + " WHERE " + cursorColumn + " > ?"); postgresSourceOperations.setCursorField(preparedStatement, 1, PostgresType.NUMERIC, @@ -134,7 +88,7 @@ public void numericColumnAsCursor() throws SQLException { @Test public void timeColumnAsCursor() throws SQLException { - final String tableName = container.getDatabaseName() + ".time_table"; + final String tableName = "time_table"; final String createTableQuery = String.format("CREATE TABLE %s(id INTEGER PRIMARY KEY, %s TIME);", tableName, cursorColumn); @@ -150,7 +104,7 @@ public void timeColumnAsCursor() throws SQLException { } final List actualRecords = new ArrayList<>(); - try (final Connection connection = container.createConnection("")) { + try (final Connection connection = testdb.container.createConnection("")) { final PreparedStatement preparedStatement = connection.prepareStatement( "SELECT * from " + tableName + " WHERE " + cursorColumn + " > ?"); 
postgresSourceOperations.setCursorField(preparedStatement, @@ -182,13 +136,9 @@ public void testParseMoneyValue() { assertEquals("-1000000.001", PostgresSourceOperations.parseMoneyValue("-£1,000,000.001")); } - protected void executeQuery(final String query) { - try { - database.query( - ctx -> ctx - .execute(query)); - } catch (final SQLException e) { - throw new RuntimeException(e); + protected void executeQuery(final String query) throws SQLException { + try (final Connection connection = testdb.container.createConnection("")) { + connection.createStatement().execute(query); } } diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceSSLTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceSSLTest.java index d8fc65ea15b7..414317dc1626 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceSSLTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceSSLTest.java @@ -16,14 +16,9 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Sets; -import io.airbyte.cdk.db.Database; -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.testutils.PostgreSQLContainerHelper; -import io.airbyte.commons.io.IOs; +import io.airbyte.cdk.testutils.PostgresTestDatabase; import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.string.Strings; import io.airbyte.commons.util.MoreIterators; import io.airbyte.protocol.models.Field; import io.airbyte.protocol.models.JsonSchemaType; @@ -40,15 +35,9 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; -import org.jooq.DSLContext; 
-import org.jooq.SQLDialect; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.DockerImageName; -import org.testcontainers.utility.MountableFile; class PostgresSourceSSLTest { @@ -84,89 +73,47 @@ class PostgresSourceSSLTest { createRecord(STREAM_NAME, map("id", new BigDecimal("2.0"), "name", "vegeta", "power", 9000.1), SCHEMA_NAME), createRecord(STREAM_NAME, map("id", null, "name", "piccolo", "power", null), SCHEMA_NAME)); - private static PostgreSQLContainer PSQL_DB; - - private String dbName; - - @BeforeAll - static void init() { - PSQL_DB = new PostgreSQLContainer<>(DockerImageName.parse("marcosmarxm/postgres-ssl:dev").asCompatibleSubstituteFor("postgres")) - .withCommand("postgres -c ssl=on -c ssl_cert_file=/var/lib/postgresql/server.crt -c ssl_key_file=/var/lib/postgresql/server.key"); - PSQL_DB.start(); - } + private PostgresTestDatabase testdb; @BeforeEach void setup() throws Exception { - dbName = Strings.addRandomSuffix("db", "_", 10).toLowerCase(); - - final String initScriptName = "init_" + dbName.concat(".sql"); - final String tmpFilePath = IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE " + dbName + ";"); - PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), PSQL_DB); - - final JsonNode config = getConfig(PSQL_DB, dbName); - try (final DSLContext dslContext = getDslContext(config)) { - final Database database = getDatabase(dslContext); - database.query(ctx -> { - ctx.fetch( - "CREATE TABLE id_and_name(id NUMERIC(20, 10) NOT NULL, name VARCHAR(200) NOT NULL, power double precision NOT NULL, PRIMARY KEY (id));"); - ctx.fetch("CREATE INDEX i1 ON id_and_name (id);"); - ctx.fetch( - "INSERT INTO id_and_name (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), 
('NaN', 'piccolo', '-Infinity');"); - - ctx.fetch("CREATE TABLE id_and_name2(id NUMERIC(20, 10) NOT NULL, name VARCHAR(200) NOT NULL, power double precision NOT NULL);"); - ctx.fetch( - "INSERT INTO id_and_name2 (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');"); - - ctx.fetch( - "CREATE TABLE names(first_name VARCHAR(200) NOT NULL, last_name VARCHAR(200) NOT NULL, power double precision NOT NULL, PRIMARY KEY (first_name, last_name));"); - ctx.fetch( - "INSERT INTO names (first_name, last_name, power) VALUES ('san', 'goku', 'Infinity'), ('prince', 'vegeta', 9000.1), ('piccolo', 'junior', '-Infinity');"); - return null; - }); - } - } - - private static Database getDatabase(final DSLContext dslContext) { - return new Database(dslContext); + testdb = PostgresTestDatabase.make("marcosmarxm/postgres-ssl:dev", "withSSL"); + testdb.database.query(ctx -> { + ctx.fetch( + "CREATE TABLE id_and_name(id NUMERIC(20, 10) NOT NULL, name VARCHAR(200) NOT NULL, power double precision NOT NULL, PRIMARY KEY (id));"); + ctx.fetch("CREATE INDEX i1 ON id_and_name (id);"); + ctx.fetch("INSERT INTO id_and_name (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');"); + ctx.fetch("CREATE TABLE id_and_name2(id NUMERIC(20, 10) NOT NULL, name VARCHAR(200) NOT NULL, power double precision NOT NULL);"); + ctx.fetch("INSERT INTO id_and_name2 (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');"); + ctx.fetch( + "CREATE TABLE names(first_name VARCHAR(200) NOT NULL, last_name VARCHAR(200) NOT NULL, power double precision NOT NULL, PRIMARY KEY (first_name, last_name));"); + ctx.fetch( + "INSERT INTO names (first_name, last_name, power) VALUES ('san', 'goku', 'Infinity'), ('prince', 'vegeta', 9000.1), ('piccolo', 'junior', '-Infinity');"); + return null; + }); } - private static DSLContext getDslContext(final JsonNode config) { - return 
DSLContextFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.POSTGRESQL.getDriverClassName(), - String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - config.get(JdbcUtils.HOST_KEY).asText(), - config.get(JdbcUtils.PORT_KEY).asInt(), - config.get(JdbcUtils.DATABASE_KEY).asText()), - SQLDialect.POSTGRES); + @AfterEach + void tearDown() { + testdb.close(); } - private JsonNode getConfig(final PostgreSQLContainer psqlDb, final String dbName) { + private JsonNode getConfig() { return Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, psqlDb.getHost()) - .put(JdbcUtils.PORT_KEY, psqlDb.getFirstMappedPort()) - .put(JdbcUtils.DATABASE_KEY, dbName) + .put(JdbcUtils.HOST_KEY, testdb.container.getHost()) + .put(JdbcUtils.PORT_KEY, testdb.container.getFirstMappedPort()) + .put(JdbcUtils.DATABASE_KEY, testdb.dbName) .put(JdbcUtils.SCHEMAS_KEY, List.of("public")) - .put(JdbcUtils.USERNAME_KEY, psqlDb.getUsername()) - .put(JdbcUtils.PASSWORD_KEY, psqlDb.getPassword()) + .put(JdbcUtils.USERNAME_KEY, testdb.userName) + .put(JdbcUtils.PASSWORD_KEY, testdb.password) .put(JdbcUtils.SSL_KEY, true) .put("ssl_mode", ImmutableMap.builder().put("mode", "require").build()) .build()); } - private JsonNode getConfig(final PostgreSQLContainer psqlDb) { - return getConfig(psqlDb, psqlDb.getDatabaseName()); - } - - @AfterAll - static void cleanUp() { - PSQL_DB.close(); - } - @Test void testDiscoverWithPk() throws Exception { - final AirbyteCatalog actual = new PostgresSource().discover(getConfig(PSQL_DB, dbName)); + final AirbyteCatalog actual = new PostgresSource().discover(getConfig()); actual.getStreams().forEach(actualStream -> { final Optional expectedStream = CATALOG.getStreams().stream().filter(stream -> stream.getName().equals(actualStream.getName())).findAny(); @@ -181,7 +128,7 @@ void testReadSuccess() throws Exception { 
CONFIGURED_CATALOG.withStreams(CONFIGURED_CATALOG.getStreams().stream().filter(s -> s.getStream().getName().equals(STREAM_NAME)) .collect(Collectors.toList())); - final Set actualMessages = MoreIterators.toSet(new PostgresSource().read(getConfig(PSQL_DB, dbName), configuredCatalog, null)); + final Set actualMessages = MoreIterators.toSet(new PostgresSource().read(getConfig(), configuredCatalog, null)); setEmittedAtToNull(actualMessages); assertEquals(ASCII_MESSAGES, actualMessages); @@ -189,7 +136,7 @@ void testReadSuccess() throws Exception { @Test void testIsCdc() { - final JsonNode config = getConfig(PSQL_DB, dbName); + final JsonNode config = getConfig(); assertFalse(PostgresUtils.isCdc(config)); diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceTest.java index 3501515e8a6b..a1069c9b00b4 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceTest.java @@ -26,12 +26,11 @@ import io.airbyte.cdk.integrations.source.relationaldb.CursorInfo; import io.airbyte.cdk.integrations.source.relationaldb.state.StateManager; import io.airbyte.cdk.integrations.source.relationaldb.state.StateManagerFactory; -import io.airbyte.cdk.testutils.PostgreSQLContainerHelper; +import io.airbyte.cdk.testutils.PostgresTestDatabase; import io.airbyte.commons.exceptions.ConfigErrorException; import io.airbyte.commons.features.EnvVariableFeatureFlags; -import io.airbyte.commons.io.IOs; +import io.airbyte.commons.features.FeatureFlagsWrapper; import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.string.Strings; import io.airbyte.commons.util.MoreIterators; import 
io.airbyte.protocol.models.Field; import io.airbyte.protocol.models.JsonSchemaPrimitiveUtil.JsonSchemaPrimitive; @@ -59,23 +58,12 @@ import java.util.stream.Collectors; import org.jooq.DSLContext; import org.jooq.SQLDialect; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.MountableFile; -import uk.org.webcompere.systemstubs.environment.EnvironmentVariables; -import uk.org.webcompere.systemstubs.jupiter.SystemStub; -import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension; - -@ExtendWith(SystemStubsExtension.class) -class PostgresSourceTest { - @SystemStub - private EnvironmentVariables environmentVariables; +class PostgresSourceTest { private static final String SCHEMA_NAME = "public"; private static final String STREAM_NAME = "id_and_name"; @@ -144,47 +132,37 @@ class PostgresSourceTest { createRecord(STREAM_NAME_PRIVILEGES_TEST_CASE_VIEW, SCHEMA_NAME, ImmutableMap.of("id", 2, "name", "Jack")), createRecord(STREAM_NAME_PRIVILEGES_TEST_CASE_VIEW, SCHEMA_NAME, ImmutableMap.of("id", 3, "name", "Antuan"))); - private static PostgreSQLContainer PSQL_DB; - - private String dbName; - - @BeforeAll - static void init() { - PSQL_DB = new PostgreSQLContainer<>("postgres:13-alpine"); - PSQL_DB.start(); - } + private PostgresTestDatabase testdb; @BeforeEach void setup() throws Exception { - environmentVariables.set(EnvVariableFeatureFlags.USE_STREAM_CAPABLE_STATE, "true"); - dbName = Strings.addRandomSuffix("db", "_", 10).toLowerCase(); - - final String initScriptName = "init_" + dbName.concat(".sql"); - final String tmpFilePath = IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE " + dbName + ";"); - 
PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), PSQL_DB); + testdb = PostgresTestDatabase.make("postgres:16-bullseye"); + testdb.database.query(ctx -> { + ctx.fetch( + "CREATE TABLE id_and_name(id NUMERIC(20, 10) NOT NULL, name VARCHAR(200) NOT NULL, power double precision NOT NULL, PRIMARY KEY (id));"); + ctx.fetch("CREATE INDEX i1 ON id_and_name (id);"); + ctx.fetch("INSERT INTO id_and_name (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');"); + + ctx.fetch("CREATE TABLE id_and_name2(id NUMERIC(20, 10) NOT NULL, name VARCHAR(200) NOT NULL, power double precision NOT NULL);"); + ctx.fetch("INSERT INTO id_and_name2 (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');"); + + ctx.fetch( + "CREATE TABLE names(first_name VARCHAR(200) NOT NULL, last_name VARCHAR(200) NOT NULL, power double precision NOT NULL, PRIMARY KEY (first_name, last_name));"); + ctx.fetch( + "INSERT INTO names (first_name, last_name, power) VALUES ('san', 'goku', 'Infinity'), ('prince', 'vegeta', 9000.1), ('piccolo', 'junior', '-Infinity');"); + return null; + }); + } - final JsonNode config = getConfig(PSQL_DB, dbName); + @AfterEach + void tearDown() throws SQLException { + testdb.close(); + } - try (final DSLContext dslContext = getDslContext(config)) { - final Database database = getDatabase(dslContext); - database.query(ctx -> { - ctx.fetch( - "CREATE TABLE id_and_name(id NUMERIC(20, 10) NOT NULL, name VARCHAR(200) NOT NULL, power double precision NOT NULL, PRIMARY KEY (id));"); - ctx.fetch("CREATE INDEX i1 ON id_and_name (id);"); - ctx.fetch( - "INSERT INTO id_and_name (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');"); - - ctx.fetch("CREATE TABLE id_and_name2(id NUMERIC(20, 10) NOT NULL, name VARCHAR(200) NOT NULL, power double precision NOT NULL);"); - ctx.fetch( - "INSERT INTO id_and_name2 (id, name, 
power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');"); - - ctx.fetch( - "CREATE TABLE names(first_name VARCHAR(200) NOT NULL, last_name VARCHAR(200) NOT NULL, power double precision NOT NULL, PRIMARY KEY (first_name, last_name));"); - ctx.fetch( - "INSERT INTO names (first_name, last_name, power) VALUES ('san', 'goku', 'Infinity'), ('prince', 'vegeta', 9000.1), ('piccolo', 'junior', '-Infinity');"); - return null; - }); - } + public PostgresSource source() { + var source = new PostgresSource(); + source.setFeatureFlags(FeatureFlagsWrapper.overridingUseStreamCapableState(new EnvVariableFeatureFlags(), true)); + return source; } private static DSLContext getDslContextWithSpecifiedUser(final JsonNode config, final String username, final String password) { @@ -203,34 +181,18 @@ private static Database getDatabase(final DSLContext dslContext) { return new Database(dslContext); } - private static DSLContext getDslContext(final JsonNode config) { - return DSLContextFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.POSTGRESQL.getDriverClassName(), - String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - config.get(JdbcUtils.HOST_KEY).asText(), - config.get(JdbcUtils.PORT_KEY).asInt(), - config.get(JdbcUtils.DATABASE_KEY).asText()), - SQLDialect.POSTGRES); + private JsonNode getConfig() { + return getConfig(testdb.userName, testdb.password); } - private JsonNode getConfig(final PostgreSQLContainer psqlDb, final String dbName) { - return Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, psqlDb.getHost()) - .put(JdbcUtils.PORT_KEY, psqlDb.getFirstMappedPort()) - .put(JdbcUtils.DATABASE_KEY, dbName) - .put(JdbcUtils.SCHEMAS_KEY, List.of(SCHEMA_NAME)) - .put(JdbcUtils.USERNAME_KEY, psqlDb.getUsername()) - .put(JdbcUtils.PASSWORD_KEY, psqlDb.getPassword()) - .put(JdbcUtils.SSL_KEY, false) - .build()); + private JsonNode getConfig(final 
String user, final String password) { + return getConfig(testdb.dbName, user, password); } - private JsonNode getConfig(final PostgreSQLContainer psqlDb, final String dbName, final String user, final String password) { + private JsonNode getConfig(final String dbName, final String user, final String password) { return Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, psqlDb.getHost()) - .put(JdbcUtils.PORT_KEY, psqlDb.getFirstMappedPort()) + .put(JdbcUtils.HOST_KEY, testdb.container.getHost()) + .put(JdbcUtils.PORT_KEY, testdb.container.getFirstMappedPort()) .put(JdbcUtils.DATABASE_KEY, dbName) .put(JdbcUtils.SCHEMAS_KEY, List.of(SCHEMA_NAME)) .put(JdbcUtils.USERNAME_KEY, user) @@ -239,118 +201,103 @@ private JsonNode getConfig(final PostgreSQLContainer psqlDb, final String dbN .build()); } - private JsonNode getConfig(final PostgreSQLContainer psqlDb, final String user, final String password) { - return getConfig(psqlDb, psqlDb.getDatabaseName(), user, password); - } - - private JsonNode getConfig(final PostgreSQLContainer psqlDb) { - return getConfig(psqlDb, psqlDb.getDatabaseName()); - } - - @AfterAll - static void cleanUp() { - PSQL_DB.close(); + private JsonNode getConfig(PostgresTestDatabase db) { + return Jsons.jsonNode(db.makeConfigBuilder() + .put(JdbcUtils.SCHEMAS_KEY, List.of(SCHEMA_NAME)) + .put(JdbcUtils.SSL_KEY, false) + .build()); } @Test public void testCanReadTablesAndColumnsWithDoubleQuotes() throws Exception { - try (final PostgreSQLContainer db = new PostgreSQLContainer<>("postgres:13-alpine")) { - db.start(); - - final AirbyteCatalog airbyteCatalog = new AirbyteCatalog().withStreams(List.of( - CatalogHelpers.createAirbyteStream( - STREAM_NAME_WITH_QUOTES, - SCHEMA_NAME, - Field.of("id", JsonSchemaType.NUMBER), - Field.of("\"test_column\"", JsonSchemaType.STRING)) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) - .withSourceDefinedPrimaryKey(List.of(List.of("id"))))); - - final JsonNode 
config = getConfig(db); - try (final DSLContext dslContext = getDslContext(config)) { - final Database database = getDatabase(dslContext); - - database.query(ctx -> { - ctx.fetch("CREATE TABLE \"\"\"test_dq_table\"\"\"(id INTEGER PRIMARY KEY, \"\"\"test_column\"\"\" varchar);"); - ctx.fetch("INSERT INTO \"\"\"test_dq_table\"\"\" (id, \"\"\"test_column\"\"\") VALUES (1,'test1'), (2, 'test2');"); - return null; - }); - } - final Set actualMessages = - MoreIterators.toSet(new PostgresSource().read(config, CatalogHelpers.toDefaultConfiguredCatalog(airbyteCatalog), null)); - setEmittedAtToNull(actualMessages); - - assertEquals(DOUBLE_QUOTED_MESSAGES, actualMessages); - db.stop(); - } + final AirbyteCatalog airbyteCatalog = new AirbyteCatalog().withStreams(List.of( + CatalogHelpers.createAirbyteStream( + STREAM_NAME_WITH_QUOTES, + SCHEMA_NAME, + Field.of("id", JsonSchemaType.NUMBER), + Field.of("\"test_column\"", JsonSchemaType.STRING)) + .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) + .withSourceDefinedPrimaryKey(List.of(List.of("id"))))); + testdb.database.query(ctx -> { + ctx.fetch("CREATE TABLE \"\"\"test_dq_table\"\"\"(id INTEGER PRIMARY KEY, \"\"\"test_column\"\"\" varchar);"); + ctx.fetch("INSERT INTO \"\"\"test_dq_table\"\"\" (id, \"\"\"test_column\"\"\") VALUES (1,'test1'), (2, 'test2');"); + return null; + }); + final Set actualMessages = + MoreIterators.toSet(source().read( + getConfig(), + CatalogHelpers.toDefaultConfiguredCatalog(airbyteCatalog), + null)); + setEmittedAtToNull(actualMessages); + assertEquals(DOUBLE_QUOTED_MESSAGES, actualMessages); + testdb.database.query(ctx -> ctx.execute("DROP TABLE \"\"\"test_dq_table\"\"\";")); } @Test public void testCanReadUtf8() throws Exception { // force the db server to start with sql_ascii encoding to verify the source can read UTF8 even when // default settings are in another encoding - try (final PostgreSQLContainer db = new 
PostgreSQLContainer<>("postgres:13-alpine").withCommand("postgres -c client_encoding=sql_ascii")) { - db.start(); - final JsonNode config = getConfig(db); - try (final DSLContext dslContext = getDslContext(config)) { - final Database database = getDatabase(dslContext); - database.query(ctx -> { - ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); - ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,E'\\u2013 someutfstring'), (2, E'\\u2215');"); - return null; - }); - } - - final Set actualMessages = MoreIterators.toSet(new PostgresSource().read(config, CONFIGURED_CATALOG, null)); + try (final var asciiTestDB = PostgresTestDatabase.make("postgres:16-alpine", "withASCII")) { + asciiTestDB.database.query(ctx -> { + ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,E'\\u2013 someutfstring'), (2, E'\\u2215');"); + return null; + }); + final var config = Jsons.jsonNode(ImmutableMap.builder() + .put(JdbcUtils.HOST_KEY, asciiTestDB.container.getHost()) + .put(JdbcUtils.PORT_KEY, asciiTestDB.container.getFirstMappedPort()) + .put(JdbcUtils.DATABASE_KEY, asciiTestDB.dbName) + .put(JdbcUtils.SCHEMAS_KEY, List.of(SCHEMA_NAME)) + .put(JdbcUtils.USERNAME_KEY, asciiTestDB.userName) + .put(JdbcUtils.PASSWORD_KEY, asciiTestDB.password) + .put(JdbcUtils.SSL_KEY, false) + .build()); + final Set actualMessages = MoreIterators.toSet(source().read(config, CONFIGURED_CATALOG, null)); setEmittedAtToNull(actualMessages); - assertEquals(UTF8_MESSAGES, actualMessages); - db.stop(); } } @Test void testUserDoesntHasPrivilegesToSelectTable() throws Exception { - try (final PostgreSQLContainer db = new PostgreSQLContainer<>("postgres:13-alpine")) { - db.start(); - final JsonNode config = getConfig(db); - try (final DSLContext dslContext = getDslContext(config)) { - final Database database = new Database(dslContext); - database.query(ctx -> { - ctx.fetch("CREATE TABLE id_and_name(id INTEGER, 
name VARCHAR(200));"); - ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'John'), (2, 'Alfred'), (3, 'Alex');"); - ctx.fetch("CREATE USER test_user_3 password '132';"); - ctx.fetch("GRANT CONNECT ON DATABASE test TO test_user_3;"); - ctx.fetch("REVOKE ALL PRIVILEGES ON TABLE public.id_and_name FROM test_user_3"); - return null; - }); - } - try (final DSLContext dslContext = getDslContextWithSpecifiedUser(config, "test_user_3", "132")) { - final Database database = new Database(dslContext); - database.query(ctx -> { - ctx.fetch("CREATE TABLE id_and_name_3(id INTEGER, name VARCHAR(200));"); - ctx.fetch("CREATE VIEW id_and_name_3_view(id, name) as\n" - + "SELECT id_and_name_3.id,\n" - + " id_and_name_3.name\n" - + "FROM id_and_name_3;\n" - + "ALTER TABLE id_and_name_3_view\n" - + " owner TO test_user_3"); - ctx.fetch("INSERT INTO id_and_name_3 (id, name) VALUES (1,'Zed'), (2, 'Jack'), (3, 'Antuan');"); - return null; - }); - } - final JsonNode anotherUserConfig = getConfig(db, "test_user_3", "132"); - final Set actualMessages = MoreIterators.toSet(new PostgresSource().read(anotherUserConfig, CONFIGURED_CATALOG, null)); - setEmittedAtToNull(actualMessages); - assertEquals(6, actualMessages.size()); - assertEquals(PRIVILEGE_TEST_CASE_EXPECTED_MESSAGES, actualMessages); - db.stop(); + testdb.database.query(ctx -> { + ctx.execute("DROP TABLE id_and_name CASCADE;"); + ctx.execute("DROP TABLE id_and_name2 CASCADE;"); + ctx.execute("DROP TABLE names CASCADE;"); + ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); + ctx.fetch("INSERT INTO id_and_name (id, name) VALUES (1,'John'), (2, 'Alfred'), (3, 'Alex');"); + ctx.fetch("CREATE USER test_user_3 password '132';"); + ctx.fetch("GRANT CONNECT ON DATABASE " + testdb.dbName + " TO test_user_3;"); + ctx.fetch("GRANT ALL ON SCHEMA public TO test_user_3"); + ctx.fetch("REVOKE ALL PRIVILEGES ON TABLE public.id_and_name FROM test_user_3"); + return null; + }); + final JsonNode config = getConfig(); + try 
(final DSLContext dslContext = getDslContextWithSpecifiedUser(config, "test_user_3", "132")) { + final Database database = new Database(dslContext); + database.query(ctx -> { + ctx.fetch("CREATE TABLE id_and_name_3(id INTEGER, name VARCHAR(200));"); + ctx.fetch("CREATE VIEW id_and_name_3_view(id, name) as\n" + + "SELECT id_and_name_3.id,\n" + + " id_and_name_3.name\n" + + "FROM id_and_name_3;\n" + + "ALTER TABLE id_and_name_3_view\n" + + " owner TO test_user_3"); + ctx.fetch("INSERT INTO id_and_name_3 (id, name) VALUES (1,'Zed'), (2, 'Jack'), (3, 'Antuan');"); + return null; + }); } + final JsonNode anotherUserConfig = getConfig("test_user_3", "132"); + final Set actualMessages = + MoreIterators.toSet(source().read(anotherUserConfig, CONFIGURED_CATALOG, null)); + setEmittedAtToNull(actualMessages); + assertEquals(6, actualMessages.size()); + assertEquals(PRIVILEGE_TEST_CASE_EXPECTED_MESSAGES, actualMessages); } @Test void testDiscoverWithPk() throws Exception { - final AirbyteCatalog actual = new PostgresSource().discover(getConfig(PSQL_DB, dbName)); + final AirbyteCatalog actual = source().discover(getConfig()); actual.getStreams().forEach(actualStream -> { final Optional expectedStream = CATALOG.getStreams().stream().filter(stream -> stream.getName().equals(actualStream.getName())).findAny(); @@ -361,150 +308,150 @@ void testDiscoverWithPk() throws Exception { @Test void testDiscoverRecursiveRolePermissions() throws Exception { - try (final PostgreSQLContainer db = new PostgreSQLContainer<>("postgres:13-alpine")) { - db.start(); - final JsonNode config = getConfig(db); - try (final DSLContext dslContext = getDslContext(config)) { - final Database database = new Database(dslContext); - database.query(ctx -> { - ctx.fetch("CREATE TABLE id_and_name_7(id INTEGER, name VARCHAR(200));"); - ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); + testdb.database.query(ctx -> { + ctx.execute("DROP TABLE id_and_name CASCADE;"); + ctx.execute("DROP TABLE 
id_and_name2 CASCADE;"); + ctx.execute("DROP TABLE names CASCADE;"); + ctx.fetch("CREATE TABLE id_and_name_7(id INTEGER, name VARCHAR(200));"); + ctx.fetch("CREATE TABLE id_and_name(id INTEGER, name VARCHAR(200));"); - ctx.fetch("CREATE USER test_user_4 password '132';"); + ctx.fetch("CREATE USER test_user_4 password '132';"); + ctx.fetch("GRANT ALL ON SCHEMA public TO test_user_4"); - ctx.fetch("CREATE ROLE airbyte LOGIN password 'airbyte';"); - ctx.fetch("CREATE ROLE read_only LOGIN password 'read_only';"); - ctx.fetch("CREATE ROLE intermediate LOGIN password 'intermediate';"); + ctx.fetch("CREATE ROLE airbyte LOGIN password 'airbyte';"); + ctx.fetch("CREATE ROLE read_only LOGIN password 'read_only';"); + ctx.fetch("CREATE ROLE intermediate LOGIN password 'intermediate';"); - ctx.fetch("CREATE ROLE access_nothing LOGIN password 'access_nothing';"); + ctx.fetch("CREATE ROLE access_nothing LOGIN password 'access_nothing';"); - ctx.fetch("GRANT intermediate TO airbyte;"); - ctx.fetch("GRANT read_only TO intermediate;"); + ctx.fetch("GRANT intermediate TO airbyte;"); + ctx.fetch("GRANT read_only TO intermediate;"); - ctx.fetch("GRANT SELECT ON id_and_name, id_and_name_7 TO read_only;"); - ctx.fetch("GRANT airbyte TO test_user_4;"); + ctx.fetch("GRANT SELECT ON id_and_name, id_and_name_7 TO read_only;"); + ctx.fetch("GRANT airbyte TO test_user_4;"); - ctx.fetch("CREATE TABLE unseen(id INTEGER, name VARCHAR(200));"); - ctx.fetch("GRANT CONNECT ON DATABASE test TO test_user_4;"); - return null; - }); - } - try (final DSLContext dslContext = getDslContextWithSpecifiedUser(config, "test_user_4", "132")) { - final Database database = new Database(dslContext); - database.query(ctx -> { - ctx.fetch("CREATE TABLE id_and_name_3(id INTEGER, name VARCHAR(200));"); - return null; - }); - } - AirbyteCatalog actual = new PostgresSource().discover(getConfig(db, "test_user_4", "132")); - Set tableNames = actual.getStreams().stream().map(stream -> 
stream.getName()).collect(Collectors.toSet()); - assertEquals(Sets.newHashSet("id_and_name", "id_and_name_7", "id_and_name_3"), tableNames); - - actual = new PostgresSource().discover(getConfig(db, "access_nothing", "access_nothing")); - tableNames = actual.getStreams().stream().map(stream -> stream.getName()).collect(Collectors.toSet()); - assertEquals(Sets.newHashSet(), tableNames); - db.stop(); + ctx.fetch("CREATE TABLE unseen(id INTEGER, name VARCHAR(200));"); + ctx.fetch("GRANT CONNECT ON DATABASE " + testdb.dbName + " TO test_user_4;"); + return null; + }); + final var config = getConfig(); + + try (final DSLContext dslContext = getDslContextWithSpecifiedUser(config, "test_user_4", "132")) { + final Database database = new Database(dslContext); + database.query(ctx -> { + ctx.fetch("CREATE TABLE id_and_name_3(id INTEGER, name VARCHAR(200));"); + return null; + }); } + AirbyteCatalog actual = source().discover(getConfig("test_user_4", "132")); + Set tableNames = actual.getStreams().stream().map(stream -> stream.getName()).collect(Collectors.toSet()); + assertEquals(Sets.newHashSet("id_and_name", "id_and_name_7", "id_and_name_3"), tableNames); + + actual = source().discover(getConfig("access_nothing", "access_nothing")); + tableNames = actual.getStreams().stream().map(stream -> stream.getName()).collect(Collectors.toSet()); + assertEquals(Sets.newHashSet(), tableNames); } @Test void testDiscoverDifferentGrantAvailability() throws Exception { - try (final PostgreSQLContainer db = new PostgreSQLContainer<>("postgres:13-alpine")) { - db.start(); - final JsonNode config = getConfig(db); - try (final DSLContext dslContext = getDslContext(config)) { - final Database database = new Database(dslContext); - database.query(ctx -> { - ctx.fetch("create table not_granted_table_name_1(column_1 integer);"); - ctx.fetch("create table not_granted_table_name_2(column_1 integer);"); - ctx.fetch("create table not_granted_table_name_3(column_1 integer);"); - ctx.fetch("create 
table table_granted_by_role(column_1 integer);"); - ctx.fetch("create table test_table_granted_directly(column_1 integer);"); - ctx.fetch("create table table_granted_by_role_with_options(column_1 integer);"); - ctx.fetch("create table test_table_granted_directly_with_options(column_1 integer);"); - - ctx.fetch("create materialized view not_granted_mv_name_1 as SELECT not_granted_table_name_1.column_1 FROM not_granted_table_name_1;"); - ctx.fetch("create materialized view not_granted_mv_name_2 as SELECT not_granted_table_name_2.column_1 FROM not_granted_table_name_2;"); - ctx.fetch("create materialized view not_granted_mv_name_3 as SELECT not_granted_table_name_3.column_1 FROM not_granted_table_name_3;"); - ctx.fetch("create materialized view mv_granted_by_role as SELECT table_granted_by_role.column_1 FROM table_granted_by_role;"); - ctx.fetch( - "create materialized view test_mv_granted_directly as SELECT test_table_granted_directly.column_1 FROM test_table_granted_directly;"); - ctx.fetch( - "create materialized view mv_granted_by_role_with_options as SELECT table_granted_by_role_with_options.column_1 FROM table_granted_by_role_with_options;"); - ctx.fetch( - "create materialized view test_mv_granted_directly_with_options as SELECT test_table_granted_directly_with_options.column_1 FROM test_table_granted_directly_with_options;"); - - ctx.fetch("create view not_granted_view_name_1(column_1) as SELECT not_granted_table_name_1.column_1 FROM not_granted_table_name_1;"); - ctx.fetch("create view not_granted_view_name_2(column_1) as SELECT not_granted_table_name_2.column_1 FROM not_granted_table_name_2;"); - ctx.fetch("create view not_granted_view_name_3(column_1) as SELECT not_granted_table_name_3.column_1 FROM not_granted_table_name_3;"); - ctx.fetch("create view view_granted_by_role(column_1) as SELECT table_granted_by_role.column_1 FROM table_granted_by_role;"); - ctx.fetch( - "create view test_view_granted_directly(column_1) as SELECT 
test_table_granted_directly.column_1 FROM test_table_granted_directly;"); - ctx.fetch( - "create view view_granted_by_role_with_options(column_1) as SELECT table_granted_by_role_with_options.column_1 FROM table_granted_by_role_with_options;"); - ctx.fetch( - "create view test_view_granted_directly_with_options(column_1) as SELECT test_table_granted_directly_with_options.column_1 FROM test_table_granted_directly_with_options;"); - - ctx.fetch("create role test_role;"); - - ctx.fetch("grant delete on not_granted_table_name_2 to test_role;"); - ctx.fetch("grant delete on not_granted_mv_name_2 to test_role;"); - ctx.fetch("grant delete on not_granted_view_name_2 to test_role;"); - - ctx.fetch("grant select on table_granted_by_role to test_role;"); - ctx.fetch("grant select on mv_granted_by_role to test_role;"); - ctx.fetch("grant select on view_granted_by_role to test_role;"); - - ctx.fetch("grant select on table_granted_by_role_with_options to test_role with grant option;"); - ctx.fetch("grant select on mv_granted_by_role_with_options to test_role with grant option;"); - ctx.fetch("grant select on view_granted_by_role_with_options to test_role with grant option;"); - - ctx.fetch("create user new_test_user;"); - ctx.fetch("ALTER USER new_test_user WITH PASSWORD 'new_pass';"); - ctx.fetch("GRANT CONNECT ON DATABASE test TO new_test_user;"); - - ctx.fetch("grant test_role to new_test_user;"); - - ctx.fetch("grant delete on not_granted_table_name_3 to new_test_user;"); - ctx.fetch("grant delete on not_granted_mv_name_3 to new_test_user;"); - ctx.fetch("grant delete on not_granted_view_name_3 to new_test_user;"); - - ctx.fetch("grant select on test_table_granted_directly to new_test_user;"); - ctx.fetch("grant select on test_mv_granted_directly to new_test_user;"); - ctx.fetch("grant select on test_view_granted_directly to new_test_user;"); - - ctx.fetch("grant select on test_table_granted_directly_with_options to test_role with grant option;"); - ctx.fetch("grant select 
on test_mv_granted_directly_with_options to test_role with grant option;"); - ctx.fetch("grant select on test_view_granted_directly_with_options to test_role with grant option;"); - return null; - }); - } + final JsonNode config = getConfig(); + testdb.database.query(ctx -> { + ctx.fetch("create table not_granted_table_name_1(column_1 integer);"); + ctx.fetch("create table not_granted_table_name_2(column_1 integer);"); + ctx.fetch("create table not_granted_table_name_3(column_1 integer);"); + ctx.fetch("create table table_granted_by_role(column_1 integer);"); + ctx.fetch("create table test_table_granted_directly(column_1 integer);"); + ctx.fetch("create table table_granted_by_role_with_options(column_1 integer);"); + ctx.fetch("create table test_table_granted_directly_with_options(column_1 integer);"); + + ctx.fetch( + "create materialized view not_granted_mv_name_1 as SELECT not_granted_table_name_1.column_1 FROM not_granted_table_name_1;"); + ctx.fetch( + "create materialized view not_granted_mv_name_2 as SELECT not_granted_table_name_2.column_1 FROM not_granted_table_name_2;"); + ctx.fetch( + "create materialized view not_granted_mv_name_3 as SELECT not_granted_table_name_3.column_1 FROM not_granted_table_name_3;"); + ctx.fetch( + "create materialized view mv_granted_by_role as SELECT table_granted_by_role.column_1 FROM table_granted_by_role;"); + ctx.fetch( + "create materialized view test_mv_granted_directly as SELECT test_table_granted_directly.column_1 FROM test_table_granted_directly;"); + ctx.fetch( + "create materialized view mv_granted_by_role_with_options as SELECT table_granted_by_role_with_options.column_1 FROM table_granted_by_role_with_options;"); + ctx.fetch( + "create materialized view test_mv_granted_directly_with_options as SELECT test_table_granted_directly_with_options.column_1 FROM test_table_granted_directly_with_options;"); + + ctx.fetch( + "create view not_granted_view_name_1(column_1) as SELECT not_granted_table_name_1.column_1 FROM 
not_granted_table_name_1;"); + ctx.fetch( + "create view not_granted_view_name_2(column_1) as SELECT not_granted_table_name_2.column_1 FROM not_granted_table_name_2;"); + ctx.fetch( + "create view not_granted_view_name_3(column_1) as SELECT not_granted_table_name_3.column_1 FROM not_granted_table_name_3;"); + ctx.fetch( + "create view view_granted_by_role(column_1) as SELECT table_granted_by_role.column_1 FROM table_granted_by_role;"); + ctx.fetch( + "create view test_view_granted_directly(column_1) as SELECT test_table_granted_directly.column_1 FROM test_table_granted_directly;"); + ctx.fetch( + "create view view_granted_by_role_with_options(column_1) as SELECT table_granted_by_role_with_options.column_1 FROM table_granted_by_role_with_options;"); + ctx.fetch( + "create view test_view_granted_directly_with_options(column_1) as SELECT test_table_granted_directly_with_options.column_1 FROM test_table_granted_directly_with_options;"); + + ctx.fetch("create role test_role;"); + + ctx.fetch("grant delete on not_granted_table_name_2 to test_role;"); + ctx.fetch("grant delete on not_granted_mv_name_2 to test_role;"); + ctx.fetch("grant delete on not_granted_view_name_2 to test_role;"); + + ctx.fetch("grant select on table_granted_by_role to test_role;"); + ctx.fetch("grant select on mv_granted_by_role to test_role;"); + ctx.fetch("grant select on view_granted_by_role to test_role;"); + + ctx.fetch("grant select on table_granted_by_role_with_options to test_role with grant option;"); + ctx.fetch("grant select on mv_granted_by_role_with_options to test_role with grant option;"); + ctx.fetch("grant select on view_granted_by_role_with_options to test_role with grant option;"); + + ctx.fetch("create user new_test_user;"); + ctx.fetch("ALTER USER new_test_user WITH PASSWORD 'new_pass';"); + ctx.fetch("GRANT CONNECT ON DATABASE " + testdb.dbName + " TO new_test_user;"); + ctx.fetch("GRANT ALL ON SCHEMA public TO new_test_user"); + + ctx.fetch("grant test_role to new_test_user;"); 
+ + ctx.fetch("grant delete on not_granted_table_name_3 to new_test_user;"); + ctx.fetch("grant delete on not_granted_mv_name_3 to new_test_user;"); + ctx.fetch("grant delete on not_granted_view_name_3 to new_test_user;"); + + ctx.fetch("grant select on test_table_granted_directly to new_test_user;"); + ctx.fetch("grant select on test_mv_granted_directly to new_test_user;"); + ctx.fetch("grant select on test_view_granted_directly to new_test_user;"); + + ctx.fetch("grant select on test_table_granted_directly_with_options to test_role with grant option;"); + ctx.fetch("grant select on test_mv_granted_directly_with_options to test_role with grant option;"); + ctx.fetch("grant select on test_view_granted_directly_with_options to test_role with grant option;"); + return null; + }); - final AirbyteCatalog actual = new PostgresSource().discover(getConfig(db, "new_test_user", "new_pass")); - actual.getStreams().stream().forEach(airbyteStream -> { - assertEquals(2, airbyteStream.getSupportedSyncModes().size()); - assertTrue(airbyteStream.getSupportedSyncModes().contains(SyncMode.FULL_REFRESH)); - assertTrue(airbyteStream.getSupportedSyncModes().contains(SyncMode.INCREMENTAL)); - }); - final Set tableNames = actual.getStreams().stream().map(stream -> stream.getName()).collect(Collectors.toSet()); - final Set expectedVisibleNames = Sets.newHashSet( - "table_granted_by_role", - "table_granted_by_role_with_options", - "test_table_granted_directly", - "test_table_granted_directly_with_options", - "mv_granted_by_role", - "mv_granted_by_role_with_options", - "test_mv_granted_directly", - "test_mv_granted_directly_with_options", - "test_view_granted_directly", - "test_view_granted_directly_with_options", - "view_granted_by_role", - "view_granted_by_role_with_options"); - - assertEquals(tableNames, expectedVisibleNames); - - db.stop(); - } + final AirbyteCatalog actual = source().discover(getConfig("new_test_user", "new_pass")); + actual.getStreams().stream().forEach(airbyteStream 
-> { + assertEquals(2, airbyteStream.getSupportedSyncModes().size()); + assertTrue(airbyteStream.getSupportedSyncModes().contains(SyncMode.FULL_REFRESH)); + assertTrue(airbyteStream.getSupportedSyncModes().contains(SyncMode.INCREMENTAL)); + }); + final Set tableNames = + actual.getStreams().stream().map(stream -> stream.getName()).collect(Collectors.toSet()); + final Set expectedVisibleNames = Sets.newHashSet( + "table_granted_by_role", + "table_granted_by_role_with_options", + "test_table_granted_directly", + "test_table_granted_directly_with_options", + "mv_granted_by_role", + "mv_granted_by_role_with_options", + "test_mv_granted_directly", + "test_mv_granted_directly_with_options", + "test_view_granted_directly", + "test_view_granted_directly_with_options", + "view_granted_by_role", + "view_granted_by_role_with_options"); + + assertEquals(tableNames, expectedVisibleNames); } @Test @@ -512,7 +459,7 @@ void testReadSuccess() throws Exception { final ConfiguredAirbyteCatalog configuredCatalog = CONFIGURED_CATALOG.withStreams(CONFIGURED_CATALOG.getStreams().stream().filter(s -> s.getStream().getName().equals(STREAM_NAME)).collect( Collectors.toList())); - final Set actualMessages = MoreIterators.toSet(new PostgresSource().read(getConfig(PSQL_DB, dbName), configuredCatalog, null)); + final Set actualMessages = MoreIterators.toSet(source().read(getConfig(), configuredCatalog, null)); setEmittedAtToNull(actualMessages); assertEquals(ASCII_MESSAGES, actualMessages); @@ -520,56 +467,55 @@ void testReadSuccess() throws Exception { @Test void testReadIncrementalSuccess() throws Exception { - final JsonNode config = getConfig(PSQL_DB, dbName); // We want to test ordering, so we can delete the NaN entry and add a 3. 
- try (final DSLContext dslContext = getDslContext(config)) { - final Database database = getDatabase(dslContext); - database.query(ctx -> { - ctx.fetch("DELETE FROM id_and_name WHERE id = 'NaN';"); - ctx.fetch("INSERT INTO id_and_name (id, name, power) VALUES (3, 'gohan', 222.1);"); - return null; - }); + testdb.database.query(ctx -> { + ctx.fetch("DELETE FROM id_and_name WHERE id = 'NaN';"); + ctx.fetch("INSERT INTO id_and_name (id, name, power) VALUES (3, 'gohan', 222.1);"); + return null; + }); - final ConfiguredAirbyteCatalog configuredCatalog = - CONFIGURED_INCR_CATALOG - .withStreams(CONFIGURED_INCR_CATALOG.getStreams().stream().filter(s -> s.getStream().getName().equals(STREAM_NAME)).collect( - Collectors.toList())); - final PostgresSource source = new PostgresSource(); - source.setStateEmissionFrequencyForDebug(1); - final List actualMessages = MoreIterators.toList(source.read(getConfig(PSQL_DB, dbName), configuredCatalog, null)); - setEmittedAtToNull(actualMessages); + final ConfiguredAirbyteCatalog configuredCatalog = + CONFIGURED_INCR_CATALOG + .withStreams(CONFIGURED_INCR_CATALOG.getStreams() + .stream() + .filter(s -> s.getStream().getName().equals(STREAM_NAME)) + .toList()); + final PostgresSource source = source(); + source.setStateEmissionFrequencyForDebug(1); + final List actualMessages = MoreIterators.toList(source.read(getConfig(), configuredCatalog, null)); + setEmittedAtToNull(actualMessages); - final List stateAfterFirstBatch = extractStateMessage(actualMessages); + final List stateAfterFirstBatch = extractStateMessage(actualMessages); - setEmittedAtToNull(actualMessages); + setEmittedAtToNull(actualMessages); - final Set expectedOutput = Sets.newHashSet( - createRecord(STREAM_NAME, SCHEMA_NAME, map("id", new BigDecimal("1.0"), "name", "goku", "power", null)), - createRecord(STREAM_NAME, SCHEMA_NAME, map("id", new BigDecimal("2.0"), "name", "vegeta", "power", 9000.1)), - createRecord(STREAM_NAME, SCHEMA_NAME, map("id", new BigDecimal("3.0"), 
"name", "vegeta", "power", 222.1))); + final Set expectedOutput = Sets.newHashSet( + createRecord(STREAM_NAME, SCHEMA_NAME, map("id", new BigDecimal("1.0"), "name", "goku", "power", null)), + createRecord(STREAM_NAME, SCHEMA_NAME, map("id", new BigDecimal("2.0"), "name", "vegeta", "power", 9000.1)), + createRecord(STREAM_NAME, SCHEMA_NAME, map("id", new BigDecimal("3.0"), "name", "gohan", "power", 222.1))); - // Assert that the correct number of messages are emitted. - assertEquals(actualMessages.size(), expectedOutput.size() + 1); - assertThat(actualMessages.contains(expectedOutput)); - // Assert that the Postgres source is emitting records & state messages in the correct order. - assertCorrectRecordOrderForIncrementalSync(actualMessages, "id", JsonSchemaPrimitive.NUMBER, configuredCatalog, - new AirbyteStreamNameNamespacePair("id_and_name", "public")); + // Assert that the correct number of messages are emitted. + assertEquals(actualMessages.size(), expectedOutput.size() + 1); + assertThat(actualMessages.contains(expectedOutput)); + // Assert that the Postgres source is emitting records & state messages in the correct order. 
+ assertCorrectRecordOrderForIncrementalSync(actualMessages, "id", JsonSchemaPrimitive.NUMBER, configuredCatalog, + new AirbyteStreamNameNamespacePair("id_and_name", "public")); - final AirbyteStateMessage lastEmittedState = stateAfterFirstBatch.get(stateAfterFirstBatch.size() - 1); - final JsonNode state = Jsons.jsonNode(List.of(lastEmittedState)); + final AirbyteStateMessage lastEmittedState = stateAfterFirstBatch.get(stateAfterFirstBatch.size() - 1); + final JsonNode state = Jsons.jsonNode(List.of(lastEmittedState)); - database.query(ctx -> { - ctx.fetch("INSERT INTO id_and_name (id, name, power) VALUES (5, 'piccolo', 100.0);"); - return null; - }); - // Incremental sync should only read one new message (where id = '5.0') - final Set nextSyncMessages = MoreIterators.toSet(source.read(getConfig(PSQL_DB, dbName), configuredCatalog, state)); - setEmittedAtToNull(nextSyncMessages); + testdb.database.query(ctx -> { + ctx.fetch("INSERT INTO id_and_name (id, name, power) VALUES (5, 'piccolo', 100.0);"); + return null; + }); + // Incremental sync should only read one new message (where id = '5.0') + final Set nextSyncMessages = + MoreIterators.toSet(source.read(getConfig(), configuredCatalog, state)); + setEmittedAtToNull(nextSyncMessages); - // An extra state message is emitted, in addition to the record messages. - assertEquals(nextSyncMessages.size(), 2); - assertThat(nextSyncMessages.contains(createRecord(STREAM_NAME, SCHEMA_NAME, map("id", "5.0", "name", "piccolo", "power", 100.0)))); - } + // An extra state message is emitted, in addition to the record messages. 
+ assertEquals(nextSyncMessages.size(), 2); + assertThat(nextSyncMessages.contains(createRecord(STREAM_NAME, SCHEMA_NAME, map("id", "5.0", "name", "piccolo", "power", 100.0)))); } /* @@ -608,7 +554,7 @@ private void assertCorrectRecordOrderForIncrementalSync(final List db = new PostgreSQLContainer<>("postgres:13-alpine")) { - db.start(); - final JsonNode config = getConfig(db); - try (final DSLContext dslContext = getDslContext(config)) { - final Database database = new Database(dslContext); - final ConfiguredAirbyteStream tableWithInvalidCursorType = createTableWithInvalidCursorType(database); - final ConfiguredAirbyteCatalog configuredAirbyteCatalog = - new ConfiguredAirbyteCatalog().withStreams(Collections.singletonList(tableWithInvalidCursorType)); - - final Throwable throwable = catchThrowable(() -> MoreIterators.toSet(new PostgresSource().read(config, configuredAirbyteCatalog, null))); - assertThat(throwable).isInstanceOf(ConfigErrorException.class) - .hasMessageContaining( - "The following tables have invalid columns selected as cursor, please select a column with a well-defined ordering with no null values as a cursor. {tableName='public.test_table', cursorColumnName='id', cursorSqlType=OTHER, cause=Unsupported cursor type}"); - } finally { - db.stop(); - } - } + final ConfiguredAirbyteStream tableWithInvalidCursorType = createTableWithInvalidCursorType(testdb.database); + final ConfiguredAirbyteCatalog configuredAirbyteCatalog = + new ConfiguredAirbyteCatalog().withStreams(Collections.singletonList(tableWithInvalidCursorType)); + + final Throwable throwable = + catchThrowable(() -> MoreIterators.toSet(source().read(getConfig(), configuredAirbyteCatalog, null))); + assertThat(throwable).isInstanceOf(ConfigErrorException.class) + .hasMessageContaining( + "The following tables have invalid columns selected as cursor, please select a column with a well-defined ordering with no null values as a cursor. 
{tableName='public.test_table', cursorColumnName='id', cursorSqlType=OTHER, cause=Unsupported cursor type}"); } private ConfiguredAirbyteStream createTableWithInvalidCursorType(final Database database) throws SQLException { @@ -678,7 +616,7 @@ private ConfiguredAirbyteStream createTableWithInvalidCursorType(final Database @Test void testJdbcUrlWithEscapedDatabaseName() { - final JsonNode jdbcConfig = new PostgresSource().toDatabaseConfig(buildConfigEscapingNeeded()); + final JsonNode jdbcConfig = source().toDatabaseConfig(buildConfigEscapingNeeded()); assertEquals(EXPECTED_JDBC_ESCAPED_URL, jdbcConfig.get(JdbcUtils.JDBC_URL_KEY).asText()); } @@ -695,23 +633,13 @@ private JsonNode buildConfigEscapingNeeded() { @Test public void tableWithNullValueCursorShouldThrowException() throws SQLException { - try (final PostgreSQLContainer db = new PostgreSQLContainer<>("postgres:13-alpine")) { - db.start(); - final JsonNode config = getConfig(db); - try (final DSLContext dslContext = getDslContext(config)) { - final Database database = new Database(dslContext); - final ConfiguredAirbyteStream table = createTableWithNullValueCursor(database); - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(Collections.singletonList(table)); - - final Throwable throwable = catchThrowable(() -> MoreIterators.toSet(new PostgresSource().read(config, catalog, null))); - assertThat(throwable).isInstanceOf(ConfigErrorException.class) - .hasMessageContaining( - "The following tables have invalid columns selected as cursor, please select a column with a well-defined ordering with no null values as a cursor. 
{tableName='public.test_table_null_cursor', cursorColumnName='id', cursorSqlType=INTEGER, cause=Cursor column contains NULL value}"); - - } finally { - db.stop(); - } - } + final ConfiguredAirbyteStream table = createTableWithNullValueCursor(testdb.database); + final ConfiguredAirbyteCatalog catalog = + new ConfiguredAirbyteCatalog().withStreams(Collections.singletonList(table)); + + final Throwable throwable = catchThrowable(() -> MoreIterators.toSet(source().read(getConfig(), catalog, null))); + assertThat(throwable).isInstanceOf(ConfigErrorException.class).hasMessageContaining( + "The following tables have invalid columns selected as cursor, please select a column with a well-defined ordering with no null values as a cursor. {tableName='public.test_table_null_cursor', cursorColumnName='id', cursorSqlType=INTEGER, cause=Cursor column contains NULL value}"); } private ConfiguredAirbyteStream createTableWithNullValueCursor(final Database database) throws SQLException { @@ -736,23 +664,13 @@ private ConfiguredAirbyteStream createTableWithNullValueCursor(final Database da @Test public void viewWithNullValueCursorShouldThrowException() throws SQLException { - try (final PostgreSQLContainer db = new PostgreSQLContainer<>("postgres:13-alpine")) { - db.start(); - final JsonNode config = getConfig(db); - try (final DSLContext dslContext = getDslContext(config)) { - final Database database = new Database(dslContext); - final ConfiguredAirbyteStream table = createViewWithNullValueCursor(database); - final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(Collections.singletonList(table)); - - final Throwable throwable = catchThrowable(() -> MoreIterators.toSet(new PostgresSource().read(config, catalog, null))); - assertThat(throwable).isInstanceOf(ConfigErrorException.class) - .hasMessageContaining( - "The following tables have invalid columns selected as cursor, please select a column with a well-defined ordering with no null values as a cursor. 
{tableName='public.test_view_null_cursor', cursorColumnName='id', cursorSqlType=INTEGER, cause=Cursor column contains NULL value}"); - - } finally { - db.stop(); - } - } + final ConfiguredAirbyteStream table = createViewWithNullValueCursor(testdb.database); + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withStreams(Collections.singletonList(table)); + + final Throwable throwable = catchThrowable(() -> MoreIterators.toSet(source().read(getConfig(), catalog, null))); + assertThat(throwable).isInstanceOf(ConfigErrorException.class) + .hasMessageContaining( + "The following tables have invalid columns selected as cursor, please select a column with a well-defined ordering with no null values as a cursor. {tableName='public.test_view_null_cursor', cursorColumnName='id', cursorSqlType=INTEGER, cause=Cursor column contains NULL value}"); } private ConfiguredAirbyteStream createViewWithNullValueCursor(final Database database) throws SQLException { @@ -813,86 +731,83 @@ void testParseJdbcParameters() { @Test public void testJdbcOptionsParameter() throws Exception { - try (final PostgreSQLContainer db = new PostgreSQLContainer<>("postgres:13-alpine")) { - db.start(); - - // Populate DB. - final JsonNode dbConfig = getConfig(db); - try (final DSLContext dslContext = getDslContext(dbConfig)) { - final Database database = getDatabase(dslContext); - database.query(ctx -> { - ctx.fetch("CREATE TABLE id_and_bytes (id INTEGER, bytes BYTEA);"); - ctx.fetch("INSERT INTO id_and_bytes (id, bytes) VALUES (1, decode('DEADBEEF', 'hex'));"); - return null; - }); - } + // Populate DB. + final JsonNode dbConfig = getConfig(); + testdb.database.query(ctx -> { + ctx.fetch("CREATE TABLE id_and_bytes (id INTEGER, bytes BYTEA);"); + ctx.fetch("INSERT INTO id_and_bytes (id, bytes) VALUES (1, decode('DEADBEEF', 'hex'));"); + return null; + }); - // Read the table contents using the non-default 'escape' format for bytea values. 
- final JsonNode sourceConfig = Jsons.jsonNode(ImmutableMap.builder() - .putAll(Jsons.flatten(dbConfig)) - .put(JdbcUtils.JDBC_URL_PARAMS_KEY, "options=-c%20statement_timeout=90000%20-c%20bytea_output=escape") - .build()); - final AirbyteStream airbyteStream = CatalogHelpers.createAirbyteStream( - "id_and_bytes", - SCHEMA_NAME, - Field.of("id", JsonSchemaType.NUMBER), - Field.of("bytes", JsonSchemaType.STRING)) - .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) - .withSourceDefinedPrimaryKey(List.of(List.of("id"))); - final AirbyteCatalog airbyteCatalog = new AirbyteCatalog().withStreams(List.of(airbyteStream)); - final Set actualMessages = - MoreIterators.toSet(new PostgresSource().read(sourceConfig, CatalogHelpers.toDefaultConfiguredCatalog(airbyteCatalog), null)); - setEmittedAtToNull(actualMessages); + // Read the table contents using the non-default 'escape' format for bytea values. + final JsonNode sourceConfig = Jsons.jsonNode(ImmutableMap.builder() + .putAll(Jsons.flatten(dbConfig)) + .put(JdbcUtils.JDBC_URL_PARAMS_KEY, "options=-c%20statement_timeout=90000%20-c%20bytea_output=escape") + .build()); + final AirbyteStream airbyteStream = CatalogHelpers.createAirbyteStream( + "id_and_bytes", + SCHEMA_NAME, + Field.of("id", JsonSchemaType.NUMBER), + Field.of("bytes", JsonSchemaType.STRING)) + .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL)) + .withSourceDefinedPrimaryKey(List.of(List.of("id"))); + final AirbyteCatalog airbyteCatalog = new AirbyteCatalog().withStreams(List.of(airbyteStream)); + final Set actualMessages = + MoreIterators.toSet(source().read( + sourceConfig, + CatalogHelpers.toDefaultConfiguredCatalog(airbyteCatalog), + null)); + setEmittedAtToNull(actualMessages); - // Check that the 'options' JDBC URL parameter was parsed correctly - // and that the bytea value is not in the default 'hex' format. 
- assertEquals(1, actualMessages.size()); - final AirbyteMessage actualMessage = actualMessages.stream().findFirst().get(); - assertTrue(actualMessage.getRecord().getData().has("bytes")); - assertEquals("\\336\\255\\276\\357", actualMessage.getRecord().getData().get("bytes").asText()); - } + // Check that the 'options' JDBC URL parameter was parsed correctly + // and that the bytea value is not in the default 'hex' format. + assertEquals(1, actualMessages.size()); + final AirbyteMessage actualMessage = actualMessages.stream().findFirst().get(); + assertTrue(actualMessage.getRecord().getData().has("bytes")); + assertEquals("\\336\\255\\276\\357", actualMessage.getRecord().getData().get("bytes").asText()); } @Test @DisplayName("Make sure initial incremental load is reading records in a certain order") void testReadIncrementalRecordOrder() throws Exception { - final JsonNode config = getConfig(PSQL_DB, dbName); // We want to test ordering, so we can delete the NaN entry - try (final DSLContext dslContext = getDslContext(config)) { - final Database database = getDatabase(dslContext); - database.query(ctx -> { - ctx.fetch("DELETE FROM id_and_name WHERE id = 'NaN';"); - for (int i = 3; i < 1000; i++) { - ctx.fetch("INSERT INTO id_and_name (id, name, power) VALUES (%d, 'gohan%d', 222.1);".formatted(i, i)); - } - return null; - }); + testdb.database.query(ctx -> { + ctx.fetch("DELETE FROM id_and_name WHERE id = 'NaN';"); + for (int i = 3; i < 1000; i++) { + ctx.fetch("INSERT INTO id_and_name (id, name, power) VALUES (%d, 'gohan%d', 222.1);".formatted(i, i)); + } + return null; + }); - final ConfiguredAirbyteCatalog configuredCatalog = - CONFIGURED_INCR_CATALOG - .withStreams(CONFIGURED_INCR_CATALOG.getStreams().stream().filter(s -> s.getStream().getName().equals(STREAM_NAME)).collect( - Collectors.toList())); - final PostgresSource source = new PostgresSource(); - source.setStateEmissionFrequencyForDebug(1); - final List actualMessages = 
MoreIterators.toList(source.read(getConfig(PSQL_DB, dbName), configuredCatalog, null)); - setEmittedAtToNull(actualMessages); + final ConfiguredAirbyteCatalog configuredCatalog = + CONFIGURED_INCR_CATALOG + .withStreams(CONFIGURED_INCR_CATALOG.getStreams() + .stream() + .filter(s -> s.getStream().getName().equals(STREAM_NAME)) + .toList()); + final PostgresSource source = source(); + source.setStateEmissionFrequencyForDebug(1); + final List actualMessages = MoreIterators.toList(source.read(getConfig(), configuredCatalog, null)); + setEmittedAtToNull(actualMessages); - // final List stateAfterFirstBatch = extractStateMessage(actualMessages); + // final List stateAfterFirstBatch = extractStateMessage(actualMessages); - setEmittedAtToNull(actualMessages); + setEmittedAtToNull(actualMessages); - final Set expectedOutput = Sets.newHashSet( - createRecord(STREAM_NAME, SCHEMA_NAME, map("id", new BigDecimal("1.0"), "name", "goku", "power", null)), - createRecord(STREAM_NAME, SCHEMA_NAME, map("id", new BigDecimal("2.0"), "name", "vegeta", "power", 9000.1))); - for (int i = 3; i < 1000; i++) { - expectedOutput.add( - createRecord(STREAM_NAME, SCHEMA_NAME, map("id", new BigDecimal("%d.0".formatted(i)), "name", "gohan%d".formatted(i), "power", 222.1))); - } - assertThat(actualMessages.contains(expectedOutput)); - // Assert that the Postgres source is emitting records & state messages in the correct order. 
- assertCorrectRecordOrderForIncrementalSync(actualMessages, "id", JsonSchemaPrimitive.NUMBER, configuredCatalog, - new AirbyteStreamNameNamespacePair("id_and_name", "public")); + final Set expectedOutput = Sets.newHashSet( + createRecord(STREAM_NAME, SCHEMA_NAME, map("id", new BigDecimal("1.0"), "name", "goku", "power", null)), + createRecord(STREAM_NAME, SCHEMA_NAME, map("id", new BigDecimal("2.0"), "name", "vegeta", "power", 9000.1))); + for (int i = 3; i < 1000; i++) { + expectedOutput.add( + createRecord( + STREAM_NAME, + SCHEMA_NAME, + map("id", new BigDecimal("%d.0".formatted(i)), "name", "gohan%d".formatted(i), "power", 222.1))); } + assertThat(actualMessages.contains(expectedOutput)); + // Assert that the Postgres source is emitting records & state messages in the correct order. + assertCorrectRecordOrderForIncrementalSync(actualMessages, "id", JsonSchemaPrimitive.NUMBER, configuredCatalog, + new AirbyteStreamNameNamespacePair("id_and_name", "public")); } } diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresStressTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresStressTest.java index d1b22b8fab49..c68be6d1be11 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresStressTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresStressTest.java @@ -43,7 +43,7 @@ class PostgresStressTest extends JdbcStressTest { @BeforeAll static void init() { - PSQL_DB = new PostgreSQLContainer<>("postgres:13-alpine"); + PSQL_DB = new PostgreSQLContainer<>("postgres:16-bullseye"); PSQL_DB.start(); } diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/XminPostgresSourceTest.java 
b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/XminPostgresSourceTest.java index fc66e1c34492..1383d04e60d1 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/XminPostgresSourceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/XminPostgresSourceTest.java @@ -18,15 +18,12 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import io.airbyte.cdk.db.Database; -import io.airbyte.cdk.db.factory.DSLContextFactory; -import io.airbyte.cdk.db.factory.DatabaseDriver; import io.airbyte.cdk.db.jdbc.JdbcUtils; -import io.airbyte.cdk.testutils.PostgreSQLContainerHelper; +import io.airbyte.cdk.integrations.base.Source; +import io.airbyte.cdk.testutils.PostgresTestDatabase; import io.airbyte.commons.features.EnvVariableFeatureFlags; -import io.airbyte.commons.io.IOs; +import io.airbyte.commons.features.FeatureFlagsWrapper; import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.string.Strings; import io.airbyte.commons.util.MoreIterators; import io.airbyte.protocol.models.Field; import io.airbyte.protocol.models.JsonSchemaType; @@ -41,6 +38,7 @@ import io.airbyte.protocol.models.v0.DestinationSyncMode; import io.airbyte.protocol.models.v0.SyncMode; import java.math.BigDecimal; +import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -48,24 +46,12 @@ import java.util.Optional; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import 
org.junit.jupiter.api.extension.ExtendWith; -import org.testcontainers.containers.PostgreSQLContainer; -import org.testcontainers.utility.MountableFile; -import uk.org.webcompere.systemstubs.environment.EnvironmentVariables; -import uk.org.webcompere.systemstubs.jupiter.SystemStub; -import uk.org.webcompere.systemstubs.jupiter.SystemStubsExtension; - -@ExtendWith(SystemStubsExtension.class) + class XminPostgresSourceTest { - @SystemStub - private EnvironmentVariables environmentVariables; private static final String SCHEMA_NAME = "public"; protected static final String STREAM_NAME = "id_and_name"; private static final AirbyteCatalog CATALOG = new AirbyteCatalog().withStreams(List.of( @@ -106,73 +92,45 @@ class XminPostgresSourceTest { protected static final List NEXT_RECORD_MESSAGES = Arrays.asList( createRecord(STREAM_NAME, SCHEMA_NAME, map("id", new BigDecimal("3.0"), "name", "gohan", "power", 222.1))); - protected static PostgreSQLContainer PSQL_DB; - - protected String dbName; + protected PostgresTestDatabase testdb; - @BeforeAll - static void init() { - PSQL_DB = new PostgreSQLContainer<>("postgres:13-alpine"); - PSQL_DB.start(); + protected String getDatabaseImageName() { + return "postgres:12-bullseye"; } @BeforeEach - void setup() throws Exception { - dbName = Strings.addRandomSuffix("db", "_", 10).toLowerCase(); - environmentVariables.set(EnvVariableFeatureFlags.USE_STREAM_CAPABLE_STATE, "true"); - - final String initScriptName = "init_" + dbName.concat(".sql"); - final String tmpFilePath = IOs.writeFileToRandomTmpDir(initScriptName, "CREATE DATABASE " + dbName + ";"); - PostgreSQLContainerHelper.runSqlScript(MountableFile.forHostPath(tmpFilePath), PSQL_DB); - - final JsonNode config = getXminConfig(PSQL_DB, dbName); - - try (final DSLContext dslContext = getDslContext(config)) { - final Database database = getDatabase(dslContext); - database.query(ctx -> { - ctx.fetch( - "CREATE TABLE id_and_name(id NUMERIC(20, 10) NOT NULL, name VARCHAR(200) NOT NULL, 
power double precision NOT NULL, PRIMARY KEY (id));"); - ctx.fetch("CREATE INDEX i1 ON id_and_name (id);"); - ctx.fetch( - "INSERT INTO id_and_name (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');"); - - ctx.fetch("CREATE TABLE id_and_name2(id NUMERIC(20, 10) NOT NULL, name VARCHAR(200) NOT NULL, power double precision NOT NULL);"); - ctx.fetch( - "INSERT INTO id_and_name2 (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');"); - - ctx.fetch( - "CREATE TABLE names(first_name VARCHAR(200) NOT NULL, last_name VARCHAR(200) NOT NULL, power double precision NOT NULL, PRIMARY KEY (first_name, last_name));"); - ctx.fetch( - "INSERT INTO names (first_name, last_name, power) VALUES ('san', 'goku', 'Infinity'), ('prince', 'vegeta', 9000.1), ('piccolo', 'junior', '-Infinity');"); - return null; - }); - } - } - - protected static Database getDatabase(final DSLContext dslContext) { - return new Database(dslContext); + protected void setup() throws SQLException { + testdb = PostgresTestDatabase.make(getDatabaseImageName()); + testdb.database.query(ctx -> { + ctx.fetch( + "CREATE TABLE id_and_name(id NUMERIC(20, 10) NOT NULL, name VARCHAR(200) NOT NULL, power double precision NOT NULL, PRIMARY KEY (id));"); + ctx.fetch("CREATE INDEX i1 ON id_and_name (id);"); + ctx.fetch("INSERT INTO id_and_name (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');"); + + ctx.fetch("CREATE TABLE id_and_name2(id NUMERIC(20, 10) NOT NULL, name VARCHAR(200) NOT NULL, power double precision NOT NULL);"); + ctx.fetch("INSERT INTO id_and_name2 (id, name, power) VALUES (1,'goku', 'Infinity'), (2, 'vegeta', 9000.1), ('NaN', 'piccolo', '-Infinity');"); + + ctx.fetch( + "CREATE TABLE names(first_name VARCHAR(200) NOT NULL, last_name VARCHAR(200) NOT NULL, power double precision NOT NULL, PRIMARY KEY (first_name, last_name));"); + ctx.fetch( + "INSERT 
INTO names (first_name, last_name, power) VALUES ('san', 'goku', 'Infinity'), ('prince', 'vegeta', 9000.1), ('piccolo', 'junior', '-Infinity');"); + return null; + }); } - protected static DSLContext getDslContext(final JsonNode config) { - return DSLContextFactory.create( - config.get(JdbcUtils.USERNAME_KEY).asText(), - config.get(JdbcUtils.PASSWORD_KEY).asText(), - DatabaseDriver.POSTGRESQL.getDriverClassName(), - String.format(DatabaseDriver.POSTGRESQL.getUrlFormatString(), - config.get(JdbcUtils.HOST_KEY).asText(), - config.get(JdbcUtils.PORT_KEY).asInt(), - config.get(JdbcUtils.DATABASE_KEY).asText()), - SQLDialect.POSTGRES); + @AfterEach + protected void tearDown() throws SQLException { + testdb.close(); } - protected JsonNode getXminConfig(final PostgreSQLContainer psqlDb, final String dbName) { + protected JsonNode getXminConfig() { return Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, psqlDb.getHost()) - .put(JdbcUtils.PORT_KEY, psqlDb.getFirstMappedPort()) - .put(JdbcUtils.DATABASE_KEY, dbName) + .put(JdbcUtils.HOST_KEY, testdb.container.getHost()) + .put(JdbcUtils.PORT_KEY, testdb.container.getFirstMappedPort()) + .put(JdbcUtils.DATABASE_KEY, testdb.dbName) .put(JdbcUtils.SCHEMAS_KEY, List.of(SCHEMA_NAME)) - .put(JdbcUtils.USERNAME_KEY, psqlDb.getUsername()) - .put(JdbcUtils.PASSWORD_KEY, psqlDb.getPassword()) + .put(JdbcUtils.USERNAME_KEY, testdb.userName) + .put(JdbcUtils.PASSWORD_KEY, testdb.password) .put(JdbcUtils.SSL_KEY, false) .put("replication_method", getReplicationMethod()) .put(SYNC_CHECKPOINT_RECORDS_PROPERTY, 1) @@ -185,14 +143,15 @@ private JsonNode getReplicationMethod() { .build()); } - @AfterAll - static void cleanUp() { - PSQL_DB.close(); + protected Source source() { + PostgresSource source = new PostgresSource(); + source.setFeatureFlags(FeatureFlagsWrapper.overridingUseStreamCapableState(new EnvVariableFeatureFlags(), true)); + return PostgresSource.sshWrappedSource(source); } @Test void testDiscover() throws 
Exception { - final AirbyteCatalog actual = new PostgresSource().discover(getXminConfig(PSQL_DB, dbName)); + final AirbyteCatalog actual = source().discover(getXminConfig()); actual.getStreams().forEach(actualStream -> { final Optional expectedStream = CATALOG.getStreams().stream().filter(stream -> stream.getName().equals(actualStream.getName())).findAny(); @@ -210,7 +169,7 @@ void testReadSuccess() throws Exception { .withStreams(CONFIGURED_XMIN_CATALOG.getStreams().stream().filter(s -> s.getStream().getName().equals(STREAM_NAME)).collect( Collectors.toList())); final List recordsFromFirstSync = - MoreIterators.toList(new PostgresSource().read(getXminConfig(PSQL_DB, dbName), configuredCatalog, null)); + MoreIterators.toList(source().read(getXminConfig(), configuredCatalog, null)); setEmittedAtToNull(recordsFromFirstSync); assertThat(filterRecords(recordsFromFirstSync)).containsExactlyElementsOf(INITIAL_RECORD_MESSAGES); @@ -257,7 +216,7 @@ void testReadSuccess() throws Exception { // Sync should work with a ctid state final List recordsFromSyncRunningWithACtidState = - MoreIterators.toList(new PostgresSource().read(getXminConfig(PSQL_DB, dbName), configuredCatalog, + MoreIterators.toList(source().read(getXminConfig(), configuredCatalog, Jsons.jsonNode(Collections.singletonList(firstStateMessage)))); setEmittedAtToNull(recordsFromSyncRunningWithACtidState); final List expectedDataFromSyncUsingFirstCtidState = new ArrayList<>(2); @@ -281,7 +240,7 @@ void testReadSuccess() throws Exception { // Read with the final xmin state message should return no data final List syncWithXminStateType = - MoreIterators.toList(new PostgresSource().read(getXminConfig(PSQL_DB, dbName), configuredCatalog, + MoreIterators.toList(source().read(getXminConfig(), configuredCatalog, Jsons.jsonNode(Collections.singletonList(thirdStateMessage)))); setEmittedAtToNull(syncWithXminStateType); assertEquals(0, filterRecords(syncWithXminStateType).size()); @@ -295,17 +254,14 @@ void 
testReadSuccess() throws Exception { // We add some data and perform a third read. We should verify that (i) a delete is not captured and // (ii) the new record that is inserted into the // table is read. - try (final DSLContext dslContext = getDslContext(getXminConfig(PSQL_DB, dbName))) { - final Database database = getDatabase(dslContext); - database.query(ctx -> { - ctx.fetch("DELETE FROM id_and_name WHERE id = 'NaN';"); - ctx.fetch("INSERT INTO id_and_name (id, name, power) VALUES (3, 'gohan', 222.1);"); - return null; - }); - } + testdb.database.query(ctx -> { + ctx.fetch("DELETE FROM id_and_name WHERE id = 'NaN';"); + ctx.fetch("INSERT INTO id_and_name (id, name, power) VALUES (3, 'gohan', 222.1);"); + return null; + }); final List recordsAfterLastSync = - MoreIterators.toList(new PostgresSource().read(getXminConfig(PSQL_DB, dbName), configuredCatalog, + MoreIterators.toList(source().read(getXminConfig(), configuredCatalog, Jsons.jsonNode(Collections.singletonList(stateAfterXminSync.get(0))))); setEmittedAtToNull(recordsAfterLastSync); assertThat(filterRecords(recordsAfterLastSync)).containsExactlyElementsOf(NEXT_RECORD_MESSAGES); diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/XminPostgresWithOldServerSourceTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/XminPostgresWithOldServerSourceTest.java index 9edc1ecd91ff..731c3c423471 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/XminPostgresWithOldServerSourceTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/XminPostgresWithOldServerSourceTest.java @@ -12,7 +12,6 @@ import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; -import io.airbyte.cdk.db.Database; import 
io.airbyte.commons.json.Jsons; import io.airbyte.commons.util.MoreIterators; import io.airbyte.protocol.models.v0.AirbyteMessage; @@ -21,17 +20,13 @@ import java.util.Collections; import java.util.List; import java.util.stream.Collectors; -import org.jooq.DSLContext; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; -import org.testcontainers.containers.PostgreSQLContainer; public class XminPostgresWithOldServerSourceTest extends XminPostgresSourceTest { - @BeforeAll - static void init() { - PSQL_DB = new PostgreSQLContainer<>("postgres:9-alpine"); - PSQL_DB.start(); + @Override + protected String getDatabaseImageName() { + return "postgres:9-alpine"; } @Test @@ -44,7 +39,7 @@ void testReadSuccess() throws Exception { .withStreams(CONFIGURED_XMIN_CATALOG.getStreams().stream().filter(s -> s.getStream().getName().equals(STREAM_NAME)).collect( Collectors.toList())); final List recordsFromFirstSync = - MoreIterators.toList(new PostgresSource().read(getXminConfig(PSQL_DB, dbName), configuredCatalog, null)); + MoreIterators.toList(source().read(getXminConfig(), configuredCatalog, null)); setEmittedAtToNull(recordsFromFirstSync); assertThat(filterRecords(recordsFromFirstSync)).containsExactlyElementsOf(INITIAL_RECORD_MESSAGES); @@ -67,7 +62,7 @@ void testReadSuccess() throws Exception { // Read with the final xmin state message should return no data final List syncWithXminStateType = - MoreIterators.toList(new PostgresSource().read(getXminConfig(PSQL_DB, dbName), configuredCatalog, + MoreIterators.toList(source().read(getXminConfig(), configuredCatalog, Jsons.jsonNode(Collections.singletonList(firstSyncStateMessage)))); setEmittedAtToNull(syncWithXminStateType); assertEquals(0, filterRecords(syncWithXminStateType).size()); @@ -81,17 +76,14 @@ void testReadSuccess() throws Exception { // We add some data and perform a third read. 
We should verify that (i) a delete is not captured and // (ii) the new record that is inserted into the // table is read. - try (final DSLContext dslContext = getDslContext(getXminConfig(PSQL_DB, dbName))) { - final Database database = getDatabase(dslContext); - database.query(ctx -> { - ctx.fetch("DELETE FROM id_and_name WHERE id = 'NaN';"); - ctx.fetch("INSERT INTO id_and_name (id, name, power) VALUES (3, 'gohan', 222.1);"); - return null; - }); - } + testdb.database.query(ctx -> { + ctx.fetch("DELETE FROM id_and_name WHERE id = 'NaN';"); + ctx.fetch("INSERT INTO id_and_name (id, name, power) VALUES (3, 'gohan', 222.1);"); + return null; + }); final List recordsAfterLastSync = - MoreIterators.toList(new PostgresSource().read(getXminConfig(PSQL_DB, dbName), configuredCatalog, + MoreIterators.toList(source().read(getXminConfig(), configuredCatalog, Jsons.jsonNode(Collections.singletonList(stateAfterXminSync.get(0))))); setEmittedAtToNull(recordsAfterLastSync); assertThat(filterRecords(recordsAfterLastSync)).containsExactlyElementsOf(NEXT_RECORD_MESSAGES);