From fa0a6375b7e7962f0d07e88bc91925f7b1154d24 Mon Sep 17 00:00:00 2001
From: Teng YU
Date: Thu, 11 Jan 2024 17:42:33 +0100
Subject: [PATCH] Update according to reviews 11/01/2024

---
 docs/src/main/sphinx/connector/snowflake.md   |   2 -
 plugin/trino-snowflake/pom.xml                |   8 +-
 .../plugin/snowflake/SnowflakeClient.java     | 157 +++---------------
 .../snowflake/SnowflakeClientModule.java      |   5 +-
 .../plugin/snowflake/SnowflakeConfig.java     |   7 -
 .../snowflake/BaseSnowflakeConnectorTest.java |   4 -
 .../snowflake/SnowflakeQueryRunner.java       |   2 -
 .../plugin/snowflake/TestSnowflakeConfig.java |   6 +-
 .../snowflake/TestSnowflakeConnectorTest.java |   3 +-
 .../snowflake/TestSnowflakeTypeMapping.java   |   4 -
 10 files changed, 37 insertions(+), 161 deletions(-)

diff --git a/docs/src/main/sphinx/connector/snowflake.md b/docs/src/main/sphinx/connector/snowflake.md
index 579764534e230..1ca16df602f2b 100644
--- a/docs/src/main/sphinx/connector/snowflake.md
+++ b/docs/src/main/sphinx/connector/snowflake.md
@@ -71,8 +71,6 @@ Trino supports the following Snowflake data types:
 | `date`         | `date`        |
 | `time`         | `time`        |
 | `timestampntz` | `timestamp`   |
-| `timestamptz`  | `timestampTZ` |
-| `timestampltz` | `timestampTZ` |
 
 Complete list of [Snowflake data types](https://docs.snowflake.com/en/sql-reference/intro-summary-data-types.html).
 
diff --git a/plugin/trino-snowflake/pom.xml b/plugin/trino-snowflake/pom.xml
index 33f14d2a93990..74fb2e4a12812 100644
--- a/plugin/trino-snowflake/pom.xml
+++ b/plugin/trino-snowflake/pom.xml
@@ -5,7 +5,7 @@
     <parent>
         <groupId>io.trino</groupId>
         <artifactId>trino-root</artifactId>
-        <version>435-SNAPSHOT</version>
+        <version>440-SNAPSHOT</version>
         <relativePath>../../pom.xml</relativePath>
     </parent>
 
@@ -93,6 +93,12 @@
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>junit-extensions</artifactId>
+            <scope>test</scope>
+        </dependency>
+
         <dependency>
             <groupId>io.airlift</groupId>
             <artifactId>testing</artifactId>
diff --git a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClient.java b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClient.java
index c68e786e2efeb..35fcb6dfc4074 100644
--- a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClient.java
+++ b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClient.java
@@ -17,7 +17,6 @@ import com.google.common.collect.ImmutableSet;
 import com.google.inject.Inject;
 import io.airlift.log.Logger;
-import io.airlift.slice.Slices;
 import io.trino.plugin.base.aggregation.AggregateFunctionRewriter;
 import io.trino.plugin.base.aggregation.AggregateFunctionRule;
 import io.trino.plugin.base.expression.ConnectorExpressionRewriter;
@@ -92,10 +91,10 @@ import java.util.function.BiFunction;
 
 import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
+import static io.airlift.slice.Slices.utf8Slice;
 import static io.trino.plugin.jdbc.JdbcErrorCode.JDBC_ERROR;
 import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
-import static io.trino.spi.type.DecimalType.createDecimalType;
+import static io.trino.spi.type.TimestampWithTimeZoneType.MAX_SHORT_PRECISION;
 import static io.trino.spi.type.TimestampWithTimeZoneType.createTimestampWithTimeZoneType;
 import static io.trino.spi.type.Timestamps.MILLISECONDS_PER_SECOND;
 import static io.trino.spi.type.Timestamps.NANOSECONDS_PER_MILLISECOND;
@@ -110,11 +109,11 @@ public class SnowflakeClient
     /* TIME supports an optional precision parameter for fractional seconds, e.g. TIME(3). Time precision can range from 0 (seconds) to 9 (nanoseconds). The default precision is 9.
        All TIME values must be between 00:00:00 and 23:59:59.999999999. TIME internally stores “wallclock” time, and all operations on TIME values are performed without taking any time zone into consideration. */
-    private static final int SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION = 9;
+    private static final int MAX_SUPPORTED_TEMPORAL_PRECISION = 9;
     private static final Logger log = Logger.get(SnowflakeClient.class);
-    private static final DateTimeFormatter SNOWFLAKE_DATETIME_FORMATTER = DateTimeFormatter.ofPattern("y-MM-dd'T'HH:mm:ss.SSSSSSSSSXXX");
+    private static final DateTimeFormatter SNOWFLAKE_DATETIME_FORMATTER = DateTimeFormatter.ofPattern("u-MM-dd'T'HH:mm:ss.SSSSSSSSSXXX");
     private static final DateTimeFormatter SNOWFLAKE_DATE_FORMATTER = DateTimeFormatter.ofPattern("uuuu-MM-dd");
-    private static final DateTimeFormatter SNOWFLAKE_TIMESTAMP_FORMATTER = DateTimeFormatter.ofPattern("y-MM-dd'T'HH:mm:ss.SSSSSSSSS");
+    private static final DateTimeFormatter SNOWFLAKE_TIMESTAMP_FORMATTER = DateTimeFormatter.ofPattern("u-MM-dd'T'HH:mm:ss.SSSSSSSSS");
     private static final DateTimeFormatter SNOWFLAKE_TIME_FORMATTER = DateTimeFormatter.ofPattern("HH:mm:ss.SSSSSSSSS");
 
     private final AggregateFunctionRewriter<JdbcExpression, ?> aggregateFunctionRewriter;
@@ -129,81 +128,6 @@ private interface ColumnMappingFunction
     }
 
     private static final TimeZone UTC_TZ = TimeZone.getTimeZone(ZoneId.of("UTC"));
-    // Mappings for JDBC column types to internal Trino types
-    private static final Map<Integer, ColumnMapping> STANDARD_COLUMN_MAPPINGS = ImmutableMap.<Integer, ColumnMapping>builder()
-            .put(Types.BOOLEAN, StandardColumnMappings.booleanColumnMapping())
-            .put(Types.TINYINT, StandardColumnMappings.tinyintColumnMapping())
-            .put(Types.SMALLINT, StandardColumnMappings.smallintColumnMapping())
-            .put(Types.INTEGER, StandardColumnMappings.integerColumnMapping())
-            .put(Types.BIGINT, StandardColumnMappings.bigintColumnMapping())
-            .put(Types.REAL, StandardColumnMappings.realColumnMapping())
-            .put(Types.DOUBLE, StandardColumnMappings.doubleColumnMapping())
-            .put(Types.FLOAT, StandardColumnMappings.doubleColumnMapping())
-            .put(Types.BINARY, StandardColumnMappings.varbinaryColumnMapping())
-            .put(Types.VARBINARY, StandardColumnMappings.varbinaryColumnMapping())
-            .put(Types.LONGVARBINARY, StandardColumnMappings.varbinaryColumnMapping())
-            .buildOrThrow();
-
-    private static final Map<String, ColumnMappingFunction> SHOWFLAKE_COLUMN_MAPPINGS = ImmutableMap.<String, ColumnMappingFunction>builder()
-            .put("time", typeHandle -> Optional.of(timeColumnMapping(typeHandle)))
-            .put("timestampntz", typeHandle -> Optional.of(timestampColumnMapping(typeHandle)))
-            .put("timestamptz", typeHandle -> Optional.of(timestampTzColumnMapping(typeHandle)))
-            .put("timestampltz", typeHandle -> Optional.of(timestampTzColumnMapping(typeHandle)))
-            .put("date", typeHandle -> Optional.of(ColumnMapping.longMapping(
-                    DateType.DATE,
-                    (resultSet, columnIndex) -> LocalDate.ofEpochDay(resultSet.getLong(columnIndex)).toEpochDay(),
-                    snowFlakeDateWriter())))
-            .put("object", typeHandle -> Optional.of(ColumnMapping.sliceMapping(
-                    createUnboundedVarcharType(),
-                    StandardColumnMappings.varcharReadFunction(createUnboundedVarcharType()),
-                    StandardColumnMappings.varcharWriteFunction(),
-                    PredicatePushdownController.DISABLE_PUSHDOWN)))
-            .put("array", typeHandle -> Optional.of(ColumnMapping.sliceMapping(
-                    createUnboundedVarcharType(),
-                    StandardColumnMappings.varcharReadFunction(createUnboundedVarcharType()),
-                    StandardColumnMappings.varcharWriteFunction(),
-                    PredicatePushdownController.DISABLE_PUSHDOWN)))
-            .put("variant", typeHandle -> Optional.of(ColumnMapping.sliceMapping(
-                    createUnboundedVarcharType(),
-                    variantReadFunction(),
-                    StandardColumnMappings.varcharWriteFunction(),
-                    PredicatePushdownController.FULL_PUSHDOWN)))
-            .put("varchar", typeHandle -> Optional.of(varcharColumnMapping(typeHandle.getRequiredColumnSize())))
-            .put("number", typeHandle -> {
-                int decimalDigits = typeHandle.getRequiredDecimalDigits();
-                int precision = typeHandle.getRequiredColumnSize() + Math.max(-decimalDigits, 0);
-                if (precision > 38) {
-                    return Optional.empty();
-                }
-                return Optional.of(columnMappingPushdown(
-                        StandardColumnMappings.decimalColumnMapping(createDecimalType(precision, Math.max(decimalDigits, 0)), RoundingMode.UNNECESSARY)));
-            })
-            .buildOrThrow();
-
-    // Mappings for internal Trino types to JDBC column types
-    private static final Map<String, WriteMapping> STANDARD_WRITE_MAPPINGS = ImmutableMap.<String, WriteMapping>builder()
-            .put("BooleanType", WriteMapping.booleanMapping("boolean", StandardColumnMappings.booleanWriteFunction()))
-            .put("BigintType", WriteMapping.longMapping("number(19)", StandardColumnMappings.bigintWriteFunction()))
-            .put("IntegerType", WriteMapping.longMapping("number(10)", StandardColumnMappings.integerWriteFunction()))
-            .put("SmallintType", WriteMapping.longMapping("number(5)", StandardColumnMappings.smallintWriteFunction()))
-            .put("TinyintType", WriteMapping.longMapping("number(3)", StandardColumnMappings.tinyintWriteFunction()))
-            .put("DoubleType", WriteMapping.doubleMapping("double precision", StandardColumnMappings.doubleWriteFunction()))
-            .put("RealType", WriteMapping.longMapping("real", StandardColumnMappings.realWriteFunction()))
-            .put("VarbinaryType", WriteMapping.sliceMapping("varbinary", StandardColumnMappings.varbinaryWriteFunction()))
-            .put("DateType", WriteMapping.longMapping("date", snowFlakeDateWriter()))
-            .buildOrThrow();
-
-    private static final Map<String, WriteMappingFunction> SNOWFLAKE_WRITE_MAPPINGS = ImmutableMap.<String, WriteMappingFunction>builder()
-            .put("TimeType", type -> WriteMapping.longMapping("time", SnowflakeClient.snowFlaketimeWriter(type)))
-            .put("ShortTimestampType", SnowflakeClient::snowFlakeTimestampWriter)
-            .put("ShortTimestampWithTimeZoneType", SnowflakeClient::snowFlakeTimestampWithTZWriter)
-            .put("LongTimestampType", SnowflakeClient::snowFlakeTimestampWithTZWriter)
-            .put("LongTimestampWithTimeZoneType", SnowflakeClient::snowFlakeTimestampWithTZWriter)
-            .put("VarcharType", SnowflakeClient::snowFlakeVarCharWriter)
-            .put("CharType", SnowflakeClient::snowFlakeCharWriter)
-            .put("LongDecimalType", SnowflakeClient::snowFlakeDecimalWriter)
-            .put("ShortDecimalType", SnowflakeClient::snowFlakeDecimalWriter)
-            .buildOrThrow();
 
     @Inject
     public SnowflakeClient(
@@ -232,23 +156,9 @@ public SnowflakeClient(
             .build());
     }
 
-    @Override
-    public void abortReadConnection(Connection connection, ResultSet resultSet)
-            throws SQLException
-    {
-        // Abort connection before closing. Without this, the Snowflake driver
-        // attempts to drain the connection by reading all the results.
-        connection.abort(directExecutor());
-    }
-
     @Override
     public Optional<ColumnMapping> toColumnMapping(ConnectorSession session, Connection connection, JdbcTypeHandle typeHandle)
     {
-        Optional<ColumnMapping> mapping = getForcedMappingToVarchar(typeHandle);
-        if (mapping.isPresent()) {
-            return mapping;
-        }
-
         String jdbcTypeName = typeHandle.getJdbcTypeName()
                 .orElseThrow(() -> new TrinoException(JDBC_ERROR, "Type name is missing: " + typeHandle));
         jdbcTypeName = jdbcTypeName.toLowerCase(Locale.ENGLISH);
@@ -275,18 +185,9 @@ public Optional toColumnMapping(ConnectorSession session, Connect
         }
 
         final Map<String, ColumnMappingFunction> snowflakeColumnMappings = ImmutableMap.<String, ColumnMappingFunction>builder()
-                .put("time", handle -> {
-                    return Optional.of(timeColumnMapping(handle));
-                })
-                .put("date", handle -> {
-                    return Optional.of(ColumnMapping.longMapping(
-                            DateType.DATE, (resultSet, columnIndex) ->
-                                    LocalDate.ofEpochDay(resultSet.getLong(columnIndex)).toEpochDay(),
-                            snowFlakeDateWriter()));
-                })
-                .put("varchar", handle -> {
-                    return Optional.of(varcharColumnMapping(handle.getRequiredColumnSize()));
-                })
+                .put("time", handle -> { return Optional.of(timeColumnMapping(handle.getRequiredDecimalDigits())); })
+                .put("date", handle -> { return Optional.of(ColumnMapping.longMapping(DateType.DATE, (resultSet, columnIndex) -> LocalDate.ofEpochDay(resultSet.getLong(columnIndex)).toEpochDay(), snowFlakeDateWriter())); })
+                .put("varchar", handle -> { return Optional.of(varcharColumnMapping(handle.getRequiredColumnSize())); })
                 .put("number", handle -> {
                     int decimalDigits = handle.getRequiredDecimalDigits();
                     int precision = handle.getRequiredColumnSize() + Math.max(-decimalDigits, 0);
@@ -294,8 +195,7 @@ public Optional toColumnMapping(ConnectorSession session, Connect
                         return Optional.empty();
                     }
                     return Optional.of(columnMappingPushdown(
-                            StandardColumnMappings.decimalColumnMapping(DecimalType.createDecimalType(
-                                    precision, Math.max(decimalDigits, 0)), RoundingMode.UNNECESSARY)));
+                            StandardColumnMappings.decimalColumnMapping(DecimalType.createDecimalType(precision, Math.max(decimalDigits, 0)), RoundingMode.UNNECESSARY)));
                 })
                 .buildOrThrow();
@@ -305,7 +205,7 @@ public Optional toColumnMapping(ConnectorSession session, Connect
         }
 
         // Code should never reach here so throw an error.
-        throw new TrinoException(NOT_SUPPORTED, "SNOWFLAKE_CONNECTOR_COLUMN_TYPE_NOT_SUPPORTED: Unsupported column type(" + type + "):" + jdbcTypeName);
+        throw new TrinoException(NOT_SUPPORTED, "Unsupported column type(" + type + "):" + jdbcTypeName);
     }
 
     @Override
@@ -334,7 +234,7 @@ public WriteMapping toWriteMapping(ConnectorSession session, Type type)
 
         final Map<String, WriteMappingFunction> snowflakeWriteMappings = ImmutableMap.<String, WriteMappingFunction>builder()
                 .put("TimeType", writeType -> {
-                    return WriteMapping.longMapping("time", SnowflakeClient.snowFlaketimeWriter(writeType));
+                    return WriteMapping.longMapping("time", timeWriteFunction(((TimeType) writeType).getPrecision()));
                 })
                 .put("ShortTimestampType", writeType -> {
                     WriteMapping myMap = SnowflakeClient.snowFlakeTimestampWriter(writeType);
@@ -375,7 +275,7 @@ public WriteMapping toWriteMapping(ConnectorSession session, Type type)
             return writeMappingFunction.convert(type);
         }
 
-        throw new TrinoException(NOT_SUPPORTED, "SNOWFLAKE_CONNECTOR_COLUMN_TYPE_NOT_SUPPORTED: Unsupported column type: " + type.getDisplayName() + ", simple:" + simple);
+        throw new TrinoException(NOT_SUPPORTED, "Unsupported column type: " + type.getDisplayName() + ", simple:" + simple);
     }
 
     @Override
@@ -410,7 +310,7 @@ public void setColumnType(ConnectorSession session, JdbcTableHandle handle, Jdbc
 
     private static SliceReadFunction variantReadFunction()
     {
-        return (resultSet, columnIndex) -> Slices.utf8Slice(resultSet.getString(columnIndex).replaceAll("^\"|\"$", ""));
+        return (resultSet, columnIndex) -> utf8Slice(resultSet.getString(columnIndex).replaceAll("^\"|\"$", ""));
     }
 
     private static ColumnMapping columnMappingPushdown(ColumnMapping mapping)
@@ -422,10 +322,9 @@ private static ColumnMapping columnMappingPushdown(ColumnMapping mapping)
         return new ColumnMapping(mapping.getType(), mapping.getReadFunction(), mapping.getWriteFunction(), PredicatePushdownController.FULL_PUSHDOWN);
     }
 
-    private static ColumnMapping timeColumnMapping(JdbcTypeHandle typeHandle)
+    private static ColumnMapping timeColumnMapping(int precision)
     {
-        int precision = typeHandle.getRequiredDecimalDigits();
-        checkArgument(precision <= SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION, "The max timestamp precision in Snowflake is " + SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION);
+        checkArgument(precision <= MAX_SUPPORTED_TEMPORAL_PRECISION, "The max timestamp precision in Snowflake is " + MAX_SUPPORTED_TEMPORAL_PRECISION);
         return ColumnMapping.longMapping(
                 TimeType.createTimeType(precision),
                 (resultSet, columnIndex) -> {
@@ -436,21 +335,15 @@ private static ColumnMapping timeColumnMapping(JdbcTypeHandle typeHandle)
                 PredicatePushdownController.FULL_PUSHDOWN);
     }
 
-    private static LongWriteFunction snowFlaketimeWriter(Type type)
-    {
-        return timeWriteFunction(((TimeType) type).getPrecision());
-    }
-
     private static LongWriteFunction timeWriteFunction(int precision)
     {
-        checkArgument(precision <= SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION, "Unsupported precision: %s", precision);
-        String bindExpression = format("CAST(? AS time(%s))", precision);
+        checkArgument(precision <= MAX_SUPPORTED_TEMPORAL_PRECISION, "Unsupported precision: %s", precision);
         return new LongWriteFunction()
         {
             @Override
             public String getBindExpression()
             {
-                return bindExpression;
+                return format("CAST(? AS time(%s))", precision);
             }
 
             @Override
@@ -474,16 +367,14 @@ private static ColumnMapping timestampTzColumnMapping(JdbcTypeHandle typeHandle)
         String jdbcTypeName = typeHandle.getJdbcTypeName()
                 .orElseThrow(() -> new TrinoException(JDBC_ERROR, "Type name is missing: " + typeHandle));
         int type = typeHandle.getJdbcType();
-        log.debug("timestampTZColumnMapping: jdbcTypeName(%s):%s precision:%s", type, jdbcTypeName, precision);
-
-        if (precision <= 3) {
+        if (precision <= MAX_SHORT_PRECISION) {
             return ColumnMapping.longMapping(
                     createTimestampWithTimeZoneType(precision),
                     (resultSet, columnIndex) -> {
                         ZonedDateTime timestamp = SNOWFLAKE_DATETIME_FORMATTER.parse(resultSet.getString(columnIndex), ZonedDateTime::from);
                         return DateTimeEncoding.packDateTimeWithZone(timestamp.toInstant().toEpochMilli(), timestamp.getZone().getId());
                     },
-                    timestampWithTZWriter(),
+                    timestampWithTimezoneWriteFunction(),
                     PredicatePushdownController.FULL_PUSHDOWN);
         }
         else {
@@ -568,8 +459,8 @@ private static WriteMapping snowFlakeTimestampWriter(Type type)
     {
         TimestampType timestampType = (TimestampType) type;
         checkArgument(
-                timestampType.getPrecision() <= SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION,
-                "The max timestamp precision in Snowflake is " + SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION);
+                timestampType.getPrecision() <= MAX_SUPPORTED_TEMPORAL_PRECISION,
+                "The max timestamp precision in Snowflake is " + MAX_SUPPORTED_TEMPORAL_PRECISION);
 
         if (timestampType.isShort()) {
             return WriteMapping.longMapping(format("timestamp_ntz(%d)", timestampType.getPrecision()), timestampWriteFunction());
@@ -593,14 +484,14 @@ private static WriteMapping snowFlakeTimestampWithTZWriter(Type type)
     {
         TimestampWithTimeZoneType timeTZType = (TimestampWithTimeZoneType) type;
-        checkArgument(timeTZType.getPrecision() <= SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION, "Max Snowflake precision is is " + SNOWFLAKE_MAX_SUPPORTED_TIMESTAMP_PRECISION);
+        checkArgument(timeTZType.getPrecision() <= MAX_SUPPORTED_TEMPORAL_PRECISION, "Max Snowflake precision is " + MAX_SUPPORTED_TEMPORAL_PRECISION);
 
         if (timeTZType.isShort()) {
-            return WriteMapping.longMapping(format("timestamp_tz(%d)", timeTZType.getPrecision()), timestampWithTZWriter());
+            return WriteMapping.longMapping(format("timestamp_tz(%d)", timeTZType.getPrecision()), timestampWithTimezoneWriteFunction());
         }
         return WriteMapping.objectMapping(format("timestamp_tz(%d)", timeTZType.getPrecision()), longTimestampWithTzWriteFunction());
     }
 
-    private static LongWriteFunction timestampWithTZWriter()
+    private static LongWriteFunction timestampWithTimezoneWriteFunction()
     {
         return (statement, index, encodedTimeWithZone) -> {
             Instant instant = Instant.ofEpochMilli(DateTimeEncoding.unpackMillisUtc(encodedTimeWithZone));
diff --git a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClientModule.java b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClientModule.java
index 19fc358471915..587ca8d11faab 100644
--- a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClientModule.java
+++ b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeClientModule.java
@@ -18,6 +18,7 @@ import com.google.inject.Provides;
 import com.google.inject.Scopes;
 import com.google.inject.Singleton;
+import io.opentelemetry.api.OpenTelemetry;
 import io.trino.plugin.jdbc.BaseJdbcConfig;
 import io.trino.plugin.jdbc.ConnectionFactory;
 import io.trino.plugin.jdbc.DriverConnectionFactory;
@@ -49,7 +50,7 @@ public void configure(Binder binder)
     @Singleton
     @Provides
     @ForBaseJdbc
-    public ConnectionFactory getConnectionFactory(BaseJdbcConfig baseJdbcConfig, SnowflakeConfig snowflakeConfig, CredentialProvider credentialProvider)
+    public ConnectionFactory getConnectionFactory(BaseJdbcConfig baseJdbcConfig, SnowflakeConfig snowflakeConfig, CredentialProvider credentialProvider, OpenTelemetry openTelemetry)
             throws MalformedURLException
     {
         Properties properties = new Properties();
@@ -90,6 +91,6 @@ public ConnectionFactory getConnectionFactory(BaseJdbcConfig baseJdbcConfig, Sno
             }
         }
 
-        return new DriverConnectionFactory(new SnowflakeDriver(), baseJdbcConfig.getConnectionUrl(), properties, credentialProvider);
+        return new DriverConnectionFactory(new SnowflakeDriver(), baseJdbcConfig.getConnectionUrl(), properties, credentialProvider, openTelemetry);
     }
 }
diff --git a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeConfig.java b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeConfig.java
index 6dbf125201774..c002728f85b76 100644
--- a/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeConfig.java
+++ b/plugin/trino-snowflake/src/main/java/io/trino/plugin/snowflake/SnowflakeConfig.java
@@ -79,13 +79,6 @@ public Optional getTimestampNoTimezoneAsUTC()
         return Optional.ofNullable(timestampNoTimezoneAsUTC);
     }
 
-    @Config("snowflake.timestamp-no-timezone-as-utc")
-    public SnowflakeConfig setTimestampNoTimezoneAsUTC(Boolean timestampNoTimezoneAsUTC)
-    {
-        this.timestampNoTimezoneAsUTC = timestampNoTimezoneAsUTC;
-        return this;
-    }
-
     public Optional<String> getHTTPProxy()
     {
         return Optional.ofNullable(httpProxy);
diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/BaseSnowflakeConnectorTest.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/BaseSnowflakeConnectorTest.java
index b547624cebeac..0b64ddd61ee1c 100644
--- a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/BaseSnowflakeConnectorTest.java
+++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/BaseSnowflakeConnectorTest.java
@@ -59,12 +59,10 @@ protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior)
                 return false;
             case SUPPORTS_COMMENT_ON_COLUMN:
             case SUPPORTS_ADD_COLUMN_WITH_COMMENT:
-            case SUPPORTS_COMMENT_ON_TABLE:
             case SUPPORTS_CREATE_TABLE_WITH_TABLE_COMMENT:
             case SUPPORTS_CREATE_TABLE_WITH_COLUMN_COMMENT:
             case SUPPORTS_SET_COLUMN_TYPE:
                 return false;
-            case SUPPORTS_DROP_FIELD:
            case SUPPORTS_ROW_TYPE:
             case SUPPORTS_ARRAY:
                 return false;
@@ -323,7 +321,6 @@ public void testCreateTableAsSelect()
                 "SELECT 1234567890, 123",
                 "SELECT count(*) + 1 FROM nation");
 
-        // TODO: BigQuery throws table not found at BigQueryClient.insert if we reuse the same table name
         tableName = "test_ctas" + randomNameSuffix();
         assertExplainAnalyze("EXPLAIN ANALYZE CREATE TABLE " + tableName + " AS SELECT name FROM nation");
         assertQuery("SELECT * from " + tableName, "SELECT name FROM nation");
@@ -357,7 +354,6 @@ public void testCreateTable()
         assertQueryFails("CREATE TABLE " + tableName + " (a bad_type)", ".* Unknown type 'bad_type' for column 'a'");
         assertFalse(getQueryRunner().tableExists(getSession(), tableName));
 
-        // TODO (https://github.com/trinodb/trino/issues/5901) revert to longer name when Oracle version is updated
         tableName = "test_cr_not_exists_" + randomNameSuffix();
         assertUpdate("CREATE TABLE " + tableName + " (a bigint, b varchar(50), c double)");
         assertTrue(getQueryRunner().tableExists(getSession(), tableName));
diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/SnowflakeQueryRunner.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/SnowflakeQueryRunner.java
index a50debaf003b6..2f877068f88af 100644
--- a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/SnowflakeQueryRunner.java
+++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/SnowflakeQueryRunner.java
@@ -36,7 +36,6 @@ public final class SnowflakeQueryRunner
     private SnowflakeQueryRunner() {}
 
     public static DistributedQueryRunner createSnowflakeQueryRunner(
-            TestingSnowflakeServer server,
             Map<String, String> extraProperties,
             Map<String, String> connectorProperties,
             Iterable<TpchTable<?>> tables)
@@ -85,7 +84,6 @@ public static void main(String[] args)
             throws Exception
     {
         DistributedQueryRunner queryRunner = createSnowflakeQueryRunner(
-                new TestingSnowflakeServer(),
                 ImmutableMap.of("http-server.http.port", "8080"),
                 ImmutableMap.of(),
                 ImmutableList.of());
diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConfig.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConfig.java
index eb5c32a3d063c..93b4dc8dff9b0 100644
--- a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConfig.java
+++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConfig.java
@@ -32,8 +32,7 @@ public void testDefaults()
                 .setDatabase(null)
                 .setRole(null)
                 .setWarehouse(null)
-                .setHTTPProxy(null)
-                .setTimestampNoTimezoneAsUTC(null));
+                .setHTTPProxy(null));
     }
 
     @Test
@@ -53,8 +52,7 @@ public void testExplicitPropertyMappings()
                 .setDatabase("MYDATABASE")
                 .setRole("MYROLE")
                 .setWarehouse("MYWAREHOUSE")
-                .setHTTPProxy("MYPROXY")
-                .setTimestampNoTimezoneAsUTC(true);
+                .setHTTPProxy("MYPROXY");
 
         assertFullMapping(properties, expected);
     }
diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConnectorTest.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConnectorTest.java
index 8b9b0c78c73b5..b448e5756c0b0 100644
--- a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConnectorTest.java
+++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeConnectorTest.java
@@ -26,8 +26,7 @@ public class TestSnowflakeConnectorTest
     protected QueryRunner createQueryRunner()
             throws Exception
     {
-        server = closeAfterClass(new TestingSnowflakeServer());
-        return createSnowflakeQueryRunner(server, ImmutableMap.of(), ImmutableMap.of(), REQUIRED_TPCH_TABLES);
+        return createSnowflakeQueryRunner(ImmutableMap.of(), ImmutableMap.of(), REQUIRED_TPCH_TABLES);
     }
 
     @Override
diff --git a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeTypeMapping.java b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeTypeMapping.java
index 5377e013a6cd8..1e7a28572b6e8 100644
--- a/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeTypeMapping.java
+++ b/plugin/trino-snowflake/src/test/java/io/trino/plugin/snowflake/TestSnowflakeTypeMapping.java
@@ -65,7 +65,6 @@ public class TestSnowflakeTypeMapping
     @BeforeAll
     public void setUp()
     {
-        String zone = jvmZone.getId();
         checkState(jvmZone.getId().equals("America/Bahia_Banderas"), "Timezone not configured correctly. Add -Duser.timezone=America/Bahia_Banderas to your JVM arguments");
         checkIsGap(jvmZone, LocalDate.of(1970, 1, 1));
         checkIsGap(vilnius, LocalDate.of(1983, 4, 1));
@@ -76,9 +75,7 @@ public void setUp()
     protected QueryRunner createQueryRunner()
             throws Exception
     {
-        snowflakeServer = new TestingSnowflakeServer();
         return createSnowflakeQueryRunner(
-                snowflakeServer,
                 ImmutableMap.of(),
                 ImmutableMap.of(),
                 ImmutableList.of());
@@ -322,7 +319,6 @@ private void testTimestamp(ZoneId sessionZone)
                 .build();
 
         SqlDataTypeTest.create()
-                // after epoch (MariaDb's timestamp type doesn't support values <= epoch)
                 .addRoundTrip("timestamp(3)", "TIMESTAMP '2019-03-18 10:01:17.987'", createTimestampType(3), "TIMESTAMP '2019-03-18 10:01:17.987'")
                 // time doubled in JVM zone
                 .addRoundTrip("timestamp(3)", "TIMESTAMP '2018-10-28 01:33:17.456'", createTimestampType(3), "TIMESTAMP '2018-10-28 01:33:17.456'")