diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/metadata.yaml b/airbyte-integrations/connectors/destination-s3-data-lake/metadata.yaml
index 793e9e54b179..186b6a65c4d8 100644
--- a/airbyte-integrations/connectors/destination-s3-data-lake/metadata.yaml
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/metadata.yaml
@@ -26,7 +26,7 @@ data:
             alias: airbyte-connector-testing-secret-store
   connectorType: destination
   definitionId: 716ca874-520b-4902-9f80-9fad66754b89
-  dockerImageTag: 0.2.9
+  dockerImageTag: 0.2.10
   dockerRepository: airbyte/destination-s3-data-lake
   documentationUrl: https://docs.airbyte.com/integrations/destinations/s3-data-lake
   githubIssueLabel: destination-s3-data-lake
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeTypesComparator.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeTypesComparator.kt
new file mode 100644
index 000000000000..77f3bcea6642
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/main/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeTypesComparator.kt
@@ -0,0 +1,231 @@
+/*
+ * Copyright (c) 2024 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.integrations.destination.s3_data_lake
+
+import jakarta.inject.Singleton
+import org.apache.iceberg.Schema
+import org.apache.iceberg.types.Type
+import org.apache.iceberg.types.Types
+
+/**
+ * Compares two Iceberg [Schema] definitions (including nested structs) to identify:
+ * - New columns that do not exist in the "existing" schema.
+ * - Columns whose data types have changed.
+ * - Columns that no longer exist in the incoming schema (removed).
+ * - Columns that changed from required to optional.
+ */
+@Singleton
+class S3DataLakeTypesComparator {
+
+    companion object {
+        /** Separator used to represent nested field paths: parent~child. */
+        const val PARENT_CHILD_SEPARATOR: Char = '~'
+
+        /**
+         * Returns a fully-qualified field name by appending `child` to `parent` using
+         * [PARENT_CHILD_SEPARATOR]. If `parent` is blank, returns `child` alone.
+         */
+        private fun fullyQualifiedName(parent: String?, child: String): String =
+            if (parent.isNullOrBlank()) child else "$parent$PARENT_CHILD_SEPARATOR$child"
+
+        /**
+         * Splits a fully-qualified name (e.g. `"outer~inner~field"`) into:
+         * ```
+         * parent = "outer~inner"
+         * leaf = "field"
+         * ```
+         * If there's no [PARENT_CHILD_SEPARATOR], then it's top-level:
+         * ```
+         * parent = ""
+         * leaf = "outer"
+         * ```
+         */
+        fun splitIntoParentAndLeaf(fqName: String): Pair<String, String> {
+            val idx = fqName.lastIndexOf(PARENT_CHILD_SEPARATOR)
+            return if (idx < 0) {
+                "" to fqName
+            } else {
+                fqName.substring(0, idx) to fqName.substring(idx + 1)
+            }
+        }
+    }
+
+    /**
+     * A data class representing differences between two Iceberg schemas.
+     *
+     * @property newColumns list of fully-qualified column names that are new in the incoming
+     * schema.
+     * @property updatedDataTypes list of fully-qualified column names whose types differ.
+     * @property removedColumns list of fully-qualified column names that are no longer in the
+     * incoming schema.
+     * @property newlyOptionalColumns list of fully-qualified column names that changed from
+     * required -> optional.
+     */
+    data class ColumnDiff(
+        val newColumns: MutableList<String> = mutableListOf(),
+        val updatedDataTypes: MutableList<String> = mutableListOf(),
+        val removedColumns: MutableList<String> = mutableListOf(),
+        val newlyOptionalColumns: MutableList<String> = mutableListOf()
+    ) {
+        fun hasChanges(): Boolean {
+            return newColumns.isNotEmpty() ||
+                updatedDataTypes.isNotEmpty() ||
+                removedColumns.isNotEmpty() ||
+                newlyOptionalColumns.isNotEmpty()
+        }
+    }
+
+    /**
+     * Compares [incomingSchema] with [existingSchema], returning a [ColumnDiff].
+     *
+     * @param incomingSchema the schema of incoming data.
+     * @param existingSchema the schema currently known/used by Iceberg.
+     */
+    fun compareSchemas(incomingSchema: Schema, existingSchema: Schema): ColumnDiff {
+        val diff = ColumnDiff()
+        compareStructFields(
+            parentPath = null,
+            incomingType = incomingSchema.asStruct(),
+            existingType = existingSchema.asStruct(),
+            diff = diff
+        )
+        return diff
+    }
+
+    /**
+     * Recursively compares fields of two struct types, identifying new, updated, or removed
+     * columns, and appending the results to [diff].
+     *
+     * @param parentPath fully-qualified parent path, or `null` if at top-level.
+     * @param incomingType struct type of the incoming schema.
+     * @param existingType struct type of the existing schema.
+     * @param diff the [ColumnDiff] object to be updated.
+     */
+    private fun compareStructFields(
+        parentPath: String?,
+        incomingType: Types.StructType,
+        existingType: Types.StructType,
+        diff: ColumnDiff
+    ) {
+        val incomingFieldsByName = incomingType.fields().associateBy { it.name() }
+        val existingFieldsByName = existingType.fields().associateBy { it.name() }
+
+        // 1) Identify new and changed fields
+        for ((fieldName, incomingField) in incomingFieldsByName) {
+            val fqName = fullyQualifiedName(parentPath, fieldName)
+            val existingField = existingFieldsByName[fieldName]
+
+            if (existingField == null) {
+                // This column does not exist in the existing schema => new column
+                diff.newColumns.add(fqName)
+            } else {
+                // The column exists in both => check for type differences at top-level
+                if (
+                    parentPath.isNullOrBlank() &&
+                        !typesAreEqual(incomingField.type(), existingField.type())
+                ) {
+                    diff.updatedDataTypes.add(fqName)
+                }
+
+                // Check if it changed from required to optional at top-level
+                val wasRequired = !existingField.isOptional
+                val isNowOptional = incomingField.isOptional
+                if (parentPath.isNullOrBlank() && wasRequired && isNowOptional) {
+                    diff.newlyOptionalColumns.add(fqName)
+                }
+
+                // If both are struct types, recursively compare subfields
+                if (incomingField.type().isStructType && existingField.type().isStructType) {
+                    compareStructFields(
+                        parentPath = fqName,
+                        incomingType = incomingField.type().asStructType(),
+                        existingType = existingField.type().asStructType(),
+                        diff = diff
+                    )
+                }
+            }
+        }
+
+        // 2) Identify removed fields (only at top-level)
+        if (parentPath.isNullOrBlank()) {
+            for ((existingName) in existingFieldsByName) {
+                if (!incomingFieldsByName.containsKey(existingName)) {
+                    val fqName = fullyQualifiedName(parentPath, existingName)
+                    diff.removedColumns.add(fqName)
+                }
+            }
+        }
+    }
+
+    /**
+     * Checks if two Iceberg [Type]s are semantically equal by comparing type IDs and any relevant
+     * sub-properties (e.g., for timestamps, lists, structs).
+     *
+     * @param incomingType the type from the incoming schema.
+     * @param existingType the type from the existing schema.
+     * @return `true` if they are effectively the same type, `false` otherwise.
+     * @throws IllegalArgumentException if an unsupported or unmapped Iceberg type is encountered.
+     */
+    fun typesAreEqual(incomingType: Type, existingType: Type): Boolean {
+        if (existingType.typeId() != incomingType.typeId()) return false
+
+        return when (val typeId = existingType.typeId()) {
+            Type.TypeID.BOOLEAN,
+            Type.TypeID.INTEGER,
+            Type.TypeID.LONG,
+            Type.TypeID.FLOAT,
+            Type.TypeID.DOUBLE,
+            Type.TypeID.DATE,
+            Type.TypeID.TIME,
+            Type.TypeID.STRING -> {
+                // Matching primitive types
+                true
+            }
+            Type.TypeID.TIMESTAMP -> {
+                require(
+                    existingType is Types.TimestampType && incomingType is Types.TimestampType
+                ) { "Expected TIMESTAMP types, got $existingType and $incomingType." }
+                // Must match UTC adjustment or not
+                existingType.shouldAdjustToUTC() == incomingType.shouldAdjustToUTC()
+            }
+            Type.TypeID.LIST -> {
+                require(existingType is Types.ListType && incomingType is Types.ListType) {
+                    "Expected LIST types, but received $existingType and $incomingType."
+                }
+                val sameElementType =
+                    typesAreEqual(incomingType.elementType(), existingType.elementType())
+                sameElementType &&
+                    (existingType.isElementOptional == incomingType.isElementOptional)
+            }
+            Type.TypeID.STRUCT -> {
+                val incomingStructFields =
+                    incomingType.asStructType().fields().associateBy { it.name() }
+                val existingStructFields =
+                    existingType.asStructType().fields().associateBy { it.name() }
+
+                // For all fields in existing, ensure there's a matching field in incoming
+                for ((name, existingField) in existingStructFields) {
+                    val incomingField = incomingStructFields[name] ?: return false
+                    if (existingField.isOptional != incomingField.isOptional) return false
+                    if (!typesAreEqual(incomingField.type(), existingField.type())) return false
+                }
+                // Extra fields in `incoming` do not make the types unequal per se;
+                // for this function's purpose, we only compare against the fields
+                // of the existing schema.
+                true
+            }
+            Type.TypeID.BINARY,
+            Type.TypeID.DECIMAL,
+            Type.TypeID.FIXED,
+            Type.TypeID.UUID,
+            Type.TypeID.MAP,
+            Type.TypeID.TIMESTAMP_NANO -> {
+                throw IllegalArgumentException(
+                    "Unsupported or unmapped Iceberg type: $typeId. Implement handling if needed."
+                )
+            }
+        }
+    }
+}
diff --git a/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeTypesComparatorTest.kt b/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeTypesComparatorTest.kt
new file mode 100644
index 000000000000..6e95e4923e59
--- /dev/null
+++ b/airbyte-integrations/connectors/destination-s3-data-lake/src/test/kotlin/io/airbyte/integrations/destination/s3_data_lake/S3DataLakeTypesComparatorTest.kt
@@ -0,0 +1,462 @@
+/*
+ * Copyright (c) 2024 Airbyte, Inc., all rights reserved.
+ */
+package io.airbyte.integrations.destination.s3_data_lake
+
+import io.airbyte.integrations.destination.s3_data_lake.S3DataLakeTypesComparator.Companion.splitIntoParentAndLeaf
+import org.apache.iceberg.Schema
+import org.apache.iceberg.types.Type
+import org.apache.iceberg.types.Types
+import org.assertj.core.api.Assertions.assertThat
+import org.assertj.core.api.Assertions.assertThatThrownBy
+import org.junit.jupiter.api.Test
+
+/** Comprehensive test suite for [S3DataLakeTypesComparator]. */
+class S3DataLakeTypesComparatorTest {
+
+    private val comparator = S3DataLakeTypesComparator()
+
+    /**
+     * Helper function to create a simple Iceberg [Types.NestedField].
+     *
+     * @param name the field name
+     * @param type the field type
+     * @param isOptional indicates whether the field is optional
+     */
+    private fun field(name: String, type: Type, isOptional: Boolean): Types.NestedField {
+        val fieldId = name.hashCode() and 0x7fffffff // Simple, stable ID generator for test
+        return if (isOptional) {
+            Types.NestedField.optional(fieldId, name, type)
+        } else {
+            Types.NestedField.required(fieldId, name, type)
+        }
+    }
+
+    /** Convenience function to build an Iceberg [Schema] from a list of fields. */
+    private fun buildSchema(vararg fields: Types.NestedField): Schema {
+        return Schema(fields.toList())
+    }
+
+    @Test
+    fun testNoDifferencesForIdenticalSchemas() {
+        val schema =
+            buildSchema(
+                field("id", Types.IntegerType.get(), false),
+                field("name", Types.StringType.get(), true),
+                field("created_at", Types.TimestampType.withoutZone(), false),
+            )
+
+        val diff = comparator.compareSchemas(schema, schema)
+
+        assertThat(diff.newColumns).isEmpty()
+        assertThat(diff.updatedDataTypes).isEmpty()
+        assertThat(diff.removedColumns).isEmpty()
+        assertThat(diff.newlyOptionalColumns).isEmpty()
+    }
+
+    @Test
+    fun testNewColumns() {
+        val existingSchema =
+            buildSchema(
+                field("id", Types.IntegerType.get(), false),
+            )
+        val incomingSchema =
+            buildSchema(
+                field("id", Types.IntegerType.get(), false),
+                field("new_col_1", Types.StringType.get(), true),
+                field("new_col_2", Types.TimestampType.withZone(), true),
+            )
+
+        val diff = comparator.compareSchemas(incomingSchema, existingSchema)
+
+        assertThat(diff.newColumns).containsExactlyInAnyOrder("new_col_1", "new_col_2")
+        assertThat(diff.updatedDataTypes).isEmpty()
+        assertThat(diff.removedColumns).isEmpty()
+        assertThat(diff.newlyOptionalColumns).isEmpty()
+    }
+
+    @Test
+    fun testRemovedColumns() {
+        val existingSchema =
+            buildSchema(
+                field("id", Types.IntegerType.get(), false),
+                field("legacy_col", Types.StringType.get(), true),
+            )
+        val incomingSchema =
+            buildSchema(
+                field("id", Types.IntegerType.get(), false),
+            )
+
+        val diff = comparator.compareSchemas(incomingSchema, existingSchema)
+
+        assertThat(diff.newColumns).isEmpty()
+        assertThat(diff.updatedDataTypes).isEmpty()
+        assertThat(diff.removedColumns).containsExactly("legacy_col")
+        assertThat(diff.newlyOptionalColumns).isEmpty()
+    }
+
+    @Test
+    fun testUpdatedDataTypes() {
+        val existingSchema =
+            buildSchema(
+                field("id", Types.IntegerType.get(), false),
+                field("age", Types.IntegerType.get(), true),
+            )
+        val incomingSchema =
+            buildSchema(
+                field("id", Types.IntegerType.get(), false),
+                // age changes from INTEGER -> LONG
+                field("age", Types.LongType.get(), true),
+            )
+
+        val diff = comparator.compareSchemas(incomingSchema, existingSchema)
+
+        assertThat(diff.newColumns).isEmpty()
+        assertThat(diff.updatedDataTypes).containsExactly("age")
+        assertThat(diff.removedColumns).isEmpty()
+        assertThat(diff.newlyOptionalColumns).isEmpty()
+    }
+
+    @Test
+    fun testNewlyOptionalColumns() {
+        val existingSchema =
+            buildSchema(
+                field("id", Types.IntegerType.get(), false),
+                // name is previously required
+                field("name", Types.StringType.get(), false),
+            )
+        val incomingSchema =
+            buildSchema(
+                field("id", Types.IntegerType.get(), false),
+                // name is now optional
+                field("name", Types.StringType.get(), true),
+            )
+
+        val diff = comparator.compareSchemas(incomingSchema, existingSchema)
+
+        assertThat(diff.newColumns).isEmpty()
+        assertThat(diff.updatedDataTypes).isEmpty()
+        assertThat(diff.removedColumns).isEmpty()
+        // name is newly optional
+        assertThat(diff.newlyOptionalColumns).containsExactly("name")
+    }
+
+    @Test
+    fun testTimestampTypeWithZoneVersusWithoutZone() {
+        val existingSchema =
+            buildSchema(
+                // with UTC adjustment
+                field("timestamp_col", Types.TimestampType.withZone(), true),
+            )
+        val incomingSchema =
+            buildSchema(
+                // without UTC adjustment
+                field("timestamp_col", Types.TimestampType.withoutZone(), true),
+            )
+
+        val diff = comparator.compareSchemas(incomingSchema, existingSchema)
+
+        // The type changes with respect to shouldAdjustToUTC()
+        assertThat(diff.updatedDataTypes).containsExactly("timestamp_col")
+        assertThat(diff.newColumns).isEmpty()
+        assertThat(diff.removedColumns).isEmpty()
+        assertThat(diff.newlyOptionalColumns).isEmpty()
+    }
+
+    @Test
+    fun testListTypeElementChanged() {
+        val existingSchema =
+            buildSchema(
+                field(
+                    "tags",
+                    Types.ListType.ofRequired(
+                        100,
+                        Types.StringType.get(),
+                    ),
+                    true,
+                ),
+            )
+        val incomingSchema =
+            buildSchema(
+                // element type changed from String to Integer
+                field(
+                    "tags",
+                    Types.ListType.ofRequired(
+                        100,
+                        Types.IntegerType.get(),
+                    ),
+                    true,
+                ),
+            )
+
+        val diff = comparator.compareSchemas(incomingSchema, existingSchema)
+
+        assertThat(diff.updatedDataTypes).containsExactly("tags")
+        assertThat(diff.newColumns).isEmpty()
+        assertThat(diff.removedColumns).isEmpty()
+        assertThat(diff.newlyOptionalColumns).isEmpty()
+    }
+
+    @Test
+    fun testListTypeElementOptionalityChanged() {
+        val existingSchema =
+            buildSchema(
+                field(
+                    "values",
+                    Types.ListType.ofRequired(
+                        101,
+                        Types.StringType.get(),
+                    ),
+                    false,
+                ),
+            )
+        val incomingSchema =
+            buildSchema(
+                // same element type, but changed from required -> optional
+                field(
+                    "values",
+                    Types.ListType.ofOptional(
+                        101,
+                        Types.StringType.get(),
+                    ),
+                    false,
+                ),
+            )
+
+        val diff = comparator.compareSchemas(incomingSchema, existingSchema)
+
+        // This appears as a type update because list element optionality changed
+        assertThat(diff.updatedDataTypes).containsExactly("values")
+        assertThat(diff.newColumns).isEmpty()
+        assertThat(diff.removedColumns).isEmpty()
+        assertThat(diff.newlyOptionalColumns).isEmpty()
+    }
+
+    @Test
+    fun testStructFieldChanged() {
+        val existingStructType =
+            Types.StructType.of(
+                field("nested_id", Types.IntegerType.get(), false),
+                field("nested_name", Types.StringType.get(), true),
+            )
+        val incomingStructType =
+            Types.StructType.of(
+                // nested_id changes from Integer to Long
+                field("nested_id", Types.LongType.get(), false),
+                field("nested_name", Types.StringType.get(), true),
+            )
+
+        val existingSchema =
+            buildSchema(
+                field("user_info", existingStructType, true),
+            )
+        val incomingSchema =
+            buildSchema(
+                field("user_info", incomingStructType, true),
+            )
+
+        val diff = comparator.compareSchemas(incomingSchema, existingSchema)
+
+        assertThat(diff.updatedDataTypes).containsExactly("user_info")
+        assertThat(diff.newColumns).isEmpty()
+        assertThat(diff.removedColumns).isEmpty()
+        assertThat(diff.newlyOptionalColumns).isEmpty()
+    }
+
+    @Test
+    fun testStructFieldRenamed() {
+        val existingStructType =
+            Types.StructType.of(
+                field("nested_id", Types.IntegerType.get(), false),
+            )
+        val incomingStructType =
+            Types.StructType.of(
+                // renamed from nested_id -> nested_identifier
+                field("nested_identifier", Types.IntegerType.get(), false),
+            )
+
+        val existingSchema =
+            buildSchema(
+                field("user_info", existingStructType, false),
+            )
+        val incomingSchema =
+            buildSchema(
+                field("user_info", incomingStructType, false),
+            )
+
+        val diff = comparator.compareSchemas(incomingSchema, existingSchema)
+
+        // Because the struct’s fields differ by name, the entire struct is considered different
+        assertThat(diff.updatedDataTypes).containsExactly("user_info")
+        // The renamed field is effectively a new column in nested context
+        assertThat(diff.newColumns).containsExactly("user_info~nested_identifier")
+        assertThat(diff.removedColumns).isEmpty()
+        assertThat(diff.newlyOptionalColumns).isEmpty()
+    }
+
+    @Test
+    fun testStructFieldAdded() {
+        val existingStructType =
+            Types.StructType.of(
+                field("nested_id", Types.IntegerType.get(), false),
+                field(
+                    "nested_struct",
+                    Types.StructType.of(
+                        field("nested_struct_id", Types.IntegerType.get(), false),
+                    ),
+                    false,
+                ),
+            )
+        val incomingStructType =
+            Types.StructType.of(
+                field("nested_id", Types.IntegerType.get(), false),
+                field("new_id", Types.IntegerType.get(), false),
+                field(
+                    "nested_struct",
+                    Types.StructType.of(
+                        field("nested_struct_id", Types.IntegerType.get(), false),
+                        field("nested_struct_new_id", Types.IntegerType.get(), false),
+                    ),
+                    false,
+                ),
+            )
+
+        val existingSchema =
+            buildSchema(
+                field("user_info", existingStructType, false),
+            )
+        val incomingSchema =
+            buildSchema(
+                field("user_info", incomingStructType, false),
+            )
+
+        val diff = comparator.compareSchemas(incomingSchema, existingSchema)
+
+        assertThat(diff.updatedDataTypes).isEmpty()
+        assertThat(diff.newColumns)
+            .containsExactlyInAnyOrder(
+                "user_info~new_id",
+                "user_info~nested_struct~nested_struct_new_id",
+            )
+        assertThat(diff.removedColumns).isEmpty()
+        assertThat(diff.newlyOptionalColumns).isEmpty()
+    }
+
+    @Test
+    fun testMultipleDifferences() {
+        val existingSchema =
+            buildSchema(
+                // 1) remove_me -> will be removed
+                field("remove_me", Types.StringType.get(), true),
+                // 2) keep_optional -> remains as is
+                field("keep_optional", Types.StringType.get(), true),
+                // 3) make_optional -> changes from required to optional
+                field("make_optional", Types.IntegerType.get(), false),
+                // 4) type_change -> changes from INT to LONG
+                field("type_change", Types.IntegerType.get(), false),
+            )
+        val incomingSchema =
+            buildSchema(
+                // remove_me is missing => REMOVED
+                field("keep_optional", Types.StringType.get(), true),
+                field("make_optional", Types.IntegerType.get(), true),
+                field("type_change", Types.LongType.get(), false),
+                // brand_new is newly added
+                field("brand_new", Types.FloatType.get(), true),
+            )
+
+        val diff = comparator.compareSchemas(incomingSchema, existingSchema)
+
+        assertThat(diff.newColumns).containsExactly("brand_new")
+        assertThat(diff.updatedDataTypes).containsExactly("type_change")
+        assertThat(diff.removedColumns).containsExactly("remove_me")
+        assertThat(diff.newlyOptionalColumns).containsExactly("make_optional")
+    }
+
+    @Test
+    fun testUnsupportedTypeBinary() {
+        val existingSchema =
+            buildSchema(
+                field("binary_col", Types.BinaryType.get(), false),
+            )
+        val incomingSchema =
+            buildSchema(
+                field("binary_col", Types.BinaryType.get(), false),
+            )
+
+        // The code in typesAreEqual() throws for TypeID.BINARY
+        assertThatThrownBy { comparator.compareSchemas(incomingSchema, existingSchema) }
+            .isInstanceOf(IllegalArgumentException::class.java)
+            .hasMessageContaining("Unsupported or unmapped Iceberg type: BINARY")
+    }
+
+    @Test
+    fun testUnsupportedTypeDecimal() {
+        val existingSchema =
+            buildSchema(
+                field("decimal_col", Types.DecimalType.of(10, 2), false),
+            )
+        val incomingSchema =
+            buildSchema(
+                field("decimal_col", Types.DecimalType.of(10, 2), false),
+            )
+
+        // The code in typesAreEqual() throws for TypeID.DECIMAL
+        assertThatThrownBy { comparator.compareSchemas(incomingSchema, existingSchema) }
+            .isInstanceOf(IllegalArgumentException::class.java)
+            .hasMessageContaining("Unsupported or unmapped Iceberg type: DECIMAL")
+    }
+
+    @Test
+    fun testSplitWithNoSeparatorReturnsEmptyParentAndFullNameAsLeaf() {
+        val (parent, leaf) = splitIntoParentAndLeaf("field")
+        assertThat(parent).isEmpty()
+        assertThat(leaf).isEqualTo("field")
+    }
+
+    @Test
+    fun testSplitWithSingleSeparatorReturnsExpectedParentAndLeaf() {
+        val (parent, leaf) = splitIntoParentAndLeaf("outer~field")
+        assertThat(parent).isEqualTo("outer")
+        assertThat(leaf).isEqualTo("field")
+    }
+
+    @Test
+    fun testSplitWithMultipleSeparatorsUsesLastSeparatorForSplitting() {
+        // "outer~inner~field" => parent = "outer~inner", leaf = "field"
+        val (parent, leaf) = splitIntoParentAndLeaf("outer~inner~field")
+        assertThat(parent).isEqualTo("outer~inner")
+        assertThat(leaf).isEqualTo("field")
+    }
+
+    @Test
+    fun testSplitStringEndingInSeparatorHasEmptyLeaf() {
+        // "outer~inner~" => parent = "outer~inner", leaf = ""
+        val (parent, leaf) = splitIntoParentAndLeaf("outer~inner~")
+        assertThat(parent).isEqualTo("outer~inner")
+        assertThat(leaf).isEmpty()
+    }
+
+    @Test
+    fun testSplitStringBeginningWithSeparatorHasEmptyParent() {
+        // "~innerField" => parent = "", leaf = "innerField"
+        val (parent, leaf) = splitIntoParentAndLeaf("~innerField")
+        assertThat(parent).isEmpty()
+        assertThat(leaf).isEqualTo("innerField")
+    }
+
+    @Test
+    fun testSplitStringThatIsOnlySeparator() {
+        // "~" => parent = "", leaf = ""
+        val (parent, leaf) = splitIntoParentAndLeaf("~")
+        assertThat(parent).isEmpty()
+        assertThat(leaf).isEmpty()
+    }
+
+    @Test
+    fun testSplitEmptyString() {
+        val (parent, leaf) = splitIntoParentAndLeaf("")
+        assertThat(parent).isEmpty()
+        assertThat(leaf).isEmpty()
+    }
+}
diff --git a/docs/integrations/destinations/s3-data-lake.md b/docs/integrations/destinations/s3-data-lake.md
index 15c0008db508..2accb944b79d 100644
--- a/docs/integrations/destinations/s3-data-lake.md
+++ b/docs/integrations/destinations/s3-data-lake.md
@@ -17,8 +17,9 @@ for more information.
 
 | Version | Date       | Pull Request                                                  | Subject                                       |
 |:--------|:-----------|:------------------------------------------------------------|:---------------------------------------------|
+| 0.2.10  | 2025-01-09 | [\#50400](https://github.com/airbytehq/airbyte/pull/50400)   | Add S3DataLakeTypesComparator                 |
 | 0.2.9   | 2025-01-09 | [\#51022](https://github.com/airbytehq/airbyte/pull/51022)   | Rename all classes and files from Iceberg V2  |
-| 0.2.8   | 2025-01-09 | [\#51012](https://github.com/airbytehq/airbyte/pull/51012)   | Rename/Cleanup package name from Iceberg V2   |
+| 0.2.8   | 2025-01-09 | [\#51012](https://github.com/airbytehq/airbyte/pull/51012)   | Rename/Cleanup package from Iceberg V2        |
 | 0.2.7   | 2025-01-09 | [\#50957](https://github.com/airbytehq/airbyte/pull/50957)   | Add support for GLUE RBAC (Assume role)       |
 | 0.2.6   | 2025-01-08 | [\#50991](https://github.com/airbytehq/airbyte/pull/50991)   | Initial public release.                       |
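For reviewers who want to see the new comparator in action, here is a minimal usage sketch (not part of the PR). It relies only on the `S3DataLakeTypesComparator` API added above plus Iceberg's `Schema` and `Types`; the example column names (`id`, `name`, `created_at`) and the `main` wrapper are made up for illustration.

```kotlin
import io.airbyte.integrations.destination.s3_data_lake.S3DataLakeTypesComparator
import org.apache.iceberg.Schema
import org.apache.iceberg.types.Types

fun main() {
    // Existing table schema: id (required long), name (required string).
    val existingSchema =
        Schema(
            Types.NestedField.required(1, "id", Types.LongType.get()),
            Types.NestedField.required(2, "name", Types.StringType.get()),
        )
    // Incoming schema: name became optional, created_at is brand new, id is unchanged.
    val incomingSchema =
        Schema(
            Types.NestedField.required(1, "id", Types.LongType.get()),
            Types.NestedField.optional(2, "name", Types.StringType.get()),
            Types.NestedField.optional(3, "created_at", Types.TimestampType.withZone()),
        )

    val diff = S3DataLakeTypesComparator().compareSchemas(incomingSchema, existingSchema)

    if (diff.hasChanges()) {
        println("newColumns=${diff.newColumns}")                     // [created_at]
        println("updatedDataTypes=${diff.updatedDataTypes}")         // []
        println("removedColumns=${diff.removedColumns}")             // []
        println("newlyOptionalColumns=${diff.newlyOptionalColumns}") // [name]
    }

    // Nested columns are reported as parent~child paths; they can be split back apart.
    val (parent, leaf) = S3DataLakeTypesComparator.splitIntoParentAndLeaf("user_info~nested_id")
    println("parent=$parent, leaf=$leaf") // parent=user_info, leaf=nested_id
}
```

A caller (e.g. the connector's schema-evolution path) would presumably react to each bucket of the returned `ColumnDiff` (add new columns, update changed types, relax nullability), but that wiring is outside this diff.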