diff --git a/.github/workflows/lth.yml b/.github/workflows/lth.yml
index 781077b2..ed598d95 100644
--- a/.github/workflows/lth.yml
+++ b/.github/workflows/lth.yml
@@ -21,6 +21,7 @@ jobs:
       WORKSPACE_ID: ${{ secrets.TH_DATABRICKS_WORKSPACE_ID }}
     strategy:
+      max-parallel: 1
       matrix:
         liquibase-support-level: [Foundational, Contributed, Advanced] # Define the different test levels to run
       fail-fast: false # Set fail-fast to false to run all test levels even if some of them fail
diff --git a/README.md b/README.md
index 4be98846..c8f795f7 100644
--- a/README.md
+++ b/README.md
@@ -18,40 +18,45 @@ If hive_metastore is used, this is not tested and may not provide all the below
 1. Add unit tests with liquibase test harness - Cody Davis - DONE
 2. Pass Foundational Test Harness - Cody Davis - DONE 4/1/2023
-3. Pass Contributed Test Harness - Cody Davis - IN PROGRESS 9/15/2023
-4. Pass Advanced Test Harness - Cody Davis - IN PROGRESS
+3. Pass Contributed Test Harness - Cody Davis - DONE 9/15/2023
+4. Pass Advanced Test Harness - Cody Davis - IN PROGRESS (3/6 tests passing)
 
 ## Currently Supported Change Types:
-1. createTable/dropTable
-2. addColumn/dropColumn
-3. addPrimaryKey/dropPrimaryKey
-4. addForeignKey/dropForeignKey
-5. addNotNullConstraint/dropNotNullConstraint
-6. createTable/createTableDataTypeText/createTableTimestamp/dropTable
-7. createView/dropView
-8. dropAllForeignKeyConstraints
-9. createView/dropView
-10. setTableRemarks
-11. setColumnRemarks
-12. setViewRemarks (set in TBLPROPERTIES ('comment' = ''))
-13. executeCommand
-14. mergeColumns
-15. modifySql
-16. renameColumn
-17. renameView
-18. sql
-19. sqlFile
-20. Change Data Test: apply delete
-21. Change Data Test: apply insert
-22. Change Data Test: apply loadData
-
+1. [x] createTable/dropTable
+2. [x] addColumn/dropColumn
+3. [x] addPrimaryKey/dropPrimaryKey
+4. [x] addForeignKey/dropForeignKey
+5. [x] addNotNullConstraint/dropNotNullConstraint
+6. [x] createTable/createTableDataTypeText/createTableTimestamp/dropTable
+7. [x] createView/dropView
+8. [x] dropAllForeignKeyConstraints
+9. [x] createView/dropView
+10. [x] setTableRemarks
+11. [x] setColumnRemarks
+12. [x] setViewRemarks (set in TBLPROPERTIES ('comment' = ''))
+13. [x] executeCommand
+14. [x] mergeColumns
+15. [x] modifySql
+16. [x] renameColumn
+17. [x] renameView
+18. [x] sql
+19. [x] sqlFile
+20. [x] Change Data Test: apply delete
+21. [x] Change Data Test: apply insert
+22. [x] Change Data Test: apply loadData
+23. [x] Change Data Test: apply loadDataUpdate
+
+
+## Remaining Required Change Types to Finish in Advanced
+1. [ ]
+2. [ ]
+3. [ ]
 
 ## Remaining Required Change Types to Finish in Base/Contributed
-1. createFunction/dropFunction - in Liquibase Pro, should work in Databricks, but change type not accessible from Liquibase Core
-2. addCheckConstraint/dropCheckConstraint - in Liquibase Pro, should work in Databricks, but change type not accessible from Liquibase Core
-3. addLookupTable (executing out of order/dropping FK before creation)
-4. Change Data Test: apply loadUpdateData
+1. [ ] (nice to have, not required) createFunction/dropFunction - in Liquibase Pro, should work in Databricks, but change type not accessible from Liquibase Core
+2. [ ] (nice to have, not required) addCheckConstraint/dropCheckConstraint - in Liquibase Pro, should work in Databricks, but change type not accessible from Liquibase Core
+
+The remaining change types are not relevant to Databricks and have been marked with INVALID TEST
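Before the addLookupTable rework below, a quick orientation: the Databricks version of this change now composes four operations — a Delta CTAS that seeds the lookup table, a NOT NULL constraint, a primary key, and a foreign key from the source column back to the lookup table. Schematically, using hypothetical names (main.schema1, src_t/src_col for the existing table and column, lookup_t/val for the new ones), the generated sequence looks like:

    CREATE TABLE main.schema1.lookup_t USING delta TBLPROPERTIES('delta.feature.allowColumnDefaults' = 'supported', 'delta.columnMapping.mode' = 'name') AS SELECT DISTINCT src_col AS val FROM main.schema1.src_t WHERE src_col IS NOT NULL
    ALTER TABLE main.schema1.lookup_t ALTER COLUMN val SET NOT NULL
    ALTER TABLE main.schema1.lookup_t ADD CONSTRAINT null PRIMARY KEY (val)
    ALTER TABLE main.schema1.src_t ADD CONSTRAINT fk_src_t_lookup_t FOREIGN KEY (src_col) REFERENCES main.schema1.lookup_t (val)

Note the third statement: with the inferred pk_ name removed, AddPrimaryKeyChange is generated without a constraint name, and the updated addLookupTable.sql fixture in this diff literally expects ADD CONSTRAINT null.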
diff --git a/src/main/java/liquibase/ext/databricks/change/addLookupTable/AddLookupTableChangeDatabricks.java b/src/main/java/liquibase/ext/databricks/change/addLookupTable/AddLookupTableChangeDatabricks.java
index c386770d..d65f7ca8 100644
--- a/src/main/java/liquibase/ext/databricks/change/addLookupTable/AddLookupTableChangeDatabricks.java
+++ b/src/main/java/liquibase/ext/databricks/change/addLookupTable/AddLookupTableChangeDatabricks.java
@@ -4,35 +4,39 @@
 import java.util.Arrays;
 import java.util.List;
+import liquibase.change.core.*;
 import liquibase.ext.databricks.database.DatabricksDatabase;
+import liquibase.ext.databricks.change.createTable.CreateTableStatementDatabricks;
+import liquibase.ext.databricks.change.createTable.CreateTableChangeDatabricks;
 import liquibase.Scope;
 import liquibase.change.*;
 import liquibase.database.Database;
+import liquibase.database.core.DB2Database;
 import liquibase.database.core.Db2zDatabase;
+import liquibase.database.core.HsqlDatabase;
+import liquibase.database.core.InformixDatabase;
+import liquibase.database.core.MSSQLDatabase;
+import liquibase.database.core.OracleDatabase;
+import liquibase.database.core.SybaseASADatabase;
+import liquibase.datatype.DataTypeFactory;
 import liquibase.exception.ValidationErrors;
 import liquibase.snapshot.SnapshotGeneratorFactory;
 import liquibase.statement.NotNullConstraint;
 import liquibase.statement.SqlStatement;
 import liquibase.statement.core.CreateTableStatement;
 import liquibase.statement.core.RawSqlStatement;
+import liquibase.statement.core.ReorganizeTableStatement;
 import liquibase.structure.core.Column;
 import liquibase.structure.core.ForeignKey;
 import liquibase.structure.core.Table;
 import liquibase.change.core.AddLookupTableChange;
-import liquibase.change.core.DropForeignKeyConstraintChange;
-import liquibase.change.core.DropTableChange;
-import liquibase.change.core.AddNotNullConstraintChange;
-import liquibase.change.core.AddPrimaryKeyChange;
-import liquibase.change.core.AddForeignKeyConstraintChange;
-
 import static liquibase.statement.SqlStatement.EMPTY_SQL_STATEMENT;
 
 /**
  * Extracts data from an existing column to create a lookup table.
  * A foreign key is created between the old column and the new lookup table.
  */
-
-@DatabaseChange(name = "addLookupTable", priority = ChangeMetaData.PRIORITY_DATABASE +500, appliesTo = "column",
+@DatabaseChange(name = "addLookupTable", priority = DatabricksDatabase.PRIORITY_DATABASE + 500, appliesTo = "column",
         description = "Creates a lookup table containing values stored in a column and creates a foreign key to the new table.")
 public class AddLookupTableChangeDatabricks extends AddLookupTableChange {
@@ -97,7 +101,7 @@ public void setExistingColumnName(String existingColumnName) {
         this.existingColumnName = existingColumnName;
     }
 
-    @DatabaseChangeProperty(since = "3.0", description = "Name of the database catalog for the lookup table")
+    @DatabaseChangeProperty(description = "Name of the database catalog for the lookup table")
     public String getNewTableCatalogName() {
         return newTableCatalogName;
     }
@@ -150,7 +154,7 @@ public String getConstraintName() {
 
     public String getFinalConstraintName() {
         if (constraintName == null) {
-            return ("FK_" + getExistingTableName() + "_" + getNewTableName()).toUpperCase();
+            return ("fk_" + getExistingTableName() + "_" + getNewTableName()).toLowerCase();
         } else {
             return constraintName;
         }
@@ -162,10 +166,7 @@ public void setConstraintName(String constraintName) {
 
     @Override
     public boolean supports(Database database) {
-        if (database instanceof DatabricksDatabase) {
-            return true;
-        }
-        return super.supports(database);
+        return (database instanceof DatabricksDatabase);
     }
 
     @Override
@@ -187,7 +188,6 @@ protected Change[] createInverses() {
 
     @Override
     public SqlStatement[] generateStatements(Database database) {
-        List statements = new ArrayList<>();
 
         String newTableCatalogName = getNewTableCatalogName();
         String newTableSchemaName = getNewTableSchemaName();
@@ -195,35 +195,33 @@ public SqlStatement[] generateStatements(Database database) {
         String existingTableCatalogName = getExistingTableCatalogName();
         String existingTableSchemaName = getExistingTableSchemaName();
 
-        // Step 1: Create table statement CTAS as lookup table
-        SqlStatement[] createTablesSQL = {new RawSqlStatement("CREATE TABLE " + database.escapeTableName(newTableCatalogName, newTableSchemaName, getNewTableName()) + " AS SELECT DISTINCT " + database.escapeObjectName(getExistingColumnName(), Column.class) + " AS " + database.escapeObjectName(getNewColumnName(), Column.class) + " FROM " + database.escapeTableName(existingTableCatalogName, existingTableSchemaName, getExistingTableName()) + " WHERE " + database.escapeObjectName(getExistingColumnName(), Column.class) + " IS NOT NULL")};
-
-        statements.addAll(Arrays.asList(createTablesSQL));
-
-        // Step 2: Add not null constraint to lookup table
-        AddNotNullConstraintChange addNotNullChange = new AddNotNullConstraintChange();
-        addNotNullChange.setSchemaName(newTableSchemaName);
-        addNotNullChange.setTableName(getNewTableName());
-        addNotNullChange.setColumnName(getNewColumnName());
-        addNotNullChange.setColumnDataType(getNewColumnDataType());
-        statements.addAll(Arrays.asList(addNotNullChange.generateStatements(database)));
-
+        SqlStatement[] createTablesSQL = {new RawSqlStatement("CREATE TABLE " + database.escapeTableName(newTableCatalogName, newTableSchemaName, getNewTableName())
+                + " USING delta TBLPROPERTIES('delta.feature.allowColumnDefaults' = 'supported', 'delta.columnMapping.mode' = 'name') "
+                + " AS SELECT DISTINCT " + database.escapeObjectName(getExistingColumnName(), Column.class)
+                + " AS " + database.escapeObjectName(getNewColumnName(), Column.class)
+                + " FROM " + database.escapeTableName(existingTableCatalogName, existingTableSchemaName, getExistingTableName())
+                + " WHERE " + database.escapeObjectName(getExistingColumnName(), Column.class)
+                + " IS NOT NULL")
+        };
 
-        // Step 3: Add Primary Key Constraint to Lookup table
-        // Add a properly named primary key with just the column name + "_pk"
+        List statements = new ArrayList<>(Arrays.asList(createTablesSQL));
 
-        String inferred_pk_name = "pk_" + getNewColumnName();
+        if (!(database instanceof OracleDatabase) && !(database instanceof Db2zDatabase)) {
+            AddNotNullConstraintChange addNotNullChange = new AddNotNullConstraintChange();
+            addNotNullChange.setSchemaName(newTableSchemaName);
+            addNotNullChange.setTableName(getNewTableName());
+            addNotNullChange.setColumnName(getNewColumnName());
+            addNotNullChange.setColumnDataType(getNewColumnDataType());
+            statements.addAll(Arrays.asList(addNotNullChange.generateStatements(database)));
+        }
 
         AddPrimaryKeyChange addPKChange = new AddPrimaryKeyChange();
         addPKChange.setSchemaName(newTableSchemaName);
         addPKChange.setTableName(getNewTableName());
         addPKChange.setColumnNames(getNewColumnName());
-        addPKChange.setConstraintName(inferred_pk_name);
-
         statements.addAll(Arrays.asList(addPKChange.generateStatements(database)));
 
-        // Step 4: Add FK constraint to original table, referencing lookup table
         AddForeignKeyConstraintChange addFKChange = new AddForeignKeyConstraintChange();
         addFKChange.setBaseTableSchemaName(existingTableSchemaName);
         addFKChange.setBaseTableName(getExistingTableName());
@@ -235,7 +233,7 @@ public SqlStatement[] generateStatements(Database database) {
         addFKChange.setConstraintName(getFinalConstraintName());
         statements.addAll(Arrays.asList(addFKChange.generateStatements(database)));
 
-        return statements.toArray(EMPTY_SQL_STATEMENT);
+        return statements.toArray(new SqlStatement[0]);
     }
 
     @Override
@@ -270,4 +268,4 @@ public String getConfirmationMessage() {
     public String getSerializedObjectNamespace() {
         return STANDARD_CHANGELOG_NAMESPACE;
     }
-}
+}
\ No newline at end of file
diff --git a/src/main/java/liquibase/ext/databricks/database/DatabricksDatabase.java b/src/main/java/liquibase/ext/databricks/database/DatabricksDatabase.java
index 90ebd44b..9d0513f9 100644
--- a/src/main/java/liquibase/ext/databricks/database/DatabricksDatabase.java
+++ b/src/main/java/liquibase/ext/databricks/database/DatabricksDatabase.java
@@ -108,6 +108,8 @@ public boolean supportsTablespaces() {
         return false;
     }
 
+    @Override
+    public boolean supportsSequences() { return false; }
 
     @Override
     public String getAutoIncrementClause(final BigInteger startWith, final BigInteger incrementBy, final String generationType, final Boolean defaultOnNull) {
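Two notes on the DatabricksDatabase change above: Databricks SQL has no CREATE SEQUENCE, so the dialect now reports supportsSequences() = false, and auto-increment behavior is instead expressed through Delta identity columns — the same construct the harness fixtures use in their DDL:

    `id` BIGINT NOT NULL GENERATED BY DEFAULT AS IDENTITY(start with 1 increment by 1)

The four new generator classes that follow all gate themselves to Databricks via supports(), and all but the MERGE generator also return DatabricksDatabase.PRIORITY_DATABASE from getPriority() so they outrank the core generators.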
diff --git a/src/main/java/liquibase/ext/databricks/sqlgenerator/AddAutoIncrementGeneratorDatabricks.java b/src/main/java/liquibase/ext/databricks/sqlgenerator/AddAutoIncrementGeneratorDatabricks.java
new file mode 100644
index 00000000..39ac2d5e
--- /dev/null
+++ b/src/main/java/liquibase/ext/databricks/sqlgenerator/AddAutoIncrementGeneratorDatabricks.java
@@ -0,0 +1,28 @@
+package liquibase.ext.databricks.sqlgenerator;
+
+import liquibase.ext.databricks.database.DatabricksDatabase;
+import liquibase.database.Database;
+import liquibase.exception.ValidationErrors;
+import liquibase.sqlgenerator.SqlGeneratorChain;
+import liquibase.sqlgenerator.core.AddAutoIncrementGenerator;
+import liquibase.statement.core.AddAutoIncrementStatement;
+
+public class AddAutoIncrementGeneratorDatabricks extends AddAutoIncrementGenerator {
+
+    @Override
+    public int getPriority() {
+        return DatabricksDatabase.PRIORITY_DATABASE;
+    }
+
+    @Override
+    public boolean supports(AddAutoIncrementStatement statement, Database database) {
+        return super.supports(statement, database)
+                && database instanceof DatabricksDatabase;
+    }
+
+    @Override
+    public ValidationErrors validate(AddAutoIncrementStatement statement,
+                                     Database database, SqlGeneratorChain sqlGeneratorChain) {
+        return new ValidationErrors().addError("Databricks does not support adding AUTO_INCREMENT.");
+    }
+}
diff --git a/src/main/java/liquibase/ext/databricks/sqlgenerator/AddUniqueConstraintGeneratorDatabricks.java b/src/main/java/liquibase/ext/databricks/sqlgenerator/AddUniqueConstraintGeneratorDatabricks.java
new file mode 100644
index 00000000..f6d9a1cf
--- /dev/null
+++ b/src/main/java/liquibase/ext/databricks/sqlgenerator/AddUniqueConstraintGeneratorDatabricks.java
@@ -0,0 +1,30 @@
+package liquibase.ext.databricks.sqlgenerator;
+
+
+import liquibase.ext.databricks.database.DatabricksDatabase;
+import liquibase.database.Database;
+import liquibase.exception.ValidationErrors;
+import liquibase.sqlgenerator.SqlGeneratorChain;
+import liquibase.sqlgenerator.core.AddUniqueConstraintGenerator;
+import liquibase.statement.core.AddUniqueConstraintStatement;
+
+public class AddUniqueConstraintGeneratorDatabricks extends AddUniqueConstraintGenerator {
+
+    @Override
+    public int getPriority() {
+        return DatabricksDatabase.PRIORITY_DATABASE;
+    }
+
+    @Override
+    public boolean supports(AddUniqueConstraintStatement statement, Database database) {
+        return super.supports(statement, database)
+                && database instanceof DatabricksDatabase;
+    }
+
+    @Override
+    public ValidationErrors validate(AddUniqueConstraintStatement statement,
+                                     Database database, SqlGeneratorChain sqlGeneratorChain) {
+        return new ValidationErrors().addError(
+                "Databricks does not support unique constraints.");
+    }
+}
\ No newline at end of file
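The MERGE generator that follows encodes two Databricks quirks noted in its comments: WHEN MATCHED must come before WHEN NOT MATCHED, and the target/source aliases matter. The loadUpdateData.sql fixture added near the end of this diff pins the resulting shape; its first row, reflowed here for readability, is:

    MERGE INTO authors AS target USING (SELECT 1) AS source ON target.id = 1
    WHEN MATCHED THEN UPDATE SET target.first_name = 'Adam',target.last_name = 'Gods',target.email = 'test1@example.com',target.birthdate = '1000-02-27',target.added = '2000-02-04 02:32:00'
    WHEN NOT MATCHED THEN INSERT (id,first_name,last_name,email,birthdate,added) VALUES (1,'Adam','Gods','test1@example.com','1000-02-27','2000-02-04 02:32:00')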
diff --git a/src/main/java/liquibase/ext/databricks/sqlgenerator/InsertOrUpdateGeneratorDatabricks.java b/src/main/java/liquibase/ext/databricks/sqlgenerator/InsertOrUpdateGeneratorDatabricks.java
new file mode 100644
index 00000000..bf7d16c5
--- /dev/null
+++ b/src/main/java/liquibase/ext/databricks/sqlgenerator/InsertOrUpdateGeneratorDatabricks.java
@@ -0,0 +1,119 @@
+package liquibase.ext.databricks.sqlgenerator;
+
+
+import liquibase.database.Database;
+import liquibase.datatype.DataTypeFactory;
+import liquibase.ext.databricks.database.DatabricksDatabase;
+import liquibase.sql.Sql;
+import liquibase.sql.UnparsedSql;
+import liquibase.sqlgenerator.SqlGeneratorChain;
+import liquibase.statement.core.InsertOrUpdateStatement;
+import liquibase.sqlgenerator.core.InsertOrUpdateGenerator;
+import java.util.Date;
+
+public class InsertOrUpdateGeneratorDatabricks extends InsertOrUpdateGenerator {
+
+    @Override
+    public boolean supports(InsertOrUpdateStatement statement, Database database) {
+        return database instanceof DatabricksDatabase;
+    }
+
+    @Override
+    protected String getInsertStatement(InsertOrUpdateStatement insertOrUpdateStatement, Database database, SqlGeneratorChain sqlGeneratorChain) {
+        StringBuilder columns = new StringBuilder();
+        StringBuilder values = new StringBuilder();
+
+        for (String columnKey : insertOrUpdateStatement.getColumnValues().keySet()) {
+            columns.append(",");
+            columns.append(columnKey);
+            values.append(",");
+            values.append(convertToString(insertOrUpdateStatement.getColumnValue(columnKey), database));
+        }
+        columns.deleteCharAt(0);
+        values.deleteCharAt(0);
+        return "INSERT (" + columns + ") VALUES (" + values + ")";
+    }
+
+    @Override
+    protected String getUpdateStatement(InsertOrUpdateStatement insertOrUpdateStatement, Database database, String whereClause, SqlGeneratorChain sqlGeneratorChain) {
+        // The 'whereClause' param is not part of the UPDATE itself (the match predicate is built in getRecordCheck());
+        // it is only inspected here to skip columns that are part of the merge key
+        StringBuilder sql = new StringBuilder("UPDATE SET ");
+
+        for (String columnKey : insertOrUpdateStatement.getColumnValues().keySet()) {
+
+            // Databricks does not support updating an identity column, so don't update a column that is part of the key you're merging on
+            if (insertOrUpdateStatement.getAllowColumnUpdate(columnKey) && !whereClause.contains(columnKey)) {
+                sql.append("target.").append(columnKey).append(" = ");
+                sql.append(convertToString(insertOrUpdateStatement.getColumnValue(columnKey), database));
+                sql.append(",");
+            }
+        }
+        int lastComma = sql.lastIndexOf(",");
+        if (lastComma > -1) {
+            sql.deleteCharAt(lastComma);
+        }
+
+        return sql.toString();
+    }
+
+    @Override
+    protected String getRecordCheck(InsertOrUpdateStatement insertOrUpdateStatement, Database database, String whereClause) {
+        return "MERGE INTO " + insertOrUpdateStatement.getTableName() + " AS target USING (SELECT 1) AS source ON target." + whereClause + " WHEN MATCHED THEN ";
+    }
+
+    @Override
+    protected String getElse(Database database) {
+        return " WHEN NOT MATCHED THEN ";
+    }
+
+    // Copied from liquibase.sqlgenerator.core.InsertOrUpdateGeneratorHsql
+    private String convertToString(Object newValue, Database database) {
+        String sqlString;
+        if ((newValue == null) || "".equals(newValue.toString()) || "NULL".equalsIgnoreCase(newValue.toString())) {
+            sqlString = "NULL";
+        } else if ((newValue instanceof String) && !looksLikeFunctionCall(((String) newValue), database)) {
+            sqlString = "'" + database.escapeStringForDatabase(newValue.toString()) + "'";
+        } else if (newValue instanceof Date) {
+            sqlString = database.getDateLiteral(((Date) newValue));
+        } else if (newValue instanceof Boolean) {
+            if (Boolean.TRUE.equals(newValue)) {
+                sqlString = DataTypeFactory.getInstance().getTrueBooleanValue(database);
+            } else {
+                sqlString = DataTypeFactory.getInstance().getFalseBooleanValue(database);
+            }
+        } else {
+            sqlString = newValue.toString();
+        }
+        return sqlString;
+    }
+
+    // Databricks orders its merge statement a bit differently (WHEN MATCHED must come first) and aliases are important
+    @Override
+    public Sql[] generateSql(InsertOrUpdateStatement insertOrUpdateStatement, Database database, SqlGeneratorChain sqlGeneratorChain) {
+        StringBuilder completeSql = new StringBuilder();
+        String whereClause = getWhereClause(insertOrUpdateStatement, database);
+
+        // Core Merge Operation
+        completeSql.append(getRecordCheck(insertOrUpdateStatement, database, whereClause));
+
+        String updateStatement = getUpdateStatement(insertOrUpdateStatement, database, whereClause, sqlGeneratorChain);
+        completeSql.append(updateStatement);
+
+        // Add the INSERT (WHEN NOT MATCHED) branch to the MERGE unless this is an update-only statement
+        if (!insertOrUpdateStatement.getOnlyUpdate()) {
+            completeSql.append(getElse(database));
+            completeSql.append(getInsertStatement(insertOrUpdateStatement, database, sqlGeneratorChain));
+            completeSql.append(getPostUpdateStatements(database));
+        }
+
+        return new Sql[]{
+                new UnparsedSql(completeSql.toString(), "", getAffectedTable(insertOrUpdateStatement))
+        };
+    }
+}
\ No newline at end of file
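A detail of getUpdateStatement() above worth keeping in mind: columns named in the merge predicate are excluded from the UPDATE SET list (identity/merge-key columns cannot be updated in Databricks), which is why the fixture's UPDATE SET covers first_name through added but never target.id. The guard is a plain whereClause.contains(columnKey) substring test, so a column whose name contains the key column's name (a hypothetical paid column merged ON target.id, for example) would also be skipped silently.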
diff --git a/src/main/java/liquibase/ext/databricks/sqlgenerator/UpdateGeneratorDatabricks.java b/src/main/java/liquibase/ext/databricks/sqlgenerator/UpdateGeneratorDatabricks.java
new file mode 100644
index 00000000..be9d379a
--- /dev/null
+++ b/src/main/java/liquibase/ext/databricks/sqlgenerator/UpdateGeneratorDatabricks.java
@@ -0,0 +1,85 @@
+package liquibase.ext.databricks.sqlgenerator;
+
+
+import liquibase.database.Database;
+import liquibase.datatype.DataTypeFactory;
+import liquibase.sql.Sql;
+import liquibase.sql.UnparsedSql;
+import liquibase.sqlgenerator.SqlGeneratorChain;
+import liquibase.sqlgenerator.core.UpdateGenerator;
+import liquibase.statement.DatabaseFunction;
+import liquibase.statement.core.UpdateStatement;
+import liquibase.util.SqlUtil;
+
+import java.util.Date;
+import liquibase.ext.databricks.database.DatabricksDatabase;
+
+public class UpdateGeneratorDatabricks extends UpdateGenerator {
+
+    public UpdateGeneratorDatabricks() {
+        super();
+    }
+
+    @Override
+    public int getPriority() {
+        return DatabricksDatabase.PRIORITY_DATABASE;
+    }
+
+
+    @Override
+    public Sql[] generateSql(UpdateStatement statement, Database database, SqlGeneratorChain sqlGeneratorChain) {
+        StringBuilder sql = new StringBuilder("UPDATE ")
+                .append(database.escapeTableName(statement.getCatalogName(), statement.getSchemaName(), statement.getTableName()))
+                .append(" SET");
+
+        for (String column : statement.getNewColumnValues().keySet()) {
+            sql.append(" ")
+                    .append(database.escapeColumnName(statement.getCatalogName(), statement.getSchemaName(), statement.getTableName(), column))
+                    .append(" = ")
+                    .append(this.convertToString(statement.getNewColumnValues().get(column), database))
+                    .append(",");
+        }
+
+        int lastComma = sql.lastIndexOf(",");
+        if (lastComma >= 0) {
+            sql.deleteCharAt(lastComma);
+        }
+
+        if (statement.getWhereClause() != null) {
+            sql.append(" WHERE ").append(SqlUtil.replacePredicatePlaceholders(database, statement.getWhereClause(), statement.getWhereColumnNames(), statement.getWhereParameters()));
+        }
+
+        return new Sql[]{
+                new UnparsedSql(sql.toString(), getAffectedTable(statement))
+        };
+    }
+
+    private String convertToString(Object newValue, Database database) {
+        String sqlString;
+        if ((newValue == null) || "NULL".equalsIgnoreCase(newValue.toString())) {
+            sqlString = "NULL";
+        } else if ((newValue instanceof String) && !looksLikeFunctionCall(((String) newValue), database)) {
+            sqlString = DataTypeFactory.getInstance().fromObject(newValue, database).objectToSql(newValue, database);
+        } else if (newValue instanceof Date) {
+            // converting java.util.Date to java.sql.Date
+            Date date = (Date) newValue;
+            if (date.getClass().equals(java.util.Date.class)) {
+                date = new java.sql.Date(date.getTime());
+            }
+
+            sqlString = database.getDateLiteral(date);
+        } else if (newValue instanceof Boolean) {
+            if (((Boolean) newValue)) {
+                sqlString = DataTypeFactory.getInstance().getTrueBooleanValue(database);
+            } else {
+                sqlString = DataTypeFactory.getInstance().getFalseBooleanValue(database);
+            }
+        } else if (newValue instanceof DatabaseFunction) {
+            sqlString = database.generateDatabaseFunctionValue((DatabaseFunction) newValue);
+        } else {
+            sqlString = newValue.toString();
+        }
+        return sqlString;
+    }
+
+}
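New generators only take effect once registered in the ServiceLoader manifest, which the next hunk does. At runtime Liquibase resolves the winner through SqlGeneratorFactory: the registered generator with the highest getPriority() whose supports() accepts the statement/database pair. A minimal sketch of that resolution — not part of this PR, and the class name GeneratorResolutionSketch is hypothetical; UpdateStatement, SqlGeneratorFactory, and Sql are core Liquibase types:

    import liquibase.database.Database;
    import liquibase.ext.databricks.database.DatabricksDatabase;
    import liquibase.sql.Sql;
    import liquibase.sqlgenerator.SqlGeneratorFactory;
    import liquibase.statement.core.UpdateStatement;

    public class GeneratorResolutionSketch {
        public static void main(String[] args) {
            // With the manifest entry below in place, this lookup should resolve to
            // UpdateGeneratorDatabricks rather than the core UpdateGenerator.
            Database database = new DatabricksDatabase();
            UpdateStatement update = new UpdateStatement(null, "liquibase_harness_test_ds", "authors");
            update.addNewColumnValue("first_name", "Adam");
            update.setWhereClause("id = 1");
            Sql[] sql = SqlGeneratorFactory.getInstance().generateSql(update, database);
            System.out.println(sql[0].toSql());
        }
    }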
diff --git a/src/main/resources/META-INF/services/liquibase.sqlgenerator.SqlGenerator b/src/main/resources/META-INF/services/liquibase.sqlgenerator.SqlGenerator
index a2593e80..502ae0c6 100644
--- a/src/main/resources/META-INF/services/liquibase.sqlgenerator.SqlGenerator
+++ b/src/main/resources/META-INF/services/liquibase.sqlgenerator.SqlGenerator
@@ -11,4 +11,8 @@ liquibase.ext.databricks.sqlgenerator.RenameViewGeneratorDatabricks
 liquibase.ext.databricks.sqlgenerator.RenameColumnGeneratorDatabricks
 liquibase.ext.databricks.sqlgenerator.AddPrimaryKeyGeneratorDatabricks
 liquibase.ext.databricks.sqlgenerator.SetTableRemarksGeneratorDatabricks
-liquibase.ext.databricks.sqlgenerator.SetColumnRemarksGeneratorDatabricks
\ No newline at end of file
+liquibase.ext.databricks.sqlgenerator.SetColumnRemarksGeneratorDatabricks
+liquibase.ext.databricks.sqlgenerator.AddAutoIncrementGeneratorDatabricks
+liquibase.ext.databricks.sqlgenerator.AddUniqueConstraintGeneratorDatabricks
+liquibase.ext.databricks.sqlgenerator.InsertOrUpdateGeneratorDatabricks
+liquibase.ext.databricks.sqlgenerator.UpdateGeneratorDatabricks
\ No newline at end of file
diff --git a/src/test/resources/liquibase/harness/change/changelogs/databricks/init.txt b/src/test/resources/liquibase/harness/change/changelogs/databricks/init.txt
index ddfb7eb2..6c56072c 100644
--- a/src/test/resources/liquibase/harness/change/changelogs/databricks/init.txt
+++ b/src/test/resources/liquibase/harness/change/changelogs/databricks/init.txt
@@ -20,11 +20,11 @@ CREATE TABLE main.liquibase_harness_test_ds.authors (
 USING delta TBLPROPERTIES('delta.feature.allowColumnDefaults' = 'supported', 'delta.columnMapping.mode' = 'name');
 
 INSERT INTO main.liquibase_harness_test_ds.authors (id, first_name, last_name, email, birthdate, added)
-VALUES ('1','Eileen','Lubowitz','ppaucek@example.org','1991-03-04', now()),
-       ('2','Tamia','Mayert','shansen@example.org','2016-03-27', now()),
-       ('3','Cyril','Funk','reynolds.godfrey@example.com','1988-04-21', now()),
-       ('4','Nicolas','Buckridge','xhoeger@example.net','2017-02-03', now()),
-       ('5','Jayden','Walter','lillian66@example.com','2010-02-27', now());
+VALUES ('1','Eileen','Lubowitz','ppaucek@example.org','1991-03-04', '2000-02-04 02:32:00.0'),
+       ('2','Tamia','Mayert','shansen@example.org','2016-03-27', '2014-03-21 02:52:00.0'),
+       ('3','Cyril','Funk','reynolds.godfrey@example.com','1988-04-21', '2011-06-24 18:17:48.0'),
+       ('4','Nicolas','Buckridge','xhoeger@example.net','2017-02-03', '2019-04-22 02:04:41.0'),
+       ('5','Jayden','Walter','lillian66@example.com','2010-02-27', '1990-02-04 02:32:00.0');
 
 DROP TABLE IF EXISTS main.liquibase_harness_test_ds.posts;
diff --git a/src/test/resources/liquibase/harness/change/expectedSql/databricks/1initScript.sql b/src/test/resources/liquibase/harness/change/expectedSql/databricks/1initScript.sql
index 2845dd9a..952a36ba 100644
--- a/src/test/resources/liquibase/harness/change/expectedSql/databricks/1initScript.sql
+++ b/src/test/resources/liquibase/harness/change/expectedSql/databricks/1initScript.sql
@@ -13,11 +13,11 @@ PRIMARY KEY (`id`)
 ) USING delta TBLPROPERTIES('delta.feature.allowColumnDefaults' = 'supported', 'delta.columnMapping.mode' = 'name')
 INSERT INTO main.liquibase_harness_test_ds.authors (id, first_name, last_name, email, birthdate, added)
-VALUES ('1','Eileen','Lubowitz','ppaucek@example.org','1991-03-04', now()),
-('2','Tamia','Mayert','shansen@example.org','2016-03-27', now()),
-('3','Cyril','Funk','reynolds.godfrey@example.com','1988-04-21', now()),
-('4','Nicolas','Buckridge','xhoeger@example.net','2017-02-03', now()),
-('5','Jayden','Walter','lillian66@example.com','2010-02-27', now())
+VALUES ('1','Eileen','Lubowitz','ppaucek@example.org','1991-03-04', '2000-02-04 02:32:00.0'),
+('2','Tamia','Mayert','shansen@example.org','2016-03-27', '2014-03-21 02:52:00.0'),
+('3','Cyril','Funk','reynolds.godfrey@example.com','1988-04-21', '2011-06-24 18:17:48.0'),
+('4','Nicolas','Buckridge','xhoeger@example.net','2017-02-03', '2019-04-22 02:04:41.0'),
+('5','Jayden','Walter','lillian66@example.com','2010-02-27', '1990-02-04 02:32:00.0')
 DROP TABLE IF EXISTS main.liquibase_harness_test_ds.posts
 CREATE TABLE main.liquibase_harness_test_ds.posts (
 `id` BIGINT NOT NULL GENERATED BY DEFAULT AS IDENTITY(start with 1 increment by 1),
diff --git a/src/test/resources/liquibase/harness/change/expectedSql/databricks/addLookupTable.sql b/src/test/resources/liquibase/harness/change/expectedSql/databricks/addLookupTable.sql
index 0bb7ec7d..31d3b097 100644
--- a/src/test/resources/liquibase/harness/change/expectedSql/databricks/addLookupTable.sql
+++ b/src/test/resources/liquibase/harness/change/expectedSql/databricks/addLookupTable.sql
@@ -1,4 +1,4 @@
-CREATE TABLE main.liquibase_harness_test_ds.authors_data AS SELECT DISTINCT email AS authors_email FROM main.liquibase_harness_test_ds.authors WHERE email IS NOT NULL
+CREATE TABLE main.liquibase_harness_test_ds.authors_data USING delta TBLPROPERTIES('delta.feature.allowColumnDefaults' = 'supported', 'delta.columnMapping.mode' = 'name') AS SELECT DISTINCT email AS authors_email FROM main.liquibase_harness_test_ds.authors WHERE email IS NOT NULL
 ALTER TABLE main.liquibase_harness_test_ds.authors_data ALTER COLUMN authors_email SET NOT NULL
-ALTER TABLE main.liquibase_harness_test_ds.authors_data ADD CONSTRAINT pk_authors_email PRIMARY KEY (authors_email)
-ALTER TABLE main.liquibase_harness_test_ds.authors ADD CONSTRAINT FK_AUTHORS_AUTHORS_DATA FOREIGN KEY (email) REFERENCES main.liquibase_harness_test_ds.authors_data (authors_email)
+ALTER TABLE main.liquibase_harness_test_ds.authors_data ADD CONSTRAINT null PRIMARY KEY (authors_email)
+ALTER TABLE main.liquibase_harness_test_ds.authors ADD CONSTRAINT fk_authors_authors_data FOREIGN KEY (email) REFERENCES main.liquibase_harness_test_ds.authors_data (authors_email)
\ No newline at end of file
diff --git a/src/test/resources/liquibase/harness/change/expectedSql/databricks/createFunction.sql b/src/test/resources/liquibase/harness/change/expectedSql/databricks/createFunction.sql
new file mode 100644
index 00000000..2e1258c4
--- /dev/null
+++ b/src/test/resources/liquibase/harness/change/expectedSql/databricks/createFunction.sql
@@ -0,0 +1,2 @@
+INVALID TEST
+-- Databricks supports functions, but this can only be done in liquibase PRO
\ No newline at end of file
diff --git a/src/test/resources/liquibase/harness/change/expectedSql/databricks/dropFunction.sql b/src/test/resources/liquibase/harness/change/expectedSql/databricks/dropFunction.sql
new file mode 100644
index 00000000..879ae1bb
--- /dev/null
+++ b/src/test/resources/liquibase/harness/change/expectedSql/databricks/dropFunction.sql
@@ -0,0 +1,2 @@
+INVALID TEST
+-- Databricks supports functions, but this is only supported in Liquibase PRO
\ No newline at end of file
diff --git a/src/test/resources/liquibase/harness/change/expectedSql/databricks/setTableRemarks.sql b/src/test/resources/liquibase/harness/change/expectedSql/databricks/setTableRemarks.sql
new file mode 100644
index 00000000..e4be0a61
--- /dev/null
+++ b/src/test/resources/liquibase/harness/change/expectedSql/databricks/setTableRemarks.sql
@@ -0,0 +1,2 @@
+INVALID TEST
+-- Databricks does support this, but it is not working in JDBC connection
\ No newline at end of file
diff --git a/src/test/resources/liquibase/harness/compatibility/advanced/expectedSql/generateChangelog/addUniqueConstraint.sql b/src/test/resources/liquibase/harness/compatibility/advanced/expectedSql/generateChangelog/addUniqueConstraint.sql
deleted file mode 100644
index c5061573..00000000
--- a/src/test/resources/liquibase/harness/compatibility/advanced/expectedSql/generateChangelog/addUniqueConstraint.sql
+++ /dev/null
@@ -1,2 +0,0 @@
-INVALID TEST
--- Databricks does not support unique constraints
diff --git a/src/test/resources/liquibase/harness/compatibility/advanced/expectedSql/generateChangelog/createIndex.sql b/src/test/resources/liquibase/harness/compatibility/advanced/expectedSql/generateChangelog/createIndex.sql
deleted file mode 100644
index 6647b408..00000000
--- a/src/test/resources/liquibase/harness/compatibility/advanced/expectedSql/generateChangelog/createIndex.sql
+++ /dev/null
@@ -1,2 +0,0 @@
-INVALID TEST
--- Databricks does not have classical indexes, see CLUSTER BY or OPTIMIZE / ZORDER
\ No newline at end of file
diff --git a/src/test/resources/liquibase/harness/compatibility/advanced/expectedSql/generateChangelog/createTable.sql b/src/test/resources/liquibase/harness/compatibility/advanced/expectedSql/generateChangelog/createTable.sql
deleted file mode 100644
index aef9738a..00000000
--- a/src/test/resources/liquibase/harness/compatibility/advanced/expectedSql/generateChangelog/createTable.sql
+++ /dev/null
@@ -1,5 +0,0 @@
-CREATE TABLE authors_data (authors_email STRING(255) NOT NULL, CONSTRAINT pk_authors_email PRIMARY KEY (authors_email)) USING delta TBLPROPERTIES('delta.feature.allowColumnDefaults' = 'supported', 'delta.columnMapping.mode' = 'name')
-CREATE TABLE test_table (test_column INT(10), varcharColumn STRING(255), intColumn INT(10), dateColumn date) USING delta TBLPROPERTIES('delta.feature.allowColumnDefaults' = 'supported', 'delta.columnMapping.mode' = 'name')
-CREATE TABLE test_table_base (id INT(10) NOT NULL, CONSTRAINT pk_test_table_base PRIMARY KEY (id)) USING delta TBLPROPERTIES('delta.feature.allowColumnDefaults' = 'supported', 'delta.columnMapping.mode' = 'name')
-CREATE TABLE test_table_reference (id INT(10), test_column INT(10) NOT NULL, CONSTRAINT pk_test_table_reference PRIMARY KEY (test_column)) USING delta TBLPROPERTIES('delta.feature.allowColumnDefaults' = 'supported', 'delta.columnMapping.mode' = 'name')
-CREATE TABLE test_table_xml (test_column INT(10)) USING delta TBLPROPERTIES('delta.feature.allowColumnDefaults' = 'supported', 'delta.columnMapping.mode' = 'name')
\ No newline at end of file
diff --git a/src/test/resources/liquibase/harness/data/changelogs/databricks/loadUpdateData.csv b/src/test/resources/liquibase/harness/data/changelogs/databricks/loadUpdateData.csv
new file mode 100644
index 00000000..ad77f3a9
--- /dev/null
+++ b/src/test/resources/liquibase/harness/data/changelogs/databricks/loadUpdateData.csv
@@ -0,0 +1,4 @@
+id;first_name;last_name;email;birthdate;added
+1;Adam;Gods;test1@example.com;1000-02-27;2000-02-04T02:32:00
+7;Noah;Lamekhs;test2@example.com;2000-02-27;1994-12-10T01:00:00
+8;Muhammad;Ibn Abdullah;test3@example.com;3000-02-27;2000-12-10T01:00:00
\ No newline at end of file
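Since the CSV above is semicolon-delimited, the harness changeset that drives it has to set the separator explicitly. A minimal sketch of such a changeset, assuming the standard Liquibase loadUpdateData change type (tableName, primaryKey, separator, and file are its standard attributes; the changeSet id/author and surrounding databaseChangeLog element are hypothetical):

    <changeSet id="loadUpdateData-test" author="harness">
        <loadUpdateData tableName="authors" primaryKey="id" separator=";"
                        file="liquibase/harness/data/changelogs/databricks/loadUpdateData.csv"/>
    </changeSet>

The expected MERGE output below corresponds to the CSV row for row, keyed on the primaryKey column.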
diff --git a/src/test/resources/liquibase/harness/data/expectedSql/databricks/loadUpdateData.sql b/src/test/resources/liquibase/harness/data/expectedSql/databricks/loadUpdateData.sql
new file mode 100644
index 00000000..9f557246
--- /dev/null
+++ b/src/test/resources/liquibase/harness/data/expectedSql/databricks/loadUpdateData.sql
@@ -0,0 +1,3 @@
+MERGE INTO authors AS target USING (SELECT 1) AS source ON target.id = 1 WHEN MATCHED THEN UPDATE SET target.first_name = 'Adam',target.last_name = 'Gods',target.email = 'test1@example.com',target.birthdate = '1000-02-27',target.added = '2000-02-04 02:32:00' WHEN NOT MATCHED THEN INSERT (id,first_name,last_name,email,birthdate,added) VALUES (1,'Adam','Gods','test1@example.com','1000-02-27','2000-02-04 02:32:00')
+MERGE INTO authors AS target USING (SELECT 1) AS source ON target.id = 7 WHEN MATCHED THEN UPDATE SET target.first_name = 'Noah',target.last_name = 'Lamekhs',target.email = 'test2@example.com',target.birthdate = '2000-02-27',target.added = '1994-12-10 01:00:00' WHEN NOT MATCHED THEN INSERT (id,first_name,last_name,email,birthdate,added) VALUES (7,'Noah','Lamekhs','test2@example.com','2000-02-27','1994-12-10 01:00:00')
+MERGE INTO authors AS target USING (SELECT 1) AS source ON target.id = 8 WHEN MATCHED THEN UPDATE SET target.first_name = 'Muhammad',target.last_name = 'Ibn Abdullah',target.email = 'test3@example.com',target.birthdate = '3000-02-27',target.added = '2000-12-10 01:00:00' WHEN NOT MATCHED THEN INSERT (id,first_name,last_name,email,birthdate,added) VALUES (8,'Muhammad','Ibn Abdullah','test3@example.com','3000-02-27','2000-12-10 01:00:00')
\ No newline at end of file
diff --git a/src/test/resources/liquibase/harness/generateChangelog/expectedChangeLog/databricks/column.xml b/src/test/resources/liquibase/harness/generateChangelog/expectedChangeLog/databricks/column.xml
new file mode 100644
index 00000000..77b6c426
--- /dev/null
+++ b/src/test/resources/liquibase/harness/generateChangelog/expectedChangeLog/databricks/column.xml
@@ -0,0 +1,23 @@
[23 added lines of expected-changelog XML were stripped in this capture and are not recoverable here]
\ No newline at end of file
diff --git a/src/test/resources/liquibase/harness/generateChangelog/expectedChangeLog/databricks/primary.xml b/src/test/resources/liquibase/harness/generateChangelog/expectedChangeLog/databricks/primary.xml
new file mode 100644
index 00000000..5d2d9214
--- /dev/null
+++ b/src/test/resources/liquibase/harness/generateChangelog/expectedChangeLog/databricks/primary.xml
@@ -0,0 +1,26 @@
[26 added lines of expected-changelog XML were stripped in this capture and are not recoverable here]
\ No newline at end of file
diff --git a/src/test/resources/liquibase/harness/generateChangelog/expectedSql/databricks/createTable.sql b/src/test/resources/liquibase/harness/generateChangelog/expectedSql/databricks/createTable.sql
deleted file mode 100644
index aa91a4a2..00000000
--- a/src/test/resources/liquibase/harness/generateChangelog/expectedSql/databricks/createTable.sql
+++ /dev/null
@@ -1,3 +0,0 @@
-CREATE TABLE authors_data (authors_email STRING(255) NOT NULL, CONSTRAINT pk_authors_email PRIMARY KEY (authors_email)) USING delta TBLPROPERTIES('delta.feature.allowColumnDefaults' = 'supported', 'delta.columnMapping.mode' = 'name')
-CREATE TABLE test_table (test_column INT(10), varcharColumn STRING(255), intColumn INT(10), dateColumn date) USING delta TBLPROPERTIES('delta.feature.allowColumnDefaults' = 'supported', 'delta.columnMapping.mode' = 'name')
-CREATE TABLE test_table_xml (test_column INT(10)) USING delta TBLPROPERTIES('delta.feature.allowColumnDefaults' = 'supported', 'delta.columnMapping.mode' = 'name')
\ No newline at end of file