Skip to content

Commit

Permalink
verification API and dump flow changes to support MySQL CHECK CONSTRA…
Browse files Browse the repository at this point in the history
…INTS (GoogleCloudPlatform#978)

* verification api and dump flow changes

* fixed IT issue

* Check constraints verification api v2 (#24)

* handled function not found

* added unhandled error

* updated the error msg

---------

Co-authored-by: Vivek Yadav <[email protected]>

* fix IT issue

* comment addressed (#27)

* comment addressed
1. rename the functionNotFound
2. added condition to call verification api

* spell checked

---------

Co-authored-by: Vivek Yadav <[email protected]>

* refactor the DbDumpImpl struct (#28)

* refactor the DbDumpImpl struct

* remove the GenerateCheckConstrainstExprId method

---------

Co-authored-by: Vivek Yadav <[email protected]>

* fixed if condition

---------

Co-authored-by: taherkl <[email protected]>
Co-authored-by: Vivek Yadav <[email protected]>
Co-authored-by: Vivek Yadav <[email protected]>
  • Loading branch information
4 people authored Jan 2, 2025
1 parent e18a9a0 commit 534145a
Show file tree
Hide file tree
Showing 41 changed files with 987 additions and 158 deletions.
4 changes: 3 additions & 1 deletion common/utils/utils.go
Original file line number Diff line number Diff line change
Expand Up @@ -446,12 +446,14 @@ func GetLegacyModeSupportedDrivers() []string {
func ReadSpannerSchema(ctx context.Context, conv *internal.Conv, client *sp.Client) error {
infoSchema := spanner.InfoSchemaImpl{Client: client, Ctx: ctx, SpDialect: conv.SpDialect}
processSchema := common.ProcessSchemaImpl{}
expressionVerificationAccessor, _ := expressions_api.NewExpressionVerificationAccessorImpl(ctx, conv.SpProjectId, conv.SpInstanceId)
ddlVerifier, err := expressions_api.NewDDLVerifierImpl(ctx, conv.SpProjectId, conv.SpInstanceId)
if err != nil {
return fmt.Errorf("error trying create ddl verifier: %v", err)
}
schemaToSpanner := common.SchemaToSpannerImpl{
DdlV: ddlVerifier,
DdlV: ddlVerifier,
ExpressionVerificationAccessor: expressionVerificationAccessor,
}
err = processSchema.ProcessSchema(conv, infoSchema, common.DefaultWorkers, internal.AdditionalSchemaAttributes{IsSharded: false}, &schemaToSpanner, &common.UtilsOrderImpl{}, &common.InfoSchemaImpl{})
if err != nil {
Expand Down
4 changes: 3 additions & 1 deletion conversion/conversion.go
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ import (
"github.com/GoogleCloudPlatform/spanner-migration-tool/common/constants"
"github.com/GoogleCloudPlatform/spanner-migration-tool/common/task"
"github.com/GoogleCloudPlatform/spanner-migration-tool/common/utils"
"github.com/GoogleCloudPlatform/spanner-migration-tool/expressions_api"
"github.com/GoogleCloudPlatform/spanner-migration-tool/internal"
"github.com/GoogleCloudPlatform/spanner-migration-tool/internal/reports"
"github.com/GoogleCloudPlatform/spanner-migration-tool/profiles"
Expand Down Expand Up @@ -79,7 +80,8 @@ func (ci *ConvImpl) SchemaConv(migrationProjectId string, sourceProfile profiles
case constants.POSTGRES, constants.MYSQL, constants.DYNAMODB, constants.SQLSERVER, constants.ORACLE:
return schemaFromSource.schemaFromDatabase(migrationProjectId, sourceProfile, targetProfile, &GetInfoImpl{}, &common.ProcessSchemaImpl{})
case constants.PGDUMP, constants.MYSQLDUMP:
return schemaFromSource.SchemaFromDump(targetProfile.Conn.Sp.Project, targetProfile.Conn.Sp.Instance, sourceProfile.Driver, targetProfile.Conn.Sp.Dialect, ioHelper, &ProcessDumpByDialectImpl{})
expressionVerificationAccessor, _ := expressions_api.NewExpressionVerificationAccessorImpl(context.Background(), targetProfile.Conn.Sp.Project, targetProfile.Conn.Sp.Instance)
return schemaFromSource.SchemaFromDump(targetProfile.Conn.Sp.Project, targetProfile.Conn.Sp.Instance, sourceProfile.Driver, targetProfile.Conn.Sp.Dialect, ioHelper, &ProcessDumpByDialectImpl{ExpressionVerificationAccessor: expressionVerificationAccessor})
default:
return nil, fmt.Errorf("schema conversion for driver %s not supported", sourceProfile.Driver)
}
Expand Down
7 changes: 6 additions & 1 deletion conversion/conversion_from_source.go
Original file line number Diff line number Diff line change
Expand Up @@ -102,8 +102,12 @@ func (sads *SchemaFromSourceImpl) schemaFromDatabase(migrationProjectId string,
additionalSchemaAttributes := internal.AdditionalSchemaAttributes{
IsSharded: isSharded,
}

ctx := context.Background()
expressionVerificationAccessor, _ := expressions_api.NewExpressionVerificationAccessorImpl(ctx, conv.SpProjectId, conv.SpInstanceId)
schemaToSpanner := common.SchemaToSpannerImpl{
DdlV: sads.DdlVerifier,
DdlV: sads.DdlVerifier,
ExpressionVerificationAccessor: expressionVerificationAccessor,
}
return conv, processSchema.ProcessSchema(conv, infoSchema, common.DefaultWorkers, additionalSchemaAttributes, &schemaToSpanner, &common.UtilsOrderImpl{}, &common.InfoSchemaImpl{})
}
Expand All @@ -118,6 +122,7 @@ func (sads *SchemaFromSourceImpl) SchemaFromDump(SpProjectId string, SpInstanceI
ioHelper.BytesRead = n
conv := internal.MakeConv()
conv.SpDialect = spDialect
conv.Source = driver
p := internal.NewProgress(n, "Generating schema", internal.Verbose(), false, int(internal.SchemaCreationInProgress))
r := internal.NewReader(bufio.NewReader(f), p)
conv.SetSchemaMode() // Build schema and ignore data in dump.
Expand Down
7 changes: 4 additions & 3 deletions conversion/conversion_helper.go
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,8 @@ type ProcessDumpByDialectInterface interface {
}

type ProcessDumpByDialectImpl struct {
DdlVerifier expressions_api.DDLVerifier
ExpressionVerificationAccessor expressions_api.ExpressionVerificationAccessor
DdlVerifier expressions_api.DDLVerifier
}

type PopulateDataConvInterface interface {
Expand Down Expand Up @@ -92,9 +93,9 @@ func getSeekable(f *os.File) (*os.File, int64, error) {
func (pdd *ProcessDumpByDialectImpl) ProcessDump(driver string, conv *internal.Conv, r *internal.Reader) error {
switch driver {
case constants.MYSQLDUMP:
return common.ProcessDbDump(conv, r, mysql.DbDumpImpl{}, pdd.DdlVerifier)
return common.ProcessDbDump(conv, r, mysql.DbDumpImpl{}, pdd.DdlVerifier, pdd.ExpressionVerificationAccessor)
case constants.PGDUMP:
return common.ProcessDbDump(conv, r, postgres.DbDumpImpl{}, pdd.DdlVerifier)
return common.ProcessDbDump(conv, r, postgres.DbDumpImpl{}, pdd.DdlVerifier, pdd.ExpressionVerificationAccessor)
default:
return fmt.Errorf("process dump for driver %s not supported", driver)
}
Expand Down
3 changes: 3 additions & 0 deletions expressions_api/expression_verify.go
Original file line number Diff line number Diff line change
Expand Up @@ -160,6 +160,9 @@ func (ev *ExpressionVerificationAccessorImpl) removeExpressions(inputConv *inter
//TODO: Implement similar checks for DEFAULT and CHECK constraints as well
convCopy.SpSequences = nil
for _, table := range convCopy.SpSchema {
table.CheckConstraints = []ddl.CheckConstraint{}
convCopy.SpSchema[table.Id] = table

for colName, colDef := range table.ColDefs {
colDef.AutoGen = ddl.AutoGenCol{}
colDef.DefaultValue = ddl.DefaultValue{}
Expand Down
4 changes: 4 additions & 0 deletions internal/convert.go
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,10 @@ const (
NumericPKNotSupported
TypeMismatch
DefaultValueError
InvalidCondition
ColumnNotFound
CheckConstraintFunctionNotFound
GenericError
)

const (
Expand Down
59 changes: 49 additions & 10 deletions internal/reports/report_helpers.go
Original file line number Diff line number Diff line change
Expand Up @@ -111,14 +111,53 @@ func buildTableReportBody(conv *internal.Conv, tableId string, issues map[string

}

// added if to add table level issue
if p.severity == Errors && len(tableLevelIssues) != 0 {
for _, issue := range tableLevelIssues {
switch issue {
case internal.TypeMismatch:
toAppend := Issue{
Category: IssueDB[issue].Category,
Description: fmt.Sprintf("Table '%s': Type mismatch in check constraint. Verify that the column type matches the constraint logic.", conv.SpSchema[tableId].Name),
}
l = append(l, toAppend)
case internal.InvalidCondition:
toAppend := Issue{
Category: IssueDB[issue].Category,
Description: fmt.Sprintf("Table '%s': Invalid condition in check constraint. Ensure the condition is compatible with the constraint logic.", conv.SpSchema[tableId].Name),
}
l = append(l, toAppend)
case internal.ColumnNotFound:
toAppend := Issue{
Category: IssueDB[issue].Category,
Description: fmt.Sprintf("Table '%s': Column not found in check constraint. Verify that all referenced columns exist.", conv.SpSchema[tableId].Name),
}
l = append(l, toAppend)

case internal.CheckConstraintFunctionNotFound:
toAppend := Issue{
Category: IssueDB[issue].Category,
Description: fmt.Sprintf("Table '%s': Function not found in check constraint. Ensure all functions used in the condition are valid.", conv.SpSchema[tableId].Name),
}
l = append(l, toAppend)
case internal.GenericError:
toAppend := Issue{
Category: IssueDB[issue].Category,
Description: fmt.Sprintf("Table '%s': Something went wrong in check constraint. Verify the conditions and constraint logic.", conv.SpSchema[tableId].Name),
}
l = append(l, toAppend)
}
}
}

if p.severity == warning {
flag := false
for _, spFk := range conv.SpSchema[tableId].ForeignKeys {
srcFk, err := internal.GetSrcFkFromId(conv.SrcSchema[tableId].ForeignKeys, spFk.Id)
if err != nil {
continue
}
if srcFk.OnDelete == "" && srcFk.OnUpdate == "" && flag == false {
if srcFk.OnDelete == "" && srcFk.OnUpdate == "" && !flag {
flag = true
issue := internal.ForeignKeyActionNotSupported
toAppend := Issue{
Expand Down Expand Up @@ -403,18 +442,13 @@ func buildTableReportBody(conv *internal.Conv, tableId string, issues map[string
Description: fmt.Sprintf("UNIQUE constraint on column(s) '%s' replaced with primary key since table '%s' didn't have one. Spanner requires a primary key for every table", strings.Join(uniquePK, ", "), conv.SpSchema[tableId].Name),
}
l = append(l, toAppend)

case internal.DefaultValueError:
toAppend := Issue{
Category: IssueDB[i].Category,
Description: fmt.Sprintf("%s for table '%s' column '%s'", IssueDB[i].Brief, conv.SpSchema[tableId].Name, spColName),
}
l = append(l, toAppend)
case internal.TypeMismatch:
toAppend := Issue{
Category: IssueDB[i].Category,
Description: fmt.Sprintf("Table '%s': Type mismatch in '%s'column affecting check constraints. Verify data type compatibility with constraint logic", conv.SpSchema[tableId].Name, conv.SpSchema[tableId].ColDefs[colId].Name),
}
l = append(l, toAppend)
default:
toAppend := Issue{
Category: IssueDB[i].Category,
Expand Down Expand Up @@ -526,9 +560,14 @@ var IssueDB = map[internal.SchemaIssue]struct {
Category string // Standarized issue type
CategoryDescription string
}{
internal.DefaultValue: {Brief: "Some columns have default values which Spanner migration tool does not migrate. Please add the default constraints manually after the migration is complete", Severity: note, batch: true, Category: "MISSING_DEFAULT_VALUE_CONSTRAINTS"},
internal.ForeignKey: {Brief: "Spanner does not support foreign keys", Severity: warning, Category: "FOREIGN_KEY_USES"},
internal.MultiDimensionalArray: {Brief: "Spanner doesn't support multi-dimensional arrays", Severity: warning, Category: "MULTI_DIMENSIONAL_ARRAY_USES"},
internal.DefaultValue: {Brief: "Some columns have default values which Spanner migration tool does not migrate. Please add the default constraints manually after the migration is complete", Severity: note, batch: true, Category: "MISSING_DEFAULT_VALUE_CONSTRAINTS"},
internal.TypeMismatch: {Brief: "Type mismatch in check constraint mention in table", Severity: warning, Category: "TYPE_MISMATCH"},
internal.InvalidCondition: {Brief: "Invalid condition in check constraint mention in table", Severity: warning, Category: "INVALID_CONDITION"},
internal.ColumnNotFound: {Brief: "Column not found in check constraint mention in the table", Severity: warning, Category: "COLUMN_NOT_FOUND"},
internal.CheckConstraintFunctionNotFound: {Brief: "Function not found in check constraint mention in the table", Severity: warning, Category: "FUNCTION_NOT_FOUND"},
internal.GenericError: {Brief: "Something went wrong", Severity: warning, Category: "UNHANDLE_ERROR"},
internal.ForeignKey: {Brief: "Spanner does not support foreign keys", Severity: warning, Category: "FOREIGN_KEY_USES"},
internal.MultiDimensionalArray: {Brief: "Spanner doesn't support multi-dimensional arrays", Severity: warning, Category: "MULTI_DIMENSIONAL_ARRAY_USES"},
internal.NoGoodType: {Brief: "No appropriate Spanner type. The column will be made nullable in Spanner", Severity: warning, Category: "INAPPROPRIATE_TYPE",
CategoryDescription: "No appropriate Spanner type"},
internal.Numeric: {Brief: "Spanner does not support numeric. This type mapping could lose precision and is not recommended for production use", Severity: warning, Category: "NUMERIC_USES"},
Expand Down
24 changes: 24 additions & 0 deletions mocks/expressions_api_mock.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
package mocks

import (
"context"

"github.com/GoogleCloudPlatform/spanner-migration-tool/internal"
"github.com/stretchr/testify/mock"
)

// MockExpressionVerificationAccessor is a testify-based mock of the
// expressions_api.ExpressionVerificationAccessor interface, intended for unit
// tests. Configure expected calls and canned results with On(...).Return(...).
type MockExpressionVerificationAccessor struct {
	mock.Mock
}

// VerifyExpressions records the call with the given context and input, and
// returns the internal.VerifyExpressionsOutput configured on the mock via
// On("VerifyExpressions", ...).Return(...).
func (m *MockExpressionVerificationAccessor) VerifyExpressions(ctx context.Context, input internal.VerifyExpressionsInput) internal.VerifyExpressionsOutput {
	recorded := m.Called(ctx, input)
	out := recorded.Get(0).(internal.VerifyExpressionsOutput)
	return out
}

// RefreshSpannerClient records the call and returns the error configured on
// the mock via On("RefreshSpannerClient", ...).Return(...).
//
// args.Error(0) is used instead of args.Get(0).(error): a direct type
// assertion panics when the test configures Return(nil), whereas Error(0)
// handles a nil error safely.
func (m *MockExpressionVerificationAccessor) RefreshSpannerClient(ctx context.Context, project, instance string) error {
	args := m.Called(ctx, project, instance)
	return args.Error(0)
}
7 changes: 4 additions & 3 deletions schema/schema.go
Original file line number Diff line number Diff line change
Expand Up @@ -80,9 +80,10 @@ type ForeignKey struct {

// CheckConstraints represents a check constraint defined in the schema.
type CheckConstraint struct {
Name string
Expr string
Id string
Name string
Expr string
ExprId string
Id string
}

// Key respresents a primary key or index key.
Expand Down
5 changes: 3 additions & 2 deletions sources/common/dbdump.go
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ type DbDump interface {
// In schema mode, this method incrementally builds a schema (updating conv).
// In data mode, this method uses this schema to convert data and writes it
// to Spanner, using the data sink specified in conv.
func ProcessDbDump(conv *internal.Conv, r *internal.Reader, dbDump DbDump, ddlVerifier expressions_api.DDLVerifier) error {
func ProcessDbDump(conv *internal.Conv, r *internal.Reader, dbDump DbDump, ddlVerifier expressions_api.DDLVerifier, exprVerifier expressions_api.ExpressionVerificationAccessor) error {
if err := dbDump.ProcessDump(conv, r); err != nil {
return err
}
Expand All @@ -39,7 +39,8 @@ func ProcessDbDump(conv *internal.Conv, r *internal.Reader, dbDump DbDump, ddlVe
utilsOrder.initPrimaryKeyOrder(conv)
utilsOrder.initIndexOrder(conv)
schemaToSpanner := SchemaToSpannerImpl{
DdlV: ddlVerifier,
ExpressionVerificationAccessor: exprVerifier,
DdlV: ddlVerifier,
}
schemaToSpanner.SchemaToSpannerDDL(conv, dbDump.GetToDdl())
conv.AddPrimaryKeys()
Expand Down
Loading

0 comments on commit 534145a

Please sign in to comment.