diff --git a/internal/datacoord/import_checker_test.go b/internal/datacoord/import_checker_test.go
index 8d3dfda6d37c4..0972e28c067bd 100644
--- a/internal/datacoord/import_checker_test.go
+++ b/internal/datacoord/import_checker_test.go
@@ -37,6 +37,7 @@ import (
 	"github.com/milvus-io/milvus/pkg/proto/datapb"
 	"github.com/milvus-io/milvus/pkg/proto/indexpb"
 	"github.com/milvus-io/milvus/pkg/proto/internalpb"
+	"github.com/milvus-io/milvus/pkg/proto/rootcoordpb"
 	"github.com/milvus-io/milvus/pkg/util/paramtable"
 	"github.com/milvus-io/milvus/pkg/util/timerecord"
 	"github.com/milvus-io/milvus/pkg/util/tsoutil"
@@ -528,7 +529,6 @@ func TestImportCheckerCompaction(t *testing.T) {
 	catalog.EXPECT().ListImportJobs(mock.Anything).Return(nil, nil)
 	catalog.EXPECT().ListPreImportTasks(mock.Anything).Return(nil, nil)
 	catalog.EXPECT().ListImportTasks(mock.Anything).Return(nil, nil)
-	catalog.EXPECT().ListSegments(mock.Anything).Return(nil, nil)
 	catalog.EXPECT().ListChannelCheckpoint(mock.Anything).Return(nil, nil)
 	catalog.EXPECT().ListIndexes(mock.Anything).Return(nil, nil)
 	catalog.EXPECT().ListSegmentIndexes(mock.Anything).Return(nil, nil)
@@ -543,11 +543,9 @@ func TestImportCheckerCompaction(t *testing.T) {
 	imeta, err := NewImportMeta(context.TODO(), catalog)
 	assert.NoError(t, err)
 
-	meta, err := newMeta(context.TODO(), catalog, nil)
-	assert.NoError(t, err)
-
 	broker := broker2.NewMockBroker(t)
-
+	broker.EXPECT().ShowCollectionIDs(mock.Anything).Return(&rootcoordpb.ShowCollectionIDsResponse{}, nil)
+	meta, err := newMeta(context.TODO(), catalog, nil, broker)
 	sjm := NewMockStatsJobManager(t)
 	l0CompactionTrigger := NewMockTriggerManager(t)
 	l0CompactionTrigger.EXPECT().PauseL0SegmentCompacting(mock.Anything).Return().Maybe()
diff --git a/internal/datacoord/import_util_test.go b/internal/datacoord/import_util_test.go
index 5ae82e8f9f983..73ea2dc7f8bdd 100644
--- a/internal/datacoord/import_util_test.go
+++ b/internal/datacoord/import_util_test.go
@@ -33,6 +33,7 @@ import (
 	"github.com/milvus-io/milvus-proto/go-api/v2/commonpb"
 	"github.com/milvus-io/milvus/internal/datacoord/allocator"
 	"github.com/milvus-io/milvus/internal/datacoord/broker"
+	broker2 "github.com/milvus-io/milvus/internal/datacoord/broker"
 	"github.com/milvus-io/milvus/internal/json"
 	"github.com/milvus-io/milvus/internal/metastore/mocks"
 	mocks2 "github.com/milvus-io/milvus/internal/mocks"
@@ -41,6 +42,7 @@
 	"github.com/milvus-io/milvus/pkg/proto/datapb"
 	"github.com/milvus-io/milvus/pkg/proto/indexpb"
 	"github.com/milvus-io/milvus/pkg/proto/internalpb"
+	"github.com/milvus-io/milvus/pkg/proto/rootcoordpb"
 	"github.com/milvus-io/milvus/pkg/util/merr"
 	"github.com/milvus-io/milvus/pkg/util/metricsinfo"
 	"github.com/milvus-io/milvus/pkg/util/paramtable"
@@ -165,17 +167,18 @@ func TestImportUtil_NewImportTasksWithDataTt(t *testing.T) {
 	alloc.EXPECT().AllocID(mock.Anything).Return(rand.Int63(), nil)
 
 	catalog := mocks.NewDataCoordCatalog(t)
-	catalog.EXPECT().ListSegments(mock.Anything).Return(nil, nil)
+	catalog.EXPECT().ListAnalyzeTasks(mock.Anything).Return(nil, nil)
 	catalog.EXPECT().ListChannelCheckpoint(mock.Anything).Return(nil, nil)
 	catalog.EXPECT().ListIndexes(mock.Anything).Return(nil, nil)
 	catalog.EXPECT().ListSegmentIndexes(mock.Anything).Return(nil, nil)
 	catalog.EXPECT().AddSegment(mock.Anything, mock.Anything).Return(nil)
-	catalog.EXPECT().ListAnalyzeTasks(mock.Anything).Return(nil, nil)
 	catalog.EXPECT().ListCompactionTask(mock.Anything).Return(nil, nil)
 	catalog.EXPECT().ListPartitionStatsInfos(mock.Anything).Return(nil, nil)
 	catalog.EXPECT().ListStatsTasks(mock.Anything).Return(nil, nil)
 
-	meta, err := newMeta(context.TODO(), catalog, nil)
+	broker := broker2.NewMockBroker(t)
+	broker.EXPECT().ShowCollectionIDs(mock.Anything).Return(&rootcoordpb.ShowCollectionIDsResponse{}, nil)
+	meta, err := newMeta(context.TODO(), catalog, nil, broker)
 	assert.NoError(t, err)
 
 	tasks, err := NewImportTasks(fileGroups, job, alloc, meta)
@@ -285,7 +288,6 @@ func TestImportUtil_AssembleRequestWithDataTt(t *testing.T) {
 	}
 
 	catalog := mocks.NewDataCoordCatalog(t)
-	catalog.EXPECT().ListSegments(mock.Anything).Return(nil, nil)
 	catalog.EXPECT().ListChannelCheckpoint(mock.Anything).Return(nil, nil)
 	catalog.EXPECT().ListIndexes(mock.Anything).Return(nil, nil)
 	catalog.EXPECT().ListSegmentIndexes(mock.Anything).Return(nil, nil)
@@ -301,7 +303,9 @@
 			return id, id + n, nil
 		})
 
-	meta, err := newMeta(context.TODO(), catalog, nil)
+	broker := broker2.NewMockBroker(t)
+	broker.EXPECT().ShowCollectionIDs(mock.Anything).Return(&rootcoordpb.ShowCollectionIDsResponse{}, nil)
+	meta, err := newMeta(context.TODO(), catalog, nil, broker)
 	assert.NoError(t, err)
 	segment := &SegmentInfo{
 		SegmentInfo: &datapb.SegmentInfo{ID: 5, IsImporting: true},
diff --git a/internal/datacoord/services_test.go b/internal/datacoord/services_test.go
index 51e9bd18caaf7..91a3e0d094ae4 100644
--- a/internal/datacoord/services_test.go
+++ b/internal/datacoord/services_test.go
@@ -22,9 +22,11 @@ import (
 	"github.com/milvus-io/milvus/internal/datacoord/allocator"
 	"github.com/milvus-io/milvus/internal/datacoord/broker"
 	"github.com/milvus-io/milvus/internal/datacoord/session"
+	"github.com/milvus-io/milvus/internal/distributed/streaming"
 	"github.com/milvus-io/milvus/internal/metastore/mocks"
 	"github.com/milvus-io/milvus/internal/metastore/model"
 	mocks2 "github.com/milvus-io/milvus/internal/mocks"
+	"github.com/milvus-io/milvus/internal/mocks/distributed/mock_streaming"
 	"github.com/milvus-io/milvus/internal/types"
 	"github.com/milvus-io/milvus/pkg/log"
 	"github.com/milvus-io/milvus/pkg/mq/msgstream"
@@ -1440,6 +1442,13 @@ func TestImportV2(t *testing.T) {
 		catalog.EXPECT().ListPreImportTasks(mock.Anything).Return(nil, nil)
 		catalog.EXPECT().ListImportTasks(mock.Anything).Return(nil, nil)
 		catalog.EXPECT().SaveImportJob(mock.Anything, mock.Anything).Return(nil)
+		wal := mock_streaming.NewMockWALAccesser(t)
+		b := mock_streaming.NewMockBroadcast(t)
+		wal.EXPECT().Broadcast().Return(b)
+		b.EXPECT().BlockUntilResourceKeyAckOnce(mock.Anything, mock.Anything).Return(nil)
+		streaming.SetWALForTest(wal)
+		defer streaming.RecoverWALForTest()
+
 		s.importMeta, err = NewImportMeta(context.TODO(), catalog)
 		assert.NoError(t, err)
 		resp, err = s.GetImportProgress(ctx, &internalpb.GetImportProgressRequest{
diff --git a/internal/datanode/msghandlerimpl/msg_handler_impl_test.go b/internal/datanode/msghandlerimpl/msg_handler_impl_test.go
index 59c9817270456..5b7367b4d47b3 100644
--- a/internal/datanode/msghandlerimpl/msg_handler_impl_test.go
+++ b/internal/datanode/msghandlerimpl/msg_handler_impl_test.go
@@ -19,15 +19,21 @@
 package msghandlerimpl
 
 import (
+	"context"
 	"testing"
 
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/mock"
 
+	"github.com/milvus-io/milvus/internal/distributed/streaming"
 	"github.com/milvus-io/milvus/internal/flushcommon/broker"
+	"github.com/milvus-io/milvus/internal/mocks/distributed/mock_streaming"
+	"github.com/milvus-io/milvus/pkg/util/paramtable"
 )
 
 func TestMsgHandlerImpl(t *testing.T) {
+	paramtable.Init()
+	ctx := context.Background()
 	b := broker.NewMockBroker(t)
 	m := NewMsgHandlerImpl(b)
 	assert.Panics(t, func() {
@@ -39,14 +45,17 @@ func TestMsgHandlerImpl(t *testing.T) {
 	assert.Panics(t, func() {
 		m.HandleManualFlush("", nil)
 	})
-	t.Run("HandleImport error", func(t *testing.T) {
-		b.EXPECT().ImportV2(mock.Anything, mock.Anything).Return(nil, assert.AnError).Once()
-		err := m.HandleImport(nil, "", nil)
-		assert.Error(t, err)
-	})
 	t.Run("HandleImport success", func(t *testing.T) {
+		wal := mock_streaming.NewMockWALAccesser(t)
+		bo := mock_streaming.NewMockBroadcast(t)
+		wal.EXPECT().Broadcast().Return(bo)
+		bo.EXPECT().Ack(mock.Anything, mock.Anything).Return(nil)
+		streaming.SetWALForTest(wal)
+		defer streaming.RecoverWALForTest()
+
+		b.EXPECT().ImportV2(mock.Anything, mock.Anything).Return(nil, assert.AnError).Once()
 		b.EXPECT().ImportV2(mock.Anything, mock.Anything).Return(nil, nil).Once()
-		err := m.HandleImport(nil, "", nil)
+		err := m.HandleImport(ctx, "", nil)
 		assert.NoError(t, err)
 	})
 }
diff --git a/internal/flushcommon/pipeline/flow_graph_dd_node.go b/internal/flushcommon/pipeline/flow_graph_dd_node.go
index 3ff6824d51865..f9cd966868ae4 100644
--- a/internal/flushcommon/pipeline/flow_graph_dd_node.go
+++ b/internal/flushcommon/pipeline/flow_graph_dd_node.go
@@ -278,6 +278,9 @@ func (ddn *ddNode) Operate(in []Msg) []Msg {
 			}
 		case commonpb.MsgType_Import:
 			importMsg := msg.(*msgstream.ImportMsg)
+			if importMsg.GetCollectionID() != ddn.collectionID {
+				continue
+			}
 			logger := log.With(
 				zap.String("vchannel", ddn.Name()),
 				zap.Int32("msgType", int32(msg.Type())),
diff --git a/internal/proxy/task_import.go b/internal/proxy/task_import.go
index f6b4470ab2f47..c6725fc7d8a18 100644
--- a/internal/proxy/task_import.go
+++ b/internal/proxy/task_import.go
@@ -249,7 +249,7 @@ func (it *importTask) Execute(ctx context.Context) error {
 		log.Ctx(ctx).Warn("broadcast import msg failed", zap.Error(err))
 		return err
 	}
-	log.Ctx(ctx).Debug(
+	log.Ctx(ctx).Info(
 		"broadcast import msg success",
 		zap.Int64("jobID", jobID),
 		zap.Uint64("broadcastID", resp.BroadcastID),
diff --git a/scripts/run_intergration_test.sh b/scripts/run_intergration_test.sh
index 999387e43c8d7..d331eea326978 100755
--- a/scripts/run_intergration_test.sh
+++ b/scripts/run_intergration_test.sh
@@ -39,9 +39,9 @@ for d in $(go list ./tests/integration/...); do
     if [[ $d == *"coordrecovery"* ]]; then
         echo "running coordrecovery"
         # simplified command to speed up coord init test since it is large.
-        $TEST_CMD -tags dynamic,test -v -coverprofile=profile.out -covermode=atomic "$d" -caseTimeout=20m -timeout=30m
+        $TEST_CMD -tags dynamic,test -v -coverprofile=profile.out -covermode=atomic "$d" -caseTimeout=30m -timeout=60m
     else
-        $TEST_CMD -race -tags dynamic,test -v -coverpkg=./... -coverprofile=profile.out -covermode=atomic "$d" -caseTimeout=15m -timeout=30m
+        $TEST_CMD -race -tags dynamic,test -v -coverpkg=./... -coverprofile=profile.out -covermode=atomic "$d" -caseTimeout=25m -timeout=60m
     fi
     if [ -f profile.out ]; then
         grep -v kafka profile.out | grep -v planparserv2/generated | grep -v mocks | sed '1d' >> ${FILE_COVERAGE_INFO}
diff --git a/tests/integration/import/dynamic_field_test.go b/tests/integration/import/dynamic_field_test.go
index 14b1ab1c7d89f..f0d89dca486b0 100644
--- a/tests/integration/import/dynamic_field_test.go
+++ b/tests/integration/import/dynamic_field_test.go
@@ -47,7 +47,7 @@ func (s *BulkInsertSuite) testImportDynamicField() {
 	)
 
 	c := s.Cluster
-	ctx, cancel := context.WithTimeout(c.GetContext(), 120*time.Second)
+	ctx, cancel := context.WithTimeout(c.GetContext(), 240*time.Second)
 	defer cancel()
 
 	collectionName := "TestBulkInsert_B_" + funcutil.GenRandomStr()
diff --git a/tests/integration/import/import_test.go b/tests/integration/import/import_test.go
index 2cfe97f18a07b..4df3aee88219e 100644
--- a/tests/integration/import/import_test.go
+++ b/tests/integration/import/import_test.go
@@ -76,7 +76,7 @@ func (s *BulkInsertSuite) run() {
 	)
 
 	c := s.Cluster
-	ctx, cancel := context.WithTimeout(c.GetContext(), 120*time.Second)
+	ctx, cancel := context.WithTimeout(c.GetContext(), 240*time.Second)
 	defer cancel()
 
 	collectionName := "TestBulkInsert" + funcutil.GenRandomStr()
diff --git a/tests/integration/import/partition_key_test.go b/tests/integration/import/partition_key_test.go
index 8ff58df999160..73fc98e5ab063 100644
--- a/tests/integration/import/partition_key_test.go
+++ b/tests/integration/import/partition_key_test.go
@@ -46,7 +46,7 @@ func (s *BulkInsertSuite) TestImportWithPartitionKey() {
 	)
 
 	c := s.Cluster
-	ctx, cancel := context.WithTimeout(c.GetContext(), 120*time.Second)
+	ctx, cancel := context.WithTimeout(c.GetContext(), 240*time.Second)
 	defer cancel()
 
 	collectionName := "TestBulkInsert_WithPartitionKey_" + funcutil.GenRandomStr()
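
Several of the new tests above share one setup pattern: install a mock streaming WAL accesser whose Broadcast() returns a mock broadcaster, then restore the real accesser when the test ends. The sketch below is illustrative only and not part of this patch; it reuses only identifiers that appear in the hunks above, while the helper name setupMockWAL and the t.Cleanup-based restore are hypothetical, and the concrete expectation differs per test (BlockUntilResourceKeyAckOnce in the datacoord test, Ack in the datanode handler test).

	package example_test

	import (
		"testing"

		"github.com/stretchr/testify/mock"

		"github.com/milvus-io/milvus/internal/distributed/streaming"
		"github.com/milvus-io/milvus/internal/mocks/distributed/mock_streaming"
	)

	// setupMockWAL (hypothetical helper) mirrors the WAL-mock setup used in the
	// tests above: a mock WAL accesser whose Broadcast() yields a mock broadcaster,
	// swapped in for the test and swapped back out on cleanup.
	func setupMockWAL(t *testing.T) {
		wal := mock_streaming.NewMockWALAccesser(t)
		b := mock_streaming.NewMockBroadcast(t)
		wal.EXPECT().Broadcast().Return(b)
		// Expectation taken from the datacoord test; the datanode test stubs
		// b.EXPECT().Ack(...) instead.
		b.EXPECT().BlockUntilResourceKeyAckOnce(mock.Anything, mock.Anything).Return(nil)
		streaming.SetWALForTest(wal)
		t.Cleanup(func() { streaming.RecoverWALForTest() })
	}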