From 3277755eaff5ee14b0b0ed77995d2ee29eea8c81 Mon Sep 17 00:00:00 2001
From: husharp
Date: Tue, 14 May 2024 18:50:00 +0800
Subject: [PATCH] separate unit test

Signed-off-by: husharp
---
 .github/workflows/pd-tests.yaml      |  29 +++--
 Makefile                             |   4 +-
 codecov.yml                          |   6 -
 scripts/ci-subtask.sh                |  88 +++++---
 tests/scheduling_cluster.go          |   2 +-
 tests/server/cluster/cluster_test.go |   7 +-
 tests/testutil.go                    |   2 +-
 tools/pd-ut/README.md                |   4 +
 tools/pd-ut/coverProfile.go          | 176 +++++++++++++++++++++
 tools/pd-ut/ut.go                    | 107 ++++++++++++----
 10 files changed, 320 insertions(+), 105 deletions(-)
 create mode 100644 tools/pd-ut/coverProfile.go

diff --git a/.github/workflows/pd-tests.yaml b/.github/workflows/pd-tests.yaml
index 9084c7545a8..223187737e0 100644
--- a/.github/workflows/pd-tests.yaml
+++ b/.github/workflows/pd-tests.yaml
@@ -25,24 +25,33 @@ jobs:
     strategy:
       fail-fast: true
       matrix:
-        worker_id: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
+        include:
+          - worker_id: 1
+            name: 'Unit Test(1)'
+          - worker_id: 2
+            name: 'Unit Test(2)'
+          - worker_id: 3
+            name: 'Tools Test'
+          - worker_id: 4
+            name: 'Client Integration Test'
+          - worker_id: 5
+            name: 'TSO Integration Test'
+          - worker_id: 6
+            name: 'MicroService Integration Test'
     outputs:
-      job-total: 13
+      job-total: 6
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
       - uses: actions/setup-go@v5
         with:
           go-version: '1.21'
-      - name: Make Test
+      - name: ${{ matrix.name }}
         env:
           WORKER_ID: ${{ matrix.worker_id }}
-          WORKER_COUNT: 13
-          JOB_COUNT: 9 # 10 is tools test, 11, 12, 13 are for other integrations jobs
         run: |
-          make ci-test-job JOB_COUNT=$(($JOB_COUNT)) JOB_INDEX=$WORKER_ID
+          make ci-test-job JOB_INDEX=$WORKER_ID
           mv covprofile covprofile_$WORKER_ID
-          sed -i "/failpoint_binding/d" covprofile_$WORKER_ID
       - name: Upload coverage result ${{ matrix.worker_id }}
         uses: actions/upload-artifact@v4
         with:
@@ -62,7 +71,11 @@ jobs:
       - name: Merge
         env:
           TOTAL_JOBS: ${{needs.chunks.outputs.job-total}}
-        run: for i in $(seq 1 $TOTAL_JOBS); do cat covprofile_$i >> covprofile; done
+        run: |
+          for i in $(seq 1 $TOTAL_JOBS); do cat covprofile_$i >> covprofile; done
+          sed -i "/failpoint_binding/d" covprofile
+          # only keep the first line(`mode: atomic`) of the coverage profile
+          sed -i '2,${/mode: atomic/d;}' covprofile
       - name: Send coverage
         uses: codecov/codecov-action@v4.2.0
         with:
diff --git a/Makefile b/Makefile
index 205896c377a..779cbd21efb 100644
--- a/Makefile
+++ b/Makefile
@@ -127,7 +127,7 @@ regions-dump:
 stores-dump:
 	cd tools && CGO_ENABLED=0 go build -gcflags '$(GCFLAGS)' -ldflags '$(LDFLAGS)' -o $(BUILD_BIN_PATH)/stores-dump stores-dump/main.go
 pd-ut: pd-xprog
-	cd tools && GOEXPERIMENT=$(BUILD_GOEXPERIMENT) CGO_ENABLED=$(BUILD_TOOL_CGO_ENABLED) go build -gcflags '$(GCFLAGS)' -ldflags '$(LDFLAGS)' -o $(BUILD_BIN_PATH)/pd-ut pd-ut/ut.go
+	cd tools && GOEXPERIMENT=$(BUILD_GOEXPERIMENT) CGO_ENABLED=$(BUILD_TOOL_CGO_ENABLED) go build -gcflags '$(GCFLAGS)' -ldflags '$(LDFLAGS)' -o $(BUILD_BIN_PATH)/pd-ut pd-ut/ut.go pd-ut/coverProfile.go
 pd-xprog:
 	cd tools && GOEXPERIMENT=$(BUILD_GOEXPERIMENT) CGO_ENABLED=$(BUILD_TOOL_CGO_ENABLED) go build -tags xprog -gcflags '$(GCFLAGS)' -ldflags '$(LDFLAGS)' -o $(BUILD_BIN_PATH)/xprog pd-ut/xprog.go
 
@@ -251,7 +251,7 @@ basic-test: install-tools
 	go test $(BASIC_TEST_PKGS) || { $(FAILPOINT_DISABLE); exit 1; }
 	@$(FAILPOINT_DISABLE)
 
-ci-test-job: install-tools dashboard-ui
+ci-test-job: install-tools dashboard-ui pd-ut
 	@$(FAILPOINT_ENABLE)
 	./scripts/ci-subtask.sh $(JOB_COUNT) $(JOB_INDEX) || { $(FAILPOINT_DISABLE); exit 1; }
 	@$(FAILPOINT_DISABLE)
diff --git a/codecov.yml b/codecov.yml
index bb439917e78..936eb3bbb11 100644
--- a/codecov.yml
+++ b/codecov.yml
@@ -24,9 +24,3 @@ flag_management:
       target: 74% # increase it if you want to enforce higher coverage for project, current setting as 74% is for do not let the error be reported and lose the meaning of warning.
     - type: patch
       target: 74% # increase it if you want to enforce higher coverage for project, current setting as 74% is for do not let the error be reported and lose the meaning of warning.
-
-ignore:
-  # Ignore the tool tests
-  - tests/dashboard
-  - tests/pdbackup
-  - tests/pdctl
diff --git a/scripts/ci-subtask.sh b/scripts/ci-subtask.sh
index c00cba9c0a4..c8ac823bf19 100755
--- a/scripts/ci-subtask.sh
+++ b/scripts/ci-subtask.sh
@@ -3,63 +3,33 @@
 # ./ci-subtask.sh
 
 ROOT_PATH_COV=$(pwd)/covprofile
-
-if [[ $2 -gt 9 ]]; then
-    # run tools tests
-    if [[ $2 -eq 10 ]]; then
+# Currently, we only have 3 integration tests, so we can hardcode the task index.
+integrations_dir=$(pwd)/tests/integrations
+
+case $1 in
+    1)
+        # unit tests ignore `tests`
+        ./bin/pd-ut run --race --ignore tests --coverprofile $ROOT_PATH_COV || exit 1
+        ;;
+    2)
+        # unit tests only in `tests`
+        ./bin/pd-ut run tests --race --coverprofile $ROOT_PATH_COV || exit 1
+        ;;
+    3)
+        # tools tests
         cd ./tools && make ci-test-job && cat covprofile >> $ROOT_PATH_COV || exit 1
-        exit
-    fi
-
-    # Currently, we only have 3 integration tests, so we can hardcode the task index.
-    integrations_dir=$(pwd)/tests/integrations
-    integrations_tasks=($(find "$integrations_dir" -mindepth 1 -maxdepth 1 -type d))
-    for t in "${integrations_tasks[@]}"; do
-        if [[ "$t" = "$integrations_dir/client" && $2 -eq 11 ]]; then
-            cd ./client && make ci-test-job && cat covprofile >> $ROOT_PATH_COV || exit 1
-            cd $integrations_dir && make ci-test-job test_name=client && cat ./client/covprofile >> $ROOT_PATH_COV || exit 1
-        elif [[ "$t" = "$integrations_dir/tso" && $2 -eq 12 ]]; then
-            cd $integrations_dir && make ci-test-job test_name=tso && cat ./tso/covprofile >> $ROOT_PATH_COV || exit 1
-        elif [[ "$t" = "$integrations_dir/mcs" && $2 -eq 13 ]]; then
-            cd $integrations_dir && make ci-test-job test_name=mcs && cat ./mcs/covprofile >> $ROOT_PATH_COV || exit 1
-        fi
-    done
-else
-    # Get package test list.
-    packages=($(go list ./...))
-    dirs=($(find . -iname "*_test.go" -exec dirname {} \; | sort -u | sed -e "s/^\./github.com\/tikv\/pd/"))
-    tasks=($(comm -12 <(printf "%s\n" "${packages[@]}") <(printf "%s\n" "${dirs[@]}")))
-
-    weight() {
-        [[ $1 == "github.com/tikv/pd/server/api" ]] && return 30
-        [[ $1 == "github.com/tikv/pd/pkg/schedule" ]] && return 30
-        [[ $1 == "github.com/tikv/pd/pkg/core" ]] && return 30
-        [[ $1 == "github.com/tikv/pd/tests/server/api" ]] && return 30
-        [[ $1 =~ "pd/tests" ]] && return 5
-        return 1
-    }
-
-    # Create an associative array to store the weight of each task.
-    declare -A task_weights
-    for t in ${tasks[@]}; do
-        weight $t
-        task_weights[$t]=$?
-    done
-
-    # Sort tasks by weight in descending order.
-    tasks=($(printf "%s\n" "${tasks[@]}" | sort -rn))
-
-    scores=($(seq "$1" | xargs -I{} echo 0))
-
-    res=()
-    for t in ${tasks[@]}; do
-        min_i=0
-        for i in ${!scores[@]}; do
-            [[ ${scores[i]} -lt ${scores[$min_i]} ]] && min_i=$i
-        done
-        scores[$min_i]=$((${scores[$min_i]} + ${task_weights[$t]}))
-        [[ $(($min_i + 1)) -eq $2 ]] && res+=($t)
-    done
-
-    CGO_ENABLED=1 go test -timeout=15m -tags deadlock -race -cover -covermode=atomic -coverprofile=$ROOT_PATH_COV -coverpkg=./... ${res[@]}
-fi
+        ;;
+    4)
+        # integration test client
+        cd ./client && make ci-test-job && cat covprofile >> $ROOT_PATH_COV || exit 1
+        cd $integrations_dir && make ci-test-job test_name=client && cat ./client/covprofile >> $ROOT_PATH_COV || exit 1
+        ;;
+    5)
+        # integration test tso
+        cd $integrations_dir && make ci-test-job test_name=tso && cat ./tso/covprofile >> $ROOT_PATH_COV || exit 1
+        ;;
+    6)
+        # integration test mcs
+        cd $integrations_dir && make ci-test-job test_name=mcs && cat ./mcs/covprofile >> $ROOT_PATH_COV || exit 1
+        ;;
+esac
diff --git a/tests/scheduling_cluster.go b/tests/scheduling_cluster.go
index 1768c4128cc..434a6bd9a48 100644
--- a/tests/scheduling_cluster.go
+++ b/tests/scheduling_cluster.go
@@ -113,7 +113,7 @@ func (tc *TestSchedulingCluster) WaitForPrimaryServing(re *require.Assertions) *
 			}
 		}
 		return false
-	}, testutil.WithWaitFor(5*time.Second), testutil.WithTickInterval(50*time.Millisecond))
+	}, testutil.WithWaitFor(10*time.Second), testutil.WithTickInterval(50*time.Millisecond))
 
 	return primary
 }
diff --git a/tests/server/cluster/cluster_test.go b/tests/server/cluster/cluster_test.go
index aea5ff73968..61a4561c55a 100644
--- a/tests/server/cluster/cluster_test.go
+++ b/tests/server/cluster/cluster_test.go
@@ -753,20 +753,19 @@ func TestConcurrentHandleRegion(t *testing.T) {
 		re.NoError(err)
 		peerID, err := id.Alloc()
 		re.NoError(err)
-		regionID, err := id.Alloc()
-		re.NoError(err)
 		peer := &metapb.Peer{Id: peerID, StoreId: store.GetId()}
 		regionReq := &pdpb.RegionHeartbeatRequest{
 			Header: testutil.NewRequestHeader(clusterID),
 			Region: &metapb.Region{
-				Id:    regionID,
+				// mock error msg to trigger stream.Recv()
+				Id:    0,
 				Peers: []*metapb.Peer{peer},
 			},
 			Leader: peer,
 		}
 		err = stream.Send(regionReq)
 		re.NoError(err)
-		// make sure the first store can receive one response
+		// make sure the first store can receive one response(error msg)
 		if i == 0 {
 			wg.Add(1)
 		}
diff --git a/tests/testutil.go b/tests/testutil.go
index 5d9905af64c..150fe3dc79d 100644
--- a/tests/testutil.go
+++ b/tests/testutil.go
@@ -157,7 +157,7 @@ func WaitForPrimaryServing(re *require.Assertions, serverMap map[string]bs.Serve
 			}
 		}
 		return false
-	}, testutil.WithWaitFor(5*time.Second), testutil.WithTickInterval(50*time.Millisecond))
+	}, testutil.WithWaitFor(10*time.Second), testutil.WithTickInterval(50*time.Millisecond))
 
 	return primary
 }
diff --git a/tools/pd-ut/README.md b/tools/pd-ut/README.md
index 77b59bea4f7..805ee5cf322 100644
--- a/tools/pd-ut/README.md
+++ b/tools/pd-ut/README.md
@@ -63,4 +63,8 @@ pd-ut run --junitfile xxx
 
 // test with race flag
 pd-ut run --race
+
+// test with coverprofile
+pd-ut run --coverprofile xxx
+go tool cover --func=xxx
 ```
diff --git a/tools/pd-ut/coverProfile.go b/tools/pd-ut/coverProfile.go
new file mode 100644
index 00000000000..0ed1c3f3c61
--- /dev/null
+++ b/tools/pd-ut/coverProfile.go
@@ -0,0 +1,176 @@
+// Copyright 2024 TiKV Project Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bufio"
+	"fmt"
+	"os"
+	"path"
+	"sort"
+
+	"golang.org/x/tools/cover"
+)
+
+func collectCoverProfileFile() {
+	// Combine all the cover files of single test functions into a whole.
+	files, err := os.ReadDir(coverFileTempDir)
+	if err != nil {
+		fmt.Println("collect cover file error:", err)
+		os.Exit(-1)
+	}
+
+	w, err := os.Create(coverProfile)
+	if err != nil {
+		fmt.Println("create cover file error:", err)
+		os.Exit(-1)
+	}
+	//nolint: errcheck
+	defer w.Close()
+	w.WriteString("mode: atomic\n")
+
+	result := make(map[string]*cover.Profile)
+	for _, file := range files {
+		if file.IsDir() {
+			continue
+		}
+		collectOneCoverProfileFile(result, file)
+	}
+
+	w1 := bufio.NewWriter(w)
+	for _, prof := range result {
+		for _, block := range prof.Blocks {
+			fmt.Fprintf(w1, "%s:%d.%d,%d.%d %d %d\n",
+				prof.FileName,
+				block.StartLine,
+				block.StartCol,
+				block.EndLine,
+				block.EndCol,
+				block.NumStmt,
+				block.Count,
+			)
+		}
+		if err := w1.Flush(); err != nil {
+			fmt.Println("flush data to cover profile file error:", err)
+			os.Exit(-1)
+		}
+	}
+}
+
+func collectOneCoverProfileFile(result map[string]*cover.Profile, file os.DirEntry) {
+	f, err := os.Open(path.Join(coverFileTempDir, file.Name()))
+	if err != nil {
+		fmt.Println("open temp cover file error:", err)
+		os.Exit(-1)
+	}
+	//nolint: errcheck
+	defer f.Close()
+
+	profs, err := cover.ParseProfilesFromReader(f)
+	if err != nil {
+		fmt.Println("parse cover profile file error:", err)
+		os.Exit(-1)
+	}
+	mergeProfile(result, profs)
+}
+
+func mergeProfile(m map[string]*cover.Profile, profs []*cover.Profile) {
+	for _, prof := range profs {
+		sort.Sort(blocksByStart(prof.Blocks))
+		old, ok := m[prof.FileName]
+		if !ok {
+			m[prof.FileName] = prof
+			continue
+		}
+
+		// Merge samples from the same location.
+		// The data has already been sorted.
+		tmp := old.Blocks[:0]
+		var i, j int
+		for i < len(old.Blocks) && j < len(prof.Blocks) {
+			v1 := old.Blocks[i]
+			v2 := prof.Blocks[j]
+
+			switch compareProfileBlock(v1, v2) {
+			case -1:
+				tmp = appendWithReduce(tmp, v1)
+				i++
+			case 1:
+				tmp = appendWithReduce(tmp, v2)
+				j++
+			default:
+				tmp = appendWithReduce(tmp, v1)
+				tmp = appendWithReduce(tmp, v2)
+				i++
+				j++
+			}
+		}
+		for ; i < len(old.Blocks); i++ {
+			tmp = appendWithReduce(tmp, old.Blocks[i])
+		}
+		for ; j < len(prof.Blocks); j++ {
+			tmp = appendWithReduce(tmp, prof.Blocks[j])
+		}
+
+		m[prof.FileName] = old
+	}
+}
+
+// appendWithReduce works like append(), but it merges the duplicated values.
+func appendWithReduce(input []cover.ProfileBlock, b cover.ProfileBlock) []cover.ProfileBlock {
+	if len(input) >= 1 {
+		last := &input[len(input)-1]
+		if b.StartLine == last.StartLine &&
+			b.StartCol == last.StartCol &&
+			b.EndLine == last.EndLine &&
+			b.EndCol == last.EndCol {
+			if b.NumStmt != last.NumStmt {
+				panic(fmt.Errorf("inconsistent NumStmt: changed from %d to %d", last.NumStmt, b.NumStmt))
+			}
+			// Merge the data with the last one of the slice.
+			last.Count |= b.Count
+			return input
+		}
+	}
+	return append(input, b)
+}
+
+type blocksByStart []cover.ProfileBlock
+
+func compareProfileBlock(x, y cover.ProfileBlock) int {
+	if x.StartLine < y.StartLine {
+		return -1
+	}
+	if x.StartLine > y.StartLine {
+		return 1
+	}
+
+	// Now x.StartLine == y.StartLine
+	if x.StartCol < y.StartCol {
+		return -1
+	}
+	if x.StartCol > y.StartCol {
+		return 1
+	}
+
+	return 0
+}
+
+func (b blocksByStart) Len() int      { return len(b) }
+func (b blocksByStart) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
+func (b blocksByStart) Less(i, j int) bool {
+	bi, bj := b[i], b[j]
+	return bi.StartLine < bj.StartLine || bi.StartLine == bj.StartLine && bi.StartCol < bj.StartCol
+}
diff --git a/tools/pd-ut/ut.go b/tools/pd-ut/ut.go
index 7fc96ee11cf..dbbd88b868a 100644
--- a/tools/pd-ut/ut.go
+++ b/tools/pd-ut/ut.go
@@ -74,7 +74,11 @@ pd-ut build xxx
 pd-ut run --junitfile xxx
 
 // test with race flag
-pd-ut run --race`
+pd-ut run --race
+
+// test with coverprofile
+pd-ut run --coverprofile xxx
+go tool cover --func=xxx`
 
 	fmt.Println(msg)
 	return true
@@ -84,17 +88,32 @@ const modulePath = "github.com/tikv/pd"
 
 var (
 	// runtime
-	p             int
-	buildParallel int
-	workDir       string
+	p                int
+	buildParallel    int
+	workDir          string
+	coverFileTempDir string
 	// arguments
-	race      bool
-	junitFile string
+	race         bool
+	junitFile    string
+	coverProfile string
+	ignoreDir    string
 )
 
 func main() {
 	race = handleFlag("--race")
 	junitFile = stripFlag("--junitfile")
+	coverProfile = stripFlag("--coverprofile")
+	ignoreDir = stripFlag("--ignore")
+
+	if coverProfile != "" {
+		var err error
+		coverFileTempDir, err = os.MkdirTemp(os.TempDir(), "cov")
+		if err != nil {
+			fmt.Println("create temp dir fail", coverFileTempDir)
+			os.Exit(1)
+		}
+		defer os.RemoveAll(coverFileTempDir)
+	}
 
 	// Get the correct count of CPU if it's in docker.
 	p = runtime.GOMAXPROCS(0)
@@ -204,10 +223,16 @@ func cmdBuild(args ...string) bool {
 
 	// build test binary of a single package
 	if len(args) >= 1 {
-		pkg := args[0]
-		err := buildTestBinary(pkg)
+		var dirPkgs []string
+		for _, pkg := range pkgs {
+			if strings.Contains(pkg, args[0]) {
+				dirPkgs = append(dirPkgs, pkg)
+			}
+		}
+
+		err := buildTestBinaryMulti(dirPkgs)
 		if err != nil {
-			log.Println("build package error", pkg, err)
+			log.Println("build package error", dirPkgs, err)
 			return false
 		}
 	}
@@ -248,23 +273,32 @@ func cmdRun(args ...string) bool {
 
 	// run tests for a single package
 	if len(args) == 1 {
-		pkg := args[0]
-		err := buildTestBinary(pkg)
-		if err != nil {
-			log.Println("build package error", pkg, err)
-			return false
+		var dirPkgs []string
+		for _, pkg := range pkgs {
+			if strings.Contains(pkg, args[0]) {
+				dirPkgs = append(dirPkgs, pkg)
+			}
 		}
-		exist, err := testBinaryExist(pkg)
+
+		err := buildTestBinaryMulti(dirPkgs)
 		if err != nil {
-			log.Println("check test binary existence error", err)
+			log.Println("build package error", dirPkgs, err)
 			return false
 		}
-		if !exist {
-			fmt.Println("no test case in ", pkg)
-			return false
+		for _, pkg := range dirPkgs {
+			exist, err := testBinaryExist(pkg)
+			if err != nil {
+				fmt.Println("check test binary existence error", err)
+				return false
+			}
+			if !exist {
+				fmt.Println("no test case in ", pkg)
+				continue
+			}
+
+			tasks = listTestCases(pkg, tasks)
 		}
-		tasks = listTestCases(pkg, tasks)
 	}
 
 	// run a single test
@@ -326,6 +360,10 @@ func cmdRun(args ...string) bool {
 		}
 	}
 
+	if coverProfile != "" {
+		collectCoverProfileFile()
+	}
+
 	for _, work := range works {
 		if work.Fail {
 			return false
@@ -336,7 +374,7 @@ func cmdRun(args ...string) bool {
 
 // stripFlag strip the '--flag xxx' from the command line os.Args
 // Example of the os.Args changes
-// Before: ut run pkg TestXXX --junitfile yyy
+// Before: ut run pkg TestXXX --coverprofile xxx --junitfile yyy
 // After: ut run pkg TestXXX
 // The value of the flag is returned.
 func stripFlag(flag string) string {
@@ -421,6 +459,7 @@ func filterTestCases(tasks []task, arg1 string) ([]task, error) {
 func listPackages() ([]string, error) {
 	cmd := exec.Command("go", "list", "./...")
+	cmd.Dir = workDir
 	ss, err := cmdToLines(cmd)
 	if err != nil {
 		return nil, withTrace(err)
 	}
@@ -565,7 +604,16 @@ func failureCases(input []JUnitTestCase) int {
 func (*numa) testCommand(pkg string, fn string) *exec.Cmd {
 	args := make([]string, 0, 10)
 	exe := "./" + testFileName(pkg)
-	args = append(args, "-test.cpu", "1")
+	if coverProfile != "" {
+		fileName := strings.ReplaceAll(pkg, "/", "_") + "." + fn
+		tmpFile := path.Join(coverFileTempDir, fileName)
+		args = append(args, "-test.coverprofile", tmpFile)
+	}
+	if strings.Contains(fn, "Suite") {
+		args = append(args, "-test.cpu", fmt.Sprint(p/2))
+	} else {
+		args = append(args, "-test.cpu", "1")
+	}
 	if !race {
 		args = append(args, []string{"-test.timeout", "2m"}...)
 	} else {
@@ -580,7 +628,10 @@ func (*numa) testCommand(pkg string, fn string) *exec.Cmd {
 }
 
 func skipDIR(pkg string) bool {
-	skipDir := []string{"tests", "bin", "cmd", "tools"}
+	skipDir := []string{"bin", "cmd", "realcluster", "tests/integrations"}
+	if ignoreDir != "" {
+		skipDir = append(skipDir, ignoreDir)
+	}
 	for _, ignore := range skipDir {
 		if strings.HasPrefix(pkg, ignore) {
 			return true
@@ -593,8 +644,9 @@ func generateBuildCache() error {
 	// cd cmd/pd-server && go test -tags=tso_function_test,deadlock -exec-=true -vet=off -toolexec=go-compile-without-link
 	cmd := exec.Command("go", "test", "-exec=true", "-vet", "off", "--tags=tso_function_test,deadlock")
 	goCompileWithoutLink := fmt.Sprintf("-toolexec=%s/tools/pd-ut/go-compile-without-link.sh", workDir)
-	cmd.Args = append(cmd.Args, goCompileWithoutLink)
 	cmd.Dir = fmt.Sprintf("%s/cmd/pd-server", workDir)
+	cmd.Args = append(cmd.Args, goCompileWithoutLink)
+
 	cmd.Stdout = os.Stdout
 	cmd.Stderr = os.Stderr
 	if err := cmd.Run(); err != nil {
@@ -620,6 +672,10 @@ func buildTestBinaryMulti(pkgs []string) error {
 
 	p := strconv.Itoa(buildParallel)
 	cmd := exec.Command("go", "test", "-p", p, "--exec", xprogPath, "-vet", "off", "--tags=tso_function_test,deadlock")
+	if coverProfile != "" {
+		coverpkg := "./..."
+		cmd.Args = append(cmd.Args, "-cover", fmt.Sprintf("-coverpkg=%s", coverpkg))
+	}
 	cmd.Args = append(cmd.Args, packages...)
 	cmd.Dir = workDir
 	cmd.Stdout = os.Stdout
@@ -633,6 +689,9 @@ func buildTestBinary(pkg string) error {
 	//nolint:gosec
 	cmd := exec.Command("go", "test", "-c", "-vet", "off", "--tags=tso_function_test,deadlock", "-o", testFileName(pkg), "-v")
+	if coverProfile != "" {
+		cmd.Args = append(cmd.Args, "-cover", "-coverpkg=./...")
+	}
 	if race {
 		cmd.Args = append(cmd.Args, "-race")
 	}
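
Note (illustrative sketch, not part of the patch): the file written by `pd-ut run --coverprofile covprofile` and by the CI merge step above is a standard Go cover profile (a `mode: atomic` header followed by one block per line), so it can be inspected with `go tool cover --func=covprofile` as the README change documents. The short Go program below is one possible way to consume the same merged profile programmatically via golang.org/x/tools/cover, the package the new coverProfile.go already uses; the file name taken from os.Args is an assumption for illustration, not an API of pd-ut.

// cover_summary.go - sketch: print per-file statement coverage from a merged profile.
package main

import (
	"fmt"
	"os"

	"golang.org/x/tools/cover"
)

func main() {
	// Assumed usage: go run cover_summary.go covprofile
	profiles, err := cover.ParseProfiles(os.Args[1])
	if err != nil {
		fmt.Println("parse cover profile error:", err)
		os.Exit(1)
	}
	for _, p := range profiles {
		var total, covered int
		for _, b := range p.Blocks {
			total += b.NumStmt
			if b.Count > 0 {
				covered += b.NumStmt
			}
		}
		if total > 0 {
			fmt.Printf("%s\t%.1f%%\n", p.FileName, 100*float64(covered)/float64(total))
		}
	}
}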