diff --git a/artifactory/commands/transferfiles/delayedartifactshandler.go b/artifactory/commands/transferfiles/delayedartifactshandler.go
index 0d20057a0..4609b63fb 100644
--- a/artifactory/commands/transferfiles/delayedartifactshandler.go
+++ b/artifactory/commands/transferfiles/delayedartifactshandler.go
@@ -337,7 +337,12 @@ func (w *SplitContentWriter) closeCurrentFile() error {
 		return err
 	}
 	if w.writer.GetFilePath() != "" {
-		fullPath := filepath.Join(w.dirPath, fmt.Sprintf("%s-%d.json", w.filePrefix, w.fileIndex))
+		fullPath, err := getUniqueErrorOrDelayFilePath(w.dirPath, func() string {
+			return w.filePrefix
+		})
+		if err != nil {
+			return err
+		}
 		log.Debug(fmt.Sprintf("Saving split content JSON file to: %s.", fullPath))
 		if err := fileutils.MoveFile(w.writer.GetFilePath(), fullPath); err != nil {
 			return fmt.Errorf("saving file failed! failed moving %s to %s: %w", w.writer.GetFilePath(), fullPath, err)
diff --git a/artifactory/commands/transferfiles/errorshandler.go b/artifactory/commands/transferfiles/errorshandler.go
index d7e5e8262..f716f2a2e 100644
--- a/artifactory/commands/transferfiles/errorshandler.go
+++ b/artifactory/commands/transferfiles/errorshandler.go
@@ -11,7 +11,6 @@ import (
 	"github.com/jfrog/jfrog-client-go/utils/io/fileutils"
 	"github.com/jfrog/jfrog-client-go/utils/log"
 	"os"
-	"path/filepath"
 	"time"
 )
 
@@ -42,9 +41,7 @@ type TransferErrorsMng struct {
 type errorWriter struct {
 	writer     *content.ContentWriter
 	errorCount int
-	// In case we have multiple errors files - we index them
-	fileIndex int
-	filePath  string
+	filePath   string
 }
 
 type errorWriterMng struct {
@@ -116,7 +113,7 @@ func (mng *TransferErrorsMng) start() (err error) {
 	if err != nil {
 		return err
 	}
-	writerRetry, retryFilePath, err := mng.newContentWriter(retryablePath, 0)
+	writerRetry, retryFilePath, err := mng.newUniqueContentWriter(retryablePath)
 	if err != nil {
 		return err
 	}
@@ -126,14 +123,14 @@ func (mng *TransferErrorsMng) start() (err error) {
 			err = e
 		}
 	}()
-	writerMng.retryable = errorWriter{writer: writerRetry, fileIndex: 0, filePath: retryFilePath}
+	writerMng.retryable = errorWriter{writer: writerRetry, filePath: retryFilePath}
 	// Init the content writer which is responsible for writing 'skipped errors' into files.
 	// In the next run we won't retry and upload those files.
 	skippedPath, err := getJfrogTransferRepoSkippedDir(mng.repoKey)
 	if err != nil {
 		return err
 	}
-	writerSkip, skipFilePath, err := mng.newContentWriter(skippedPath, 0)
+	writerSkip, skipFilePath, err := mng.newUniqueContentWriter(skippedPath)
 	if err != nil {
 		return err
 	}
@@ -143,7 +140,7 @@ func (mng *TransferErrorsMng) start() (err error) {
 			err = e
 		}
 	}()
-	writerMng.skipped = errorWriter{writer: writerSkip, fileIndex: 0, filePath: skipFilePath}
+	writerMng.skipped = errorWriter{writer: writerSkip, filePath: skipFilePath}
 	mng.errorWriterMng = writerMng
 	// Read errors from channel and write them to files.
@@ -156,17 +153,22 @@ func (mng *TransferErrorsMng) start() (err error) {
 	return
 }
 
-func (mng *TransferErrorsMng) newContentWriter(dirPath string, index int) (*content.ContentWriter, string, error) {
+func (mng *TransferErrorsMng) newUniqueContentWriter(dirPath string) (*content.ContentWriter, string, error) {
 	writer, err := content.NewContentWriter("errors", true, false)
 	if err != nil {
 		return nil, "", err
 	}
-	errorsFilePath := filepath.Join(dirPath, getErrorsFileName(mng.repoKey, mng.phaseId, mng.phaseStartTime, index))
+	errorsFilePath, err := getUniqueErrorOrDelayFilePath(dirPath, func() string {
+		return getErrorsFileNamePrefix(mng.repoKey, mng.phaseId, mng.phaseStartTime)
+	})
+	if err != nil {
+		return nil, "", err
+	}
 	return writer, errorsFilePath, nil
 }
 
-func getErrorsFileName(repoKey string, phaseId int, phaseStartTime string, index int) string {
-	return fmt.Sprintf("%s-%d-%s-%d.json", repoKey, phaseId, phaseStartTime, index)
+func getErrorsFileNamePrefix(repoKey string, phaseId int, phaseStartTime string) string {
+	return fmt.Sprintf("%s-%d-%s", repoKey, phaseId, phaseStartTime)
 }
 
 func (mng *TransferErrorsMng) writeErrorContent(e ExtendedFileUploadStatusResponse) error {
@@ -197,12 +199,11 @@ func (mng *TransferErrorsMng) writeSkippedErrorContent(e ExtendedFileUploadStatu
 		return err
 	}
 	// Initialize variables for new errors file
-	mng.errorWriterMng.skipped.fileIndex++
 	dirPath, err := getJfrogTransferRepoSkippedDir(mng.repoKey)
 	if err != nil {
 		return err
 	}
-	mng.errorWriterMng.skipped.writer, mng.errorWriterMng.skipped.filePath, err = mng.newContentWriter(dirPath, mng.errorWriterMng.skipped.fileIndex)
+	mng.errorWriterMng.skipped.writer, mng.errorWriterMng.skipped.filePath, err = mng.newUniqueContentWriter(dirPath)
 	if err != nil {
 		return err
 	}
@@ -222,12 +223,11 @@ func (mng *TransferErrorsMng) writeRetryableErrorContent(e ExtendedFileUploadSta
 		return err
 	}
 	// Initialize variables for new errors file
-	mng.errorWriterMng.retryable.fileIndex++
 	dirPath, err := getJfrogTransferRepoRetryableDir(mng.repoKey)
 	if err != nil {
 		return err
 	}
-	mng.errorWriterMng.retryable.writer, mng.errorWriterMng.retryable.filePath, err = mng.newContentWriter(dirPath, mng.errorWriterMng.retryable.fileIndex)
+	mng.errorWriterMng.retryable.writer, mng.errorWriterMng.retryable.filePath, err = mng.newUniqueContentWriter(dirPath)
 	if err != nil {
 		return err
 	}
diff --git a/artifactory/commands/transferfiles/errorshandler_test.go b/artifactory/commands/transferfiles/errorshandler_test.go
index 9888534ba..3c3d70868 100644
--- a/artifactory/commands/transferfiles/errorshandler_test.go
+++ b/artifactory/commands/transferfiles/errorshandler_test.go
@@ -164,6 +164,6 @@ func writeEmptyErrorsFile(t *testing.T, repoKey string, retryable bool, phase, c
 	assert.NoError(t, err)
 	assert.NoError(t, fileutils.CreateDirIfNotExist(errorsDirPath))
 
-	fileName := getErrorsFileName(repoKey, phase, state.ConvertTimeToEpochMilliseconds(time.Now()), counter)
+	fileName := fmt.Sprintf("%s-%d.json", getErrorsFileNamePrefix(repoKey, phase, state.ConvertTimeToEpochMilliseconds(time.Now())), counter)
 	assert.NoError(t, os.WriteFile(filepath.Join(errorsDirPath, fileName), nil, 0644))
 }
diff --git a/artifactory/commands/transferfiles/filediff_test.go b/artifactory/commands/transferfiles/filediff_test.go
new file mode 100644
index 000000000..3ee7d12ff
--- /dev/null
+++ b/artifactory/commands/transferfiles/filediff_test.go
@@ -0,0 +1,34 @@
+package transferfiles
+
+import (
+	"testing"
+
+	"github.com/jfrog/jfrog-cli-core/v2/artifactory/commands/transferfiles/api"
+	servicesUtils "github.com/jfrog/jfrog-client-go/artifactory/services/utils"
+	"github.com/stretchr/testify/assert"
+)
+
+var convertResultsToFileRepresentationTestCases = []struct {
+	input          servicesUtils.ResultItem
+	expectedOutput api.FileRepresentation
+}{
+	{
+		servicesUtils.ResultItem{Repo: repo1Key, Path: "path-in-repo", Name: "file-name", Type: "file", Size: 100},
+		api.FileRepresentation{Repo: repo1Key, Path: "path-in-repo", Name: "file-name", Size: 100},
+	},
+	{
+		servicesUtils.ResultItem{Repo: repo1Key, Path: "path-in-repo", Name: "folder-name", Type: "folder"},
+		api.FileRepresentation{Repo: repo1Key, Path: "path-in-repo/folder-name"},
+	},
+	{
+		servicesUtils.ResultItem{Repo: repo1Key, Path: ".", Name: "folder-name", Type: "folder"},
+		api.FileRepresentation{Repo: repo1Key, Path: "folder-name"},
+	},
+}
+
+func TestConvertResultsToFileRepresentation(t *testing.T) {
+	for _, testCase := range convertResultsToFileRepresentationTestCases {
+		files := convertResultsToFileRepresentation([]servicesUtils.ResultItem{testCase.input})
+		assert.Equal(t, []api.FileRepresentation{testCase.expectedOutput}, files)
+	}
+}
diff --git a/artifactory/commands/transferfiles/filesdiff.go b/artifactory/commands/transferfiles/filesdiff.go
index 3558606b3..b905fa7c4 100644
--- a/artifactory/commands/transferfiles/filesdiff.go
+++ b/artifactory/commands/transferfiles/filesdiff.go
@@ -2,6 +2,7 @@ package transferfiles
 
 import (
 	"fmt"
+	"path"
 	"time"
 
 	"github.com/jfrog/gofrog/parallel"
@@ -113,7 +114,7 @@ func (f *filesDiffPhase) handleTimeFrameFilesDiff(pcWrapper *producerConsumerWra
 	paginationI := 0
 	for {
-		result, err := f.getTimeFrameFilesDiff(fromTimestamp, toTimestamp, paginationI)
+		result, lastPage, err := f.getTimeFrameFilesDiff(fromTimestamp, toTimestamp, paginationI)
 		if err != nil {
 			return err
 		}
@@ -145,7 +146,7 @@ func (f *filesDiffPhase) handleTimeFrameFilesDiff(pcWrapper *producerConsumerWra
 			return err
 		}
 
-		if len(result) < AqlPaginationLimit {
+		if lastPage {
 			break
 		}
 		paginationI++
@@ -163,12 +164,26 @@ func (f *filesDiffPhase) handleTimeFrameFilesDiff(pcWrapper *producerConsumerWra
 
 func convertResultsToFileRepresentation(results []servicesUtils.ResultItem) (files []api.FileRepresentation) {
 	for _, result := range results {
-		files = append(files, api.FileRepresentation{
-			Repo: result.Repo,
-			Path: result.Path,
-			Name: result.Name,
-			Size: result.Size,
-		})
+		switch result.Type {
+		case "folder":
+			var pathInRepo string
+			if result.Path == "." {
+				pathInRepo = result.Name
+			} else {
+				pathInRepo = path.Join(result.Path, result.Name)
+			}
+			files = append(files, api.FileRepresentation{
+				Repo: result.Repo,
+				Path: pathInRepo,
+			})
+		default:
+			files = append(files, api.FileRepresentation{
+				Repo: result.Repo,
+				Path: result.Path,
+				Name: result.Name,
+				Size: result.Size,
+			})
+		}
 	}
 	return
 }
@@ -177,7 +192,11 @@ func convertResultsToFileRepresentation(results []servicesUtils.ResultItem) (fil
 // fromTimestamp - Time in RFC3339 represents the start time
 // toTimestamp - Time in RFC3339 represents the end time
 // paginationOffset - Requested page
-func (f *filesDiffPhase) getTimeFrameFilesDiff(fromTimestamp, toTimestamp string, paginationOffset int) (result []servicesUtils.ResultItem, err error) {
+// Return values:
+// result - The list of changed files and folders between the input timestamps
+// lastPage - True if this is the last AQL page and there is no need to run another AQL request
+// err - The error, if any occurred
+func (f *filesDiffPhase) getTimeFrameFilesDiff(fromTimestamp, toTimestamp string, paginationOffset int) (result []servicesUtils.ResultItem, lastPage bool, err error) {
 	var timeFrameFilesDiff *servicesUtils.AqlSearchResult
 	if f.packageType == docker {
 		// Handle Docker repositories.
@@ -187,9 +206,11 @@ func (f *filesDiffPhase) getTimeFrameFilesDiff(fromTimestamp, toTimestamp string
 		timeFrameFilesDiff, err = f.getNonDockerTimeFrameFilesDiff(fromTimestamp, toTimestamp, paginationOffset)
 	}
 	if err != nil {
-		return []servicesUtils.ResultItem{}, err
+		return []servicesUtils.ResultItem{}, true, err
 	}
-	return f.locallyGeneratedFilter.FilterLocallyGenerated(timeFrameFilesDiff.Results)
+	lastPage = len(timeFrameFilesDiff.Results) < AqlPaginationLimit
+	result, err = f.locallyGeneratedFilter.FilterLocallyGenerated(timeFrameFilesDiff.Results)
+	return
 }
 
 func (f *filesDiffPhase) getNonDockerTimeFrameFilesDiff(fromTimestamp, toTimestamp string, paginationOffset int) (aqlResult *servicesUtils.AqlSearchResult, err error) {
@@ -242,7 +263,7 @@ func (f *filesDiffPhase) getDockerTimeFrameFilesDiff(fromTimestamp, toTimestamp
 
 func generateDiffAqlQuery(repoKey, fromTimestamp, toTimestamp string, paginationOffset int) string {
 	query := fmt.Sprintf(`items.find({"$and":[{"modified":{"$gte":"%s"}},{"modified":{"$lt":"%s"}},{"repo":"%s","type":"any"}]})`, fromTimestamp, toTimestamp, repoKey)
-	query += `.include("repo","path","name","modified","size")`
+	query += `.include("repo","path","name","type","modified","size")`
 	query += fmt.Sprintf(`.sort({"$asc":["modified"]}).offset(%d).limit(%d)`, paginationOffset*AqlPaginationLimit, AqlPaginationLimit)
 	return query
 }
@@ -265,7 +286,7 @@ func generateGetDirContentAqlQuery(repoKey string, paths []string) string {
 func generateDockerManifestAqlQuery(repoKey, fromTimestamp, toTimestamp string, paginationOffset int) string {
 	query := `items.find({"$and":`
 	query += fmt.Sprintf(`[{"repo":"%s"},{"modified":{"$gte":"%s"}},{"modified":{"$lt":"%s"}},{"$or":[{"name":"manifest.json"},{"name":"list.manifest.json"}]}`, repoKey, fromTimestamp, toTimestamp)
-	query += `]}).include("repo","path","name","modified")`
+	query += `]}).include("repo","path","name","type","modified")`
 	query += fmt.Sprintf(`.sort({"$asc":["modified"]}).offset(%d).limit(%d)`, paginationOffset*AqlPaginationLimit, AqlPaginationLimit)
 	return query
 }
diff --git a/artifactory/commands/transferfiles/fulltransfer.go b/artifactory/commands/transferfiles/fulltransfer.go
index 9b2c6df13..773ccc24f 100644
--- a/artifactory/commands/transferfiles/fulltransfer.go
+++ b/artifactory/commands/transferfiles/fulltransfer.go
@@ -182,7 +182,9 @@ func (m *fullTransferPhase) searchAndHandleFolderContents(params folderParams, p
 	}
 
 	// Add the folder as a candidate to transfer. The reason is that we'd like to transfer only folders with properties or empty folders.
-	curUploadChunk.AppendUploadCandidateIfNeeded(api.FileRepresentation{Repo: m.repoKey, Path: params.relativePath, NonEmptyDir: len(result) > 0}, m.buildInfoRepo)
+	if params.relativePath != "." {
+		curUploadChunk.AppendUploadCandidateIfNeeded(api.FileRepresentation{Repo: m.repoKey, Path: params.relativePath, NonEmptyDir: len(result) > 0}, m.buildInfoRepo)
+	}
 
 	// Empty folder
 	if paginationI == 0 && len(result) == 0 {
diff --git a/artifactory/commands/transferfiles/manager.go b/artifactory/commands/transferfiles/manager.go
index 0a85fd124..2b825f88b 100644
--- a/artifactory/commands/transferfiles/manager.go
+++ b/artifactory/commands/transferfiles/manager.go
@@ -259,19 +259,25 @@ func pollUploads(phaseBase *phaseBase, srcUpService *srcUserPluginService, uploa
 	if phaseBase != nil {
 		timeEstMng = &phaseBase.stateManager.TimeEstimationManager
 	}
-	for {
+	for i := 0; ; i++ {
 		if ShouldStop(phaseBase, nil, errorsChannelMng) {
 			return
 		}
 		time.Sleep(waitTimeBetweenChunkStatusSeconds * time.Second)
-		// 'Working threads' are determined by how many upload chunks are currently being processed by the source Artifactory instance.
-		if err := phaseBase.stateManager.SetWorkingThreads(curProcessedUploadChunks); err != nil {
-			log.Error("Couldn't set the current number of working threads:", err.Error())
+		// Run once every 3 minutes
+		if i%60 == 0 {
+			// 'Working threads' are determined by how many upload chunks are currently being processed by the source Artifactory instance.
+			if err := phaseBase.stateManager.SetWorkingThreads(curProcessedUploadChunks); err != nil {
+				log.Error("Couldn't set the current number of working threads:", err.Error())
+			}
 		}
-		// Each uploading thread receive a token and a node id from the source via the uploadChunkChan, so this go routine can poll on its status.
+		// Each uploading thread receives a token and a node id from the source via the uploadChunkChan, so this go routine can poll on its status.
 		fillChunkDataBatch(&chunksLifeCycleManager, uploadChunkChan)
+		if err := chunksLifeCycleManager.StoreStaleChunks(phaseBase.stateManager); err != nil {
+			log.Error("Couldn't store the stale chunks:", err.Error())
+		}
 		// When totalChunks size is zero, it means that all the tokens are uploaded,
 		// we received 'DONE' for all of them, and we notified the source that they can be deleted from the memory.
 		// If during the polling some chunks data were lost due to network issues, either on the client or on the source,
diff --git a/artifactory/commands/transferfiles/state/runstatus.go b/artifactory/commands/transferfiles/state/runstatus.go
index df1db470a..1b9ffa9d6 100644
--- a/artifactory/commands/transferfiles/state/runstatus.go
+++ b/artifactory/commands/transferfiles/state/runstatus.go
@@ -38,6 +38,19 @@ type TransferRunStatus struct {
 	WorkingThreads        int  `json:"working_threads,omitempty"`
 	TransferFailures      uint `json:"transfer_failures,omitempty"`
 	TimeEstimationManager `json:"time_estimation,omitempty"`
+	StaleChunks           []StaleChunks `json:"stale_chunks,omitempty"`
+}
+
+// This structure contains a collection of chunks that have been undergoing processing for over 30 minutes
+type StaleChunks struct {
+	NodeID string       `json:"node_id,omitempty"`
+	Chunks []StaleChunk `json:"stale_node_chunks,omitempty"`
+}
+
+type StaleChunk struct {
+	ChunkID string   `json:"chunk_id,omitempty"`
+	Files   []string `json:"files,omitempty"`
+	Sent    int64    `json:"sent,omitempty"`
 }
 
 func (ts *TransferRunStatus) action(action ActionOnStatusFunc) error {
diff --git a/artifactory/commands/transferfiles/state/statemanager.go b/artifactory/commands/transferfiles/state/statemanager.go
index 389631f96..5425832e1 100644
--- a/artifactory/commands/transferfiles/state/statemanager.go
+++ b/artifactory/commands/transferfiles/state/statemanager.go
@@ -302,6 +302,20 @@ func (ts *TransferStateManager) GetWorkingThreads() (workingThreads int, err err
 	})
 }
 
+func (ts *TransferStateManager) SetStaleChunks(staleChunks []StaleChunks) error {
+	return ts.action(func(transferRunStatus *TransferRunStatus) error {
+		transferRunStatus.StaleChunks = staleChunks
+		return nil
+	})
+}
+
+func (ts *TransferStateManager) GetStaleChunks() (staleChunks []StaleChunks, err error) {
+	return staleChunks, ts.action(func(transferRunStatus *TransferRunStatus) error {
+		staleChunks = transferRunStatus.StaleChunks
+		return nil
+	})
+}
+
 func (ts *TransferStateManager) SaveStateAndSnapshots() error {
 	ts.TransferState.lastSaveTimestamp = time.Now()
 	if err := ts.persistTransferState(false); err != nil {
@@ -361,7 +375,7 @@ func GetRunningTime() (runningTime string, isRunning bool, err error) {
 		return
 	}
 	runningSecs := int64(time.Since(time.Unix(0, startTimestamp)).Seconds())
-	return secondsToLiteralTime(runningSecs, ""), true, nil
+	return SecondsToLiteralTime(runningSecs, ""), true, nil
 }
 
 func UpdateChunkInState(stateManager *TransferStateManager, chunk *api.ChunkStatus) (err error) {
diff --git a/artifactory/commands/transferfiles/state/timeestimation.go b/artifactory/commands/transferfiles/state/timeestimation.go
index 5fdf82089..9ca2e63ec 100644
--- a/artifactory/commands/transferfiles/state/timeestimation.go
+++ b/artifactory/commands/transferfiles/state/timeestimation.go
@@ -2,6 +2,7 @@ package state
 
 import (
 	"fmt"
+
 	"github.com/jfrog/jfrog-cli-core/v2/artifactory/commands/transferfiles/api"
 	"github.com/jfrog/jfrog-cli-core/v2/artifactory/utils"
 
@@ -185,7 +186,7 @@ func (tem *TimeEstimationManager) GetEstimatedRemainingTimeString() string {
 		return err.Error()
 	}
 
-	return secondsToLiteralTime(remainingTimeSec, "About ")
+	return SecondsToLiteralTime(remainingTimeSec, "About ")
 }
 
 func (tem *TimeEstimationManager) isTimeEstimationAvailable() bool {
diff --git a/artifactory/commands/transferfiles/state/utils.go b/artifactory/commands/transferfiles/state/utils.go
index 789c6cf46..6f87a5711 100644
--- a/artifactory/commands/transferfiles/state/utils.go
+++ b/artifactory/commands/transferfiles/state/utils.go
@@ -2,14 +2,15 @@ package state
 
 import (
 	"fmt"
-	"github.com/jfrog/build-info-go/utils"
-	"github.com/jfrog/jfrog-cli-core/v2/utils/coreutils"
-	"github.com/jfrog/jfrog-client-go/utils/errorutils"
-	"github.com/jfrog/jfrog-client-go/utils/io/fileutils"
 	"path/filepath"
 	"strconv"
 	"strings"
 	"time"
+
+	"github.com/jfrog/build-info-go/utils"
+	"github.com/jfrog/jfrog-cli-core/v2/utils/coreutils"
+	"github.com/jfrog/jfrog-client-go/utils/errorutils"
+	"github.com/jfrog/jfrog-client-go/utils/io/fileutils"
 )
 
 const (
@@ -36,9 +37,9 @@ func ConvertTimeToEpochMilliseconds(timeToConvert time.Time) string {
 	return strconv.FormatInt(timeToConvert.UnixMilli(), 10)
 }
 
-// secondsToLiteralTime converts a number of seconds to an easy-to-read string.
+// SecondsToLiteralTime converts a number of seconds to an easy-to-read string.
 // Prefix is not taken into account if the time is less than a minute.
-func secondsToLiteralTime(secondsToConvert int64, prefix string) string {
+func SecondsToLiteralTime(secondsToConvert int64, prefix string) string {
 	daysTime := secondsToConvert / secondsInDay
 	daysTimeInSecs := daysTime * secondsInDay
 	hoursTime := (secondsToConvert - daysTimeInSecs) / secondsInHour
diff --git a/artifactory/commands/transferfiles/state/utils_test.go b/artifactory/commands/transferfiles/state/utils_test.go
index d8375f196..89fb980de 100644
--- a/artifactory/commands/transferfiles/state/utils_test.go
+++ b/artifactory/commands/transferfiles/state/utils_test.go
@@ -1,8 +1,9 @@
 package state
 
 import (
-	"github.com/stretchr/testify/assert"
 	"testing"
+
+	"github.com/stretchr/testify/assert"
 )
 
 func TestSecondsToLiteralTime(t *testing.T) {
@@ -32,7 +33,7 @@ func TestSecondsToLiteralTime(t *testing.T) {
 
 	for _, testCase := range testCases {
 		t.Run(testCase.name, func(t *testing.T) {
-			assert.Equal(t, testCase.expected, secondsToLiteralTime(testCase.secsToConvert, testCase.prefix))
+			assert.Equal(t, testCase.expected, SecondsToLiteralTime(testCase.secsToConvert, testCase.prefix))
 		})
 	}
 }
diff --git a/artifactory/commands/transferfiles/status.go b/artifactory/commands/transferfiles/status.go
index 4917993c1..db95454cd 100644
--- a/artifactory/commands/transferfiles/status.go
+++ b/artifactory/commands/transferfiles/status.go
@@ -5,6 +5,7 @@ import (
 	"path/filepath"
 	"strconv"
 	"strings"
+	"time"
 
 	"github.com/jfrog/jfrog-cli-core/v2/artifactory/commands/transferfiles/api"
 	"github.com/jfrog/jfrog-cli-core/v2/artifactory/commands/transferfiles/state"
@@ -23,7 +24,7 @@ func ShowStatus() error {
 		return err
 	}
 	if !isRunning {
-		addString(&output, "🔴", "Status", "Not running", 0, coreutils.IsWindows())
+		addString(&output, "🔴", "Status", "Not running", 0)
 		log.Output(output.String())
 		return nil
 	}
@@ -32,7 +33,7 @@ func ShowStatus() error {
 		return err
 	}
 	if isStopping {
-		addString(&output, "🟡", "Status", "Stopping", 0, coreutils.IsWindows())
+		addString(&output, "🟡", "Status", "Stopping", 0)
 		log.Output(output.String())
 		return nil
 	}
@@ -54,6 +55,7 @@ func ShowStatus() error {
 		output.WriteString("\n")
 		setRepositoryStatus(stateManager, &output)
 	}
+	addStaleChunks(stateManager, &output)
 	log.Output(output.String())
 	return nil
 }
@@ -68,20 +70,19 @@ func isStopping() (bool, error) {
 }
 
 func addOverallStatus(stateManager *state.TransferStateManager, output *strings.Builder, runningTime string) {
-	windows := coreutils.IsWindows()
 	addTitle(output, "Overall Transfer Status")
-	addString(output, coreutils.RemoveEmojisIfNonSupportedTerminal("🟢"), "Status", "Running", 3, windows)
-	addString(output, "🏃", "Running for", runningTime, 3, windows)
-	addString(output, "🗄 ", "Storage", sizeToString(stateManager.OverallTransfer.TransferredSizeBytes)+" / "+sizeToString(stateManager.OverallTransfer.TotalSizeBytes)+calcPercentageInt64(stateManager.OverallTransfer.TransferredSizeBytes, stateManager.OverallTransfer.TotalSizeBytes), 3, windows)
-	addString(output, "📦", "Repositories", fmt.Sprintf("%d / %d", stateManager.TotalRepositories.TransferredUnits, stateManager.TotalRepositories.TotalUnits)+calcPercentageInt64(stateManager.TotalRepositories.TransferredUnits, stateManager.TotalRepositories.TotalUnits), 2, windows)
-	addString(output, "🧵", "Working threads", strconv.Itoa(stateManager.WorkingThreads), 2, windows)
-	addString(output, "⚡", "Transfer speed", stateManager.GetSpeedString(), 2, windows)
-	addString(output, "⌛", "Estimated time remaining", stateManager.GetEstimatedRemainingTimeString(), 1, windows)
+	addString(output, coreutils.RemoveEmojisIfNonSupportedTerminal("🟢"), "Status", "Running", 3)
+	addString(output, "🏃", "Running for", runningTime, 3)
+	addString(output, "🗄 ", "Storage", sizeToString(stateManager.OverallTransfer.TransferredSizeBytes)+" / "+sizeToString(stateManager.OverallTransfer.TotalSizeBytes)+calcPercentageInt64(stateManager.OverallTransfer.TransferredSizeBytes, stateManager.OverallTransfer.TotalSizeBytes), 3)
+	addString(output, "📦", "Repositories", fmt.Sprintf("%d / %d", stateManager.TotalRepositories.TransferredUnits, stateManager.TotalRepositories.TotalUnits)+calcPercentageInt64(stateManager.TotalRepositories.TransferredUnits, stateManager.TotalRepositories.TotalUnits), 2)
+	addString(output, "🧵", "Working threads", strconv.Itoa(stateManager.WorkingThreads), 2)
+	addString(output, "⚡", "Transfer speed", stateManager.GetSpeedString(), 2)
+	addString(output, "⌛", "Estimated time remaining", stateManager.GetEstimatedRemainingTimeString(), 1)
 	failureTxt := strconv.FormatUint(uint64(stateManager.TransferFailures), 10)
 	if stateManager.TransferFailures > 0 {
 		failureTxt += " (" + "In Phase 3 and in subsequent executions, we'll retry transferring the failed files." + ")"
 	}
-	addString(output, "❌", "Transfer failures", failureTxt, 2, windows)
+	addString(output, "❌", "Transfer failures", failureTxt, 2)
 }
 
 func calcPercentageInt64(transferred, total int64) string {
@@ -92,21 +93,41 @@ func setRepositoryStatus(stateManager *state.TransferStateManager, output *strings.Builder) {
-	windows := coreutils.IsWindows()
 	addTitle(output, "Current Repository Status")
-	addString(output, "🏷 ", "Name", stateManager.CurrentRepoKey, 2, windows)
+	addString(output, "🏷 ", "Name", stateManager.CurrentRepoKey, 2)
 	currentRepo := stateManager.CurrentRepo
 	switch stateManager.CurrentRepoPhase {
 	case api.Phase1, api.Phase3:
 		if stateManager.CurrentRepoPhase == api.Phase1 {
-			addString(output, "🔢", "Phase", "Transferring all files in the repository (1/3)", 2, windows)
+			addString(output, "🔢", "Phase", "Transferring all files in the repository (1/3)", 2)
 		} else {
-			addString(output, "🔢", "Phase", "Retrying transfer failures (3/3)", 2, windows)
+			addString(output, "🔢", "Phase", "Retrying transfer failures (3/3)", 2)
 		}
-		addString(output, "🗄 ", "Storage", sizeToString(currentRepo.Phase1Info.TransferredSizeBytes)+" / "+sizeToString(currentRepo.Phase1Info.TotalSizeBytes)+calcPercentageInt64(currentRepo.Phase1Info.TransferredSizeBytes, currentRepo.Phase1Info.TotalSizeBytes), 2, windows)
-		addString(output, "📄", "Files", fmt.Sprintf("%d / %d", currentRepo.Phase1Info.TransferredUnits, currentRepo.Phase1Info.TotalUnits)+calcPercentageInt64(currentRepo.Phase1Info.TransferredUnits, currentRepo.Phase1Info.TotalUnits), 2, windows)
+		addString(output, "🗄 ", "Storage", sizeToString(currentRepo.Phase1Info.TransferredSizeBytes)+" / "+sizeToString(currentRepo.Phase1Info.TotalSizeBytes)+calcPercentageInt64(currentRepo.Phase1Info.TransferredSizeBytes, currentRepo.Phase1Info.TotalSizeBytes), 2)
+		addString(output, "📄", "Files", fmt.Sprintf("%d / %d", currentRepo.Phase1Info.TransferredUnits, currentRepo.Phase1Info.TotalUnits)+calcPercentageInt64(currentRepo.Phase1Info.TransferredUnits, currentRepo.Phase1Info.TotalUnits), 2)
 	case api.Phase2:
-		addString(output, "🔢", "Phase", "Transferring newly created and modified files (2/3)", 2, windows)
+		addString(output, "🔢", "Phase", "Transferring newly created and modified files (2/3)", 2)
+	}
+}
+
+func addStaleChunks(stateManager *state.TransferStateManager, output *strings.Builder) {
+	if len(stateManager.StaleChunks) == 0 {
+		return
+	}
+	output.WriteString("\n")
+	addTitle(output, "File Chunks in Transit for More than 30 Minutes")
+
+	for _, nodeStaleChunks := range stateManager.StaleChunks {
+		addString(output, "🏷️ ", "Node ID", nodeStaleChunks.NodeID, 1)
+		for _, staleChunks := range nodeStaleChunks.Chunks {
+			addString(output, " 🏷️ ", "Chunk ID", staleChunks.ChunkID, 1)
+			sent := time.Unix(staleChunks.Sent, 0)
+			runningSecs := int64(time.Since(sent).Seconds())
+			addString(output, " ⏱️ ", "Sent", sent.Format(time.DateTime)+" ("+state.SecondsToLiteralTime(runningSecs, "")+")", 1)
+			for _, file := range staleChunks.Files {
+				output.WriteString("\t\t📄 " + file + "\n")
+			}
+		}
 	}
 }
@@ -114,13 +135,13 @@ func addTitle(output *strings.Builder, title string) {
 	output.WriteString(coreutils.PrintBoldTitle(title + "\n"))
 }
 
-func addString(output *strings.Builder, emoji, key, value string, tabsCount int, windows bool) {
+func addString(output *strings.Builder, emoji, key, value string, tabsCount int) {
 	indentation := strings.Repeat("\t", tabsCount)
 	if indentation == "" {
 		indentation = " "
 	}
 	if len(emoji) > 0 {
-		if windows {
+		if coreutils.IsWindows() {
 			emoji = "●"
 		}
 		emoji += " "
diff --git a/artifactory/commands/transferfiles/status_test.go b/artifactory/commands/transferfiles/status_test.go
index 8cb2f0983..f1c7226ab 100644
--- a/artifactory/commands/transferfiles/status_test.go
+++ b/artifactory/commands/transferfiles/status_test.go
@@ -3,6 +3,7 @@ package transferfiles
 import (
 	"bytes"
 	"testing"
+	"time"
 
 	"github.com/jfrog/build-info-go/utils"
 	"github.com/jfrog/jfrog-cli-core/v2/artifactory/commands/transferfiles/api"
@@ -53,7 +54,7 @@ func TestShowStatus(t *testing.T) {
 	defer cleanUp()
 
 	// Create state manager and persist to file system
-	createStateManager(t, api.Phase1, false)
+	createStateManager(t, api.Phase1, false, false)
 
 	// Run show status and check output
 	assert.NoError(t, ShowStatus())
@@ -83,7 +84,7 @@ func TestShowStatusDiffPhase(t *testing.T) {
 	defer cleanUp()
 
 	// Create state manager and persist to file system
-	createStateManager(t, api.Phase2, false)
+	createStateManager(t, api.Phase2, false, false)
 
 	// Run show status and check output
 	assert.NoError(t, ShowStatus())
@@ -113,7 +114,7 @@ func TestShowBuildInfoRepo(t *testing.T) {
 	defer cleanUp()
 
 	// Create state manager and persist to file system
-	createStateManager(t, api.Phase3, true)
+	createStateManager(t, api.Phase3, true, false)
 
 	// Run show status and check output
 	assert.NoError(t, ShowStatus())
@@ -138,10 +139,30 @@ func TestShowBuildInfoRepo(t *testing.T) {
 	assert.Contains(t, results, "Files: 500 / 10000 (5.0%)")
 }
 
+func TestShowStaleChunks(t *testing.T) {
+	buffer, cleanUp := initStatusTest(t)
+	defer cleanUp()
+
+	// Create state manager and persist to file system
+	createStateManager(t, api.Phase1, false, true)
+
+	// Run show status and check output
+	assert.NoError(t, ShowStatus())
+	results := buffer.String()
+
+	// Check stale chunks
+	assert.Contains(t, results, "File Chunks in Transit for More than 30 Minutes")
+	assert.Contains(t, results, "Node ID:\tnode-id-1")
+	assert.Contains(t, results, "Sent:\t")
+	assert.Contains(t, results, "(31 minutes)")
+	assert.Contains(t, results, "a/b/c")
+	assert.Contains(t, results, "d/e/f")
+}
+
 // Create state manager and persist in the file system.
 // t - The testing object
 // phase - Phase ID
-func createStateManager(t *testing.T, phase int, buildInfoRepo bool) {
+func createStateManager(t *testing.T, phase int, buildInfoRepo bool, staleChunks bool) {
 	stateManager, err := state.NewTransferStateManager(false)
 	assert.NoError(t, err)
 	assert.NoError(t, stateManager.TryLockTransferStateManager())
@@ -159,6 +180,19 @@ func createStateManager(t *testing.T, phase int, buildInfoRepo bool) {
 	stateManager.TimeEstimationManager.LastSpeedsSum = 12
 	stateManager.TimeEstimationManager.SpeedsAverage = 12
 
+	if staleChunks {
+		stateManager.StaleChunks = append(stateManager.StaleChunks, state.StaleChunks{
+			NodeID: staleChunksNodeIdOne,
+			Chunks: []state.StaleChunk{
+				{
+					ChunkID: staleChunksChunkId,
+					Sent:    time.Now().Add(-time.Minute * 31).Unix(),
+					Files:   []string{"a/b/c", "d/e/f"},
+				},
+			},
+		})
+	}
+
 	// Increment transferred size and files. This action also persists the run status.
 	assert.NoError(t, stateManager.IncTransferredSizeAndFilesPhase1(500, 5000))
diff --git a/artifactory/commands/transferfiles/utils.go b/artifactory/commands/transferfiles/utils.go
index f33283bf6..668bebc1d 100644
--- a/artifactory/commands/transferfiles/utils.go
+++ b/artifactory/commands/transferfiles/utils.go
@@ -7,6 +7,7 @@ import (
 	"fmt"
 	"io"
 	"os"
+	"path"
 	"path/filepath"
 	"strconv"
 	"strings"
@@ -92,6 +93,37 @@ func (clcm *ChunksLifeCycleManager) GetInProgressTokensSliceByNodeId(nodeId node
 	return inProgressTokens
 }
 
+// Save in the TransferRunStatus the chunks that have been in transit for more than 30 minutes.
+// This allows them to be displayed using the '--status' option.
+// stateManager - Transfer state manager
+func (clcm *ChunksLifeCycleManager) StoreStaleChunks(stateManager *state.TransferStateManager) error {
+	var staleChunks []state.StaleChunks
+	for nodeId, chunkIdToData := range clcm.nodeToChunksMap {
+		staleNodeChunks := state.StaleChunks{NodeID: string(nodeId)}
+		for chunkId, uploadedChunkData := range chunkIdToData {
+			if time.Since(uploadedChunkData.TimeSent).Hours() < 0.5 {
+				continue
+			}
+			staleNodeChunk := state.StaleChunk{
+				ChunkID: string(chunkId),
+				Sent:    uploadedChunkData.TimeSent.Unix(),
+			}
+			for _, file := range uploadedChunkData.ChunkFiles {
+				var sizeStr string
+				if file.Size > 0 {
+					sizeStr = " (" + utils.ConvertIntToStorageSizeString(file.Size) + ")"
+				}
+				staleNodeChunk.Files = append(staleNodeChunk.Files, path.Join(file.Repo, file.Path, file.Name)+sizeStr)
+			}
+			staleNodeChunks.Chunks = append(staleNodeChunks.Chunks, staleNodeChunk)
+		}
+		if len(staleNodeChunks.Chunks) > 0 {
+			staleChunks = append(staleChunks, staleNodeChunks)
+		}
+	}
+	return stateManager.SetStaleChunks(staleChunks)
+}
+
 type InterruptionErr struct{}
 
 func (m *InterruptionErr) Error() string {
@@ -682,3 +714,21 @@ func getErrorOrDelayFiles(repoKeys []string, getDirPathFunc func(string) (string
 	}
 	return
 }
+
+// Increments index until the file path is unique.
+func getUniqueErrorOrDelayFilePath(dirPath string, getFileNamePrefix func() string) (delayFilePath string, err error) {
+	var exists bool
+	index := 0
+	for {
+		delayFilePath = filepath.Join(dirPath, fmt.Sprintf("%s-%d.json", getFileNamePrefix(), index))
+		exists, err = fileutils.IsFileExists(delayFilePath, false)
+		if err != nil {
+			return "", err
+		}
+		if !exists {
+			break
+		}
+		index++
+	}
+	return
+}
diff --git a/artifactory/commands/transferfiles/utils_test.go b/artifactory/commands/transferfiles/utils_test.go
index f5b2d8e90..0f38b2100 100644
--- a/artifactory/commands/transferfiles/utils_test.go
+++ b/artifactory/commands/transferfiles/utils_test.go
@@ -8,9 +8,13 @@ import (
 	"net/http"
 	"net/http/httptest"
 	"os"
+	"strconv"
 	"strings"
 	"testing"
+	"time"
+
+	"github.com/jfrog/jfrog-cli-core/v2/artifactory/commands/transferfiles/api"
+	"github.com/jfrog/jfrog-cli-core/v2/artifactory/commands/transferfiles/state"
 	"github.com/jfrog/jfrog-cli-core/v2/utils/config"
 	"github.com/jfrog/jfrog-cli-core/v2/utils/tests"
 	"github.com/jfrog/jfrog-client-go/artifactory/services"
@@ -40,8 +44,16 @@ const runningNodesResponse = `
 }
 `
 
+const (
+	staleChunksNodeIdOne = "node-id-1"
+	staleChunksNodeIdTwo = "node-id-2"
+	staleChunksChunkId   = "chunk-id"
+	staleChunksPath      = "path-in-repo"
+	staleChunksName      = "file-name"
+)
+
 func TestGetRunningNodes(t *testing.T) {
-	testServer, serverDetails, _ := createMockServer(t, func(w http.ResponseWriter, r *http.Request) {
+	testServer, serverDetails, _ := createMockServer(t, func(w http.ResponseWriter, _ *http.Request) {
 		w.WriteHeader(http.StatusOK)
 		_, err := w.Write([]byte(runningNodesResponse))
 		assert.NoError(t, err)
@@ -56,7 +68,7 @@ func TestGetRunningNodes(t *testing.T) {
 func TestStopTransferOnArtifactoryNodes(t *testing.T) {
 	stoppedNodeOne, stoppedNodeTwo := false, false
 	requestNumber := 0
-	testServer, _, srcUpService := createMockServer(t, func(w http.ResponseWriter, r *http.Request) {
+	testServer, _, srcUpService := createMockServer(t, func(w http.ResponseWriter, _ *http.Request) {
 		w.WriteHeader(http.StatusOK)
 		var nodeId string
 		if requestNumber == 0 {
@@ -244,6 +256,109 @@ func TestInterruptIfRequested(t *testing.T) {
 	assert.Equal(t, os.Interrupt, actualSignal)
 }
 
+func TestStoreStaleChunksEmpty(t *testing.T) {
+	// Init state manager
+	stateManager, cleanUp := state.InitStateTest(t)
+	defer cleanUp()
+
+	// Store empty stale chunks
+	chunksLifeCycleManager := ChunksLifeCycleManager{
+		nodeToChunksMap: make(map[nodeId]map[api.ChunkId]UploadedChunkData),
+	}
+	assert.NoError(t, chunksLifeCycleManager.StoreStaleChunks(stateManager))
+
+	// Make sure no chunks
+	staleChunks, err := stateManager.GetStaleChunks()
+	assert.NoError(t, err)
+	assert.Empty(t, staleChunks)
+}
+
+func TestStoreStaleChunksNoStale(t *testing.T) {
+	// Init state manager
+	stateManager, cleanUp := state.InitStateTest(t)
+	defer cleanUp()
+
+	// Store chunk that is not stale
+	chunksLifeCycleManager := ChunksLifeCycleManager{
+		nodeToChunksMap: map[nodeId]map[api.ChunkId]UploadedChunkData{
+			staleChunksNodeIdOne: {
+				staleChunksChunkId: {
+					TimeSent:   time.Now().Add(-time.Minute),
+					ChunkFiles: []api.FileRepresentation{{Repo: repo1Key, Path: staleChunksPath, Name: staleChunksName}},
+				},
+			},
+		},
+	}
+	assert.NoError(t, chunksLifeCycleManager.StoreStaleChunks(stateManager))
+
+	// Make sure no chunks
+	staleChunks, err := stateManager.GetStaleChunks()
+	assert.NoError(t, err)
+	assert.Empty(t, staleChunks)
+}
+
+func TestStoreStaleChunksStale(t *testing.T) {
+	// Init state manager
+	stateManager, cleanUp := state.InitStateTest(t)
+	defer cleanUp()
+
+	// Store stale chunk
+	sent := time.Now().Add(-time.Hour)
+	chunksLifeCycleManager := ChunksLifeCycleManager{
+		nodeToChunksMap: map[nodeId]map[api.ChunkId]UploadedChunkData{
+			staleChunksNodeIdOne: {
+				staleChunksChunkId: {
+					TimeSent:   sent,
+					ChunkFiles: []api.FileRepresentation{{Repo: repo1Key, Path: staleChunksPath, Name: staleChunksName, Size: 100}},
+				},
+			},
+		},
+	}
+	assert.NoError(t, chunksLifeCycleManager.StoreStaleChunks(stateManager))
+
+	// Make sure the stale chunk was stored in the state
+	staleChunks, err := stateManager.GetStaleChunks()
+	assert.NoError(t, err)
+	assert.Len(t, staleChunks, 1)
+	assert.Equal(t, staleChunksNodeIdOne, staleChunks[0].NodeID)
+	assert.Len(t, staleChunks[0].Chunks, 1)
+	assert.Equal(t, staleChunksChunkId, staleChunks[0].Chunks[0].ChunkID)
+	assert.Equal(t, sent.Unix(), staleChunks[0].Chunks[0].Sent)
+	assert.Len(t, staleChunks[0].Chunks[0].Files, 1)
+	assert.Equal(t, fmt.Sprintf("%s/%s/%s (0.1KB)", repo1Key, staleChunksPath, staleChunksName), staleChunks[0].Chunks[0].Files[0])
+}
+
+func TestStoreStaleChunksTwoNodes(t *testing.T) {
+	// Init state manager
+	stateManager, cleanUp := state.InitStateTest(t)
+	defer cleanUp()
+
+	// Store 1 stale chunk and 1 non-stale chunk
+	chunksLifeCycleManager := ChunksLifeCycleManager{
+		nodeToChunksMap: map[nodeId]map[api.ChunkId]UploadedChunkData{
+			staleChunksNodeIdOne: {
+				staleChunksChunkId: {
+					TimeSent:   time.Now().Add(-time.Hour), // Older than 0.5 hours
+					ChunkFiles: []api.FileRepresentation{{Repo: repo1Key, Path: staleChunksPath, Name: staleChunksName, Size: 1024}},
+				},
+			},
+			staleChunksNodeIdTwo: {
+				staleChunksChunkId: {
+					TimeSent:   time.Now(), // Less than 0.5 hours
+					ChunkFiles: []api.FileRepresentation{{Repo: repo2Key, Path: staleChunksPath, Name: staleChunksName, Size: 0}},
+				},
+			},
+		},
+	}
+	assert.NoError(t, chunksLifeCycleManager.StoreStaleChunks(stateManager))
+
+	// Make sure only the stale chunk was stored in the state
+	staleChunks, err := stateManager.GetStaleChunks()
+	assert.NoError(t, err)
+	assert.Len(t, staleChunks, 1)
+	assert.Equal(t, staleChunksNodeIdOne, staleChunks[0].NodeID)
+}
+
 // Create mock server to test transfer config commands
 // t - The testing object
 // testHandler - The HTTP handler of the test
@@ -255,3 +370,24 @@ func createMockServer(t *testing.T, testHandler transferFilesHandler) (*httptest
 	assert.NoError(t, err)
 	return testServer, serverDetails, serviceManager
 }
+
+func TestGetUniqueErrorOrDelayFilePath(t *testing.T) {
+	tmpDir, err := os.MkdirTemp("", "unique_file_path_test")
+	assert.NoError(t, err)
+
+	createUniqueFileAndAssertCounter(t, tmpDir, "prefix", 0)
+	// A file with 0 already exists, so new counter should be 1.
+	createUniqueFileAndAssertCounter(t, tmpDir, "prefix", 1)
+	// Unique prefix, so counter should be 0.
+	createUniqueFileAndAssertCounter(t, tmpDir, "new", 0)
+
+}
+
+func createUniqueFileAndAssertCounter(t *testing.T, tmpDir, prefix string, expectedCounter int) {
+	filePath, err := getUniqueErrorOrDelayFilePath(tmpDir, func() string {
+		return prefix
+	})
+	assert.NoError(t, err)
+	assert.NoError(t, os.WriteFile(filePath, nil, 0644))
+	assert.True(t, strings.HasSuffix(filePath, strconv.Itoa(expectedCounter)+".json"))
+}
diff --git a/artifactory/utils/dependenciesutils.go b/artifactory/utils/dependenciesutils.go
index b65755bcc..e595dcce1 100644
--- a/artifactory/utils/dependenciesutils.go
+++ b/artifactory/utils/dependenciesutils.go
@@ -3,11 +3,6 @@ package utils
 import (
 	"errors"
 	"fmt"
-	"net/http"
-	"os"
-	"path"
-	"path/filepath"
-
 	"github.com/jfrog/jfrog-cli-core/v2/utils/config"
 	"github.com/jfrog/jfrog-cli-core/v2/utils/coreutils"
 	xrayutils "github.com/jfrog/jfrog-cli-core/v2/xray/utils"
@@ -17,6 +12,10 @@ import (
 	"github.com/jfrog/jfrog-client-go/utils/io/fileutils"
 	"github.com/jfrog/jfrog-client-go/utils/io/httputils"
 	"github.com/jfrog/jfrog-client-go/utils/log"
+	"net/http"
+	"os"
+	"path"
+	"path/filepath"
 )
 
 const (
@@ -113,21 +112,19 @@ func createChecksumFile(targetPath, checksum string) (err error) {
 	return
 }
 
-// The GetExtractorsRemoteDetails function is responsible for retrieving the server details necessary to download the build-info extractors.
+// GetExtractorsRemoteDetails retrieves the server details necessary to download the build-info extractors from a remote repository.
 // downloadPath - specifies the path in the remote repository from which the extractors will be downloaded.
 func GetExtractorsRemoteDetails(downloadPath string) (server *config.ServerDetails, remoteRepo string, err error) {
-	server, remoteRepo, err = getRemoteDetailsFromEnv(downloadPath)
-	if remoteRepo != "" || err != nil {
-		return
+	// Download from the remote repository that proxies https://releases.jfrog.io
+	server, remoteRepo, err = getExtractorsRemoteDetailsFromEnv(downloadPath)
+	if remoteRepo == "" && err == nil {
+		// Fallback to the deprecated JFROG_CLI_EXTRACTORS_REMOTE environment variable
+		server, remoteRepo, err = getExtractorsRemoteDetailsFromLegacyEnv(downloadPath)
 	}
-	// Fallback to the deprecated JFROG_CLI_EXTRACTORS_REMOTE environment variable
-	server, remoteRepo, err = getLegacyRemoteDetailsFromEnv(downloadPath)
 	if remoteRepo != "" || err != nil {
-		log.Warn(fmt.Sprintf("You are using the deprecated %q environment variable. Use %q instead.\nRead more about it at %sjfrog-cli/downloading-the-maven-and-gradle-extractor-jars",
-			coreutils.DeprecatedExtractorsRemoteEnv, coreutils.ReleasesRemoteEnv, coreutils.JFrogHelpUrl))
 		return
 	}
-
+	// Download directly from https://releases.jfrog.io
 	log.Info("The build-info-extractor jar is not cached locally. Downloading it now...\n" +
 		"You can set the repository from which this jar is downloaded.\n" +
 		"Read more about it at " + coreutils.JFrogHelpUrl + "jfrog-cli/downloading-the-maven-and-gradle-extractor-jars")
@@ -136,12 +133,22 @@ func GetExtractorsRemoteDetails(downloadPath string) (server *config.ServerDetai
 	return &config.ServerDetails{ArtifactoryUrl: coreutils.JfrogReleasesUrl}, path.Join("oss-release-local", downloadPath), nil
 }
 
-func getRemoteDetailsFromEnv(downloadPath string) (server *config.ServerDetails, remoteRepo string, err error) {
-	return getRemoteDetails(downloadPath, coreutils.ReleasesRemoteEnv)
+func getExtractorsRemoteDetailsFromEnv(downloadPath string) (server *config.ServerDetails, remoteRepo string, err error) {
+	server, remoteRepo, err = getRemoteDetails(coreutils.ReleasesRemoteEnv)
+	if remoteRepo != "" && err == nil {
+		remoteRepo = getFullExtractorsPathInArtifactory(remoteRepo, coreutils.ReleasesRemoteEnv, downloadPath)
+	}
+	return
 }
 
-func getLegacyRemoteDetailsFromEnv(downloadPath string) (server *config.ServerDetails, remoteRepo string, err error) {
-	return getRemoteDetails(downloadPath, coreutils.DeprecatedExtractorsRemoteEnv)
+func getExtractorsRemoteDetailsFromLegacyEnv(downloadPath string) (server *config.ServerDetails, remoteRepo string, err error) {
+	server, remoteRepo, err = getRemoteDetails(coreutils.DeprecatedExtractorsRemoteEnv)
+	if remoteRepo != "" && err == nil {
+		log.Warn(fmt.Sprintf("You are using the deprecated %q environment variable. Use %q instead.\nRead more about it at %sjfrog-cli/downloading-the-maven-and-gradle-extractor-jars",
+			coreutils.DeprecatedExtractorsRemoteEnv, coreutils.ReleasesRemoteEnv, coreutils.JFrogHelpUrl))
+		remoteRepo = getFullExtractorsPathInArtifactory(remoteRepo, coreutils.DeprecatedExtractorsRemoteEnv, downloadPath)
+	}
+	return
 }
 
 // getRemoteDetails function retrieves the server details and downloads path for the build-info extractor file.
@@ -149,20 +156,16 @@ func getLegacyRemoteDetailsFromEnv(downloadPath string) (server *config.ServerDe
 // downloadPath - specifies the path in the remote repository from which the extractors will be downloaded.
 // remoteEnv - the relevant environment variable that was used: releasesRemoteEnv/ExtractorsRemoteEnv.
 // The function returns the server that matches the given server ID, the complete path of the build-info extractor concatenated with the specified remote repository, and an error if occurred.
-func getRemoteDetails(downloadPath, remoteEnv string) (server *config.ServerDetails, fullRemoteRepoPath string, err error) {
+func getRemoteDetails(remoteEnv string) (server *config.ServerDetails, repoName string, err error) {
 	serverID, repoName, err := coreutils.GetServerIdAndRepo(remoteEnv)
 	if err != nil {
 		return
 	}
 	if serverID == "" && repoName == "" {
-		// Remote details weren't configured. Assuming that https://releases.jfro.io should be used.
+		// Remote details weren't configured. Assuming that https://releases.jfrog.io should be used.
 		return
 	}
 	server, err = config.GetSpecificConfig(serverID, false, true)
-	if err != nil {
-		return
-	}
-	fullRemoteRepoPath = getFullExtractorsPathInArtifactory(repoName, remoteEnv, downloadPath)
 	return
 }
 
@@ -245,9 +248,14 @@ func createHttpClient(artDetails *config.ServerDetails) (rtHttpClient *jfroghttp
 	return
 }
 
-func getAnalyzerManagerRemoteDetails(downloadPath string) (server *config.ServerDetails, remoteRepo string, err error) {
-	server, remoteRepo, err = getRemoteDetailsFromEnv(downloadPath)
-	if remoteRepo != "" || err != nil {
+func getAnalyzerManagerRemoteDetails(downloadPath string) (server *config.ServerDetails, fullRemotePath string, err error) {
+	var remoteRepo string
+	server, remoteRepo, err = getRemoteDetails(coreutils.ReleasesRemoteEnv)
+	if err != nil {
+		return
+	}
+	if remoteRepo != "" {
+		fullRemotePath = path.Join(remoteRepo, "artifactory", downloadPath)
 		return
 	}
 	log.Debug("'" + coreutils.ReleasesRemoteEnv + "' environment variable is not configured. The Analyzer Manager app will be downloaded directly from releases.jfrog.io if needed.")
diff --git a/go.mod b/go.mod
index 8244563ec..679de66ba 100644
--- a/go.mod
+++ b/go.mod
@@ -12,7 +12,7 @@ require (
 	github.com/jedib0t/go-pretty/v6 v6.4.6
 	github.com/jfrog/build-info-go v1.9.6
 	github.com/jfrog/gofrog v1.3.0
-	github.com/jfrog/jfrog-client-go v1.30.0
+	github.com/jfrog/jfrog-client-go v1.30.1
 	github.com/magiconair/properties v1.8.7
 	github.com/manifoldco/promptui v0.9.0
 	github.com/owenrumney/go-sarif/v2 v2.1.3
@@ -94,7 +94,7 @@ require (
 	gopkg.in/yaml.v3 v3.0.1 // indirect
 )
 
-replace github.com/jfrog/jfrog-client-go => github.com/jfrog/jfrog-client-go v1.28.1-0.20230611131847-a3b84a9004c3
+// replace github.com/jfrog/jfrog-client-go => github.com/jfrog/jfrog-client-go v1.28.1-0.20230611131847-a3b84a9004c3
 
 // replace github.com/jfrog/build-info-go => github.com/jfrog/build-info-go v1.8.9-0.20230518114837-fe6a826d5001
diff --git a/go.sum b/go.sum
index 0a9e310f3..a0eace64b 100644
--- a/go.sum
+++ b/go.sum
@@ -198,8 +198,8 @@ github.com/jfrog/build-info-go v1.9.6 h1:lCJ2j5uXAlJsSwDe5J8WD7Co1f/hUlZvMfwfb5A
 github.com/jfrog/build-info-go v1.9.6/go.mod h1:GbuFS+viHCKZYx9nWHYu7ab1DgQkFdtVN3BJPUNb2D4=
 github.com/jfrog/gofrog v1.3.0 h1:o4zgsBZE4QyDbz2M7D4K6fXPTBJht+8lE87mS9bw7Gk=
 github.com/jfrog/gofrog v1.3.0/go.mod h1:IFMc+V/yf7rA5WZ74CSbXe+Lgf0iApEQLxRZVzKRUR0=
-github.com/jfrog/jfrog-client-go v1.28.1-0.20230611131847-a3b84a9004c3 h1:bIpljSo/bnilaRky2mtXcljC0JmONgc97AEy1YG6rXE=
-github.com/jfrog/jfrog-client-go v1.28.1-0.20230611131847-a3b84a9004c3/go.mod h1:qEJxoe68sUtqHJ1YhXv/7pKYP/9p1D5tJrruzJKYeoI=
+github.com/jfrog/jfrog-client-go v1.30.1 h1:wASYBrFkpWzQHTNnCIIfqpDLtQF5oNcwQK9rrv8I8AA=
+github.com/jfrog/jfrog-client-go v1.30.1/go.mod h1:qEJxoe68sUtqHJ1YhXv/7pKYP/9p1D5tJrruzJKYeoI=
 github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
 github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
 github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4=
diff --git a/utils/config/config.go b/utils/config/config.go
index 62b88a4ed..50eb150cb 100644
--- a/utils/config/config.go
+++ b/utils/config/config.go
@@ -218,7 +218,6 @@ func getConfigFile() (content []byte, err error) {
 	if exists {
 		content, err = fileutils.ReadFile(confFilePath)
 		return
-
 	}
 	// Try to look for older config files
 	for i := coreutils.GetCliConfigVersion() - 1; i >= 3; i-- {
diff --git a/utils/coreutils/utils.go b/utils/coreutils/utils.go
index dc32242ae..a981f7e45 100644
--- a/utils/coreutils/utils.go
+++ b/utils/coreutils/utils.go
@@ -35,7 +35,7 @@ const (
 )
 
 const (
-	// ReleasesRemoteEnv should be used for downloading the CLI dependencies (extractor jars, analyzerManager and etc.) through an Artifactory remote
+	// ReleasesRemoteEnv should be used for downloading the CLI dependencies (extractor jars, analyzerManager etc.) through an Artifactory remote
 	// repository, instead of downloading directly from releases.jfrog.io. The remote repository should be
 	// configured to proxy releases.jfrog.io.
 	// This env var should store a server ID and a remote repository in form of '/'
@@ -44,7 +44,8 @@ const (
 	// Its functionality was similar to ReleasesRemoteEnv, but it proxies releases.jfrog.io/artifactory/oss-release-local instead.
 	DeprecatedExtractorsRemoteEnv = "JFROG_CLI_EXTRACTORS_REMOTE"
 	// JFrog releases URL
-	JfrogReleasesUrl = "https://releases.jfrog.io/artifactory/"
+	JfrogReleasesUrl  = "https://releases.jfrog.io/artifactory/"
+	MinimumVersionMsg = "You are using %s version %s, while this operation requires version %s or higher."
 )
 
 // Error modes (how should the application behave when the CheckError function is invoked):
@@ -560,9 +561,7 @@ func GetJfrogTransferDir() (string, error) {
 
 func ValidateMinimumVersion(product MinVersionProduct, currentVersion, minimumVersion string) error {
 	if !version.NewVersion(currentVersion).AtLeast(minimumVersion) {
-		return errorutils.CheckErrorf(fmt.Sprintf("You are using %s version %s,"+
-			" while this operation requires version %s or higher.",
-			product, currentVersion, minimumVersion))
+		return errorutils.CheckErrorf(MinimumVersionMsg, product, currentVersion, minimumVersion)
 	}
 	return nil
 }
@@ -571,7 +570,7 @@ func GetServerIdAndRepo(remoteEnv string) (serverID string, repoName string, err
 	serverAndRepo := os.Getenv(remoteEnv)
 	if serverAndRepo == "" {
 		log.Debug(remoteEnv, "is not set")
-		return "", "", nil
+		return
 	}
 	// The serverAndRepo is in the form of '/'
 	serverID, repoName, seperatorExists := strings.Cut(serverAndRepo, "/")
diff --git a/xray/commands/audit/generic/auditmanager.go b/xray/commands/audit/generic/auditmanager.go
index 82ad4b6a9..6534532fb 100644
--- a/xray/commands/audit/generic/auditmanager.go
+++ b/xray/commands/audit/generic/auditmanager.go
@@ -1,7 +1,12 @@
 package audit
 
 import (
+	"errors"
 	"fmt"
+	"github.com/jfrog/gofrog/version"
+	rtutils "github.com/jfrog/jfrog-cli-core/v2/artifactory/utils"
+	"github.com/jfrog/jfrog-cli-core/v2/xray/audit/jas"
+	"golang.org/x/sync/errgroup"
 	"os"
 	"path/filepath"
 	"strings"
@@ -16,7 +21,7 @@ import (
 	"github.com/jfrog/jfrog-cli-core/v2/xray/audit/nuget"
 	"github.com/jfrog/jfrog-cli-core/v2/xray/audit/python"
 	"github.com/jfrog/jfrog-cli-core/v2/xray/audit/yarn"
-	"github.com/jfrog/jfrog-cli-core/v2/xray/commands/utils"
+	commandsutils "github.com/jfrog/jfrog-cli-core/v2/xray/commands/utils"
 	clientUtils "github.com/jfrog/jfrog-cli-core/v2/xray/utils"
 	"github.com/jfrog/jfrog-client-go/auth"
 	"github.com/jfrog/jfrog-client-go/utils/errorutils"
@@ -36,7 +41,10 @@ type Params struct {
 }
 
 func NewAuditParams() *Params {
-	return &Params{}
+	return &Params{
+		xrayGraphScanParams: &services.XrayGraphScanParams{},
+		GraphBasicParams:    &clientUtils.GraphBasicParams{},
+	}
 }
 
 func (params *Params) InstallFunc() func(tech string) error {
@@ -60,6 +68,11 @@ func (params *Params) SetXrayGraphScanParams(xrayGraphScanParams *services.XrayG
 	return params
 }
 
+func (params *Params) SetGraphBasicParams(gbp *clientUtils.GraphBasicParams) *Params {
+	params.GraphBasicParams = gbp
+	return params
+}
+
 func (params *Params) SetWorkingDirs(workingDirs []string) *Params {
 	params.workingDirs = workingDirs
 	return params
@@ -88,9 +101,72 @@ func (params *Params) SetMinSeverityFilter(minSeverityFilter string) *Params {
 	return params
 }
 
-// GenericAudit audits all the projects found in the given workingDirs
-func GenericAudit(params *Params) (results []services.ScanResponse, isMultipleRoot bool, err error) {
-	if err = coreutils.ValidateMinimumVersion(coreutils.Xray, params.xrayVersion, utils.GraphScanMinXrayVersion); err != nil {
+func (params *Params) SetXrayVersion(version string) *Params {
+	params.xrayVersion = version
+	return params
+}
+
+// Runs an audit scan based on the provided auditParams.
+// Returns an audit Results object containing all the scan results.
+// If the current server is entitled for JAS, the advanced security results will be included in the scan results.
+func RunAudit(auditParams *Params) (results *Results, err error) {
+	serverDetails, err := auditParams.ServerDetails()
+	if err != nil {
+		return
+	}
+	isEntitled, xrayVersion, err := isEntitledForJas(serverDetails)
+	if err != nil {
+		return
+	}
+	auditParams.SetXrayVersion(xrayVersion)
+
+	errGroup := new(errgroup.Group)
+	if isEntitled {
+		// Download (if needed) the analyzer manager in a background routine.
+		errGroup.Go(rtutils.DownloadAnalyzerManagerIfNeeded)
+	}
+
+	// The audit scan doesn't require the analyzer manager, so it can run separately from the analyzer manager download routine.
+	scanResults, isMultipleRootProject, auditError := genericAudit(auditParams)
+
+	// Wait for the Download of the AnalyzerManager to complete.
+	if err = errGroup.Wait(); err != nil {
+		return
+	}
+
+	extendedScanResults := &clientUtils.ExtendedScanResults{XrayResults: scanResults}
+	// Try to run contextual analysis only if the user is entitled for advance security
+	if isEntitled {
+		extendedScanResults, err = jas.GetExtendedScanResults(scanResults, auditParams.FullDependenciesTree(), serverDetails)
+		if err != nil {
+			return
+		}
+	}
+	results = &Results{
+		IsMultipleRootProject: isMultipleRootProject,
+		AuditError:            auditError,
+		ExtendedScanResults:   extendedScanResults,
+	}
+	return
+}
+
+func isEntitledForJas(serverDetails *config.ServerDetails) (entitled bool, xrayVersion string, err error) {
+	xrayManager, xrayVersion, err := commandsutils.CreateXrayServiceManagerAndGetVersion(serverDetails)
+	if err != nil {
+		return
+	}
+	if !version.NewVersion(xrayVersion).AtLeast(clientUtils.EntitlementsMinVersion) {
+		log.Debug("Entitlements check for ‘Advanced Security’ package failed:")
+		log.Debug(coreutils.MinimumVersionMsg, coreutils.Xray, xrayVersion, clientUtils.EntitlementsMinVersion)
+		return
+	}
+	entitled, err = xrayManager.IsEntitled(clientUtils.ApplicabilityFeatureId)
+	return
+}
+
+// genericAudit audits all the projects found in the given workingDirs
+func genericAudit(params *Params) (results []services.ScanResponse, isMultipleRoot bool, err error) {
+	if err = coreutils.ValidateMinimumVersion(coreutils.Xray, params.xrayVersion, commandsutils.GraphScanMinXrayVersion); err != nil {
 		return
 	}
 	log.Info("JFrog Xray version is:", params.xrayVersion)
@@ -109,10 +185,7 @@ func auditMultipleWorkingDirs(params *Params) (results []services.ScanResponse,
 		return
 	}
 	defer func() {
-		e := os.Chdir(projectDir)
-		if err == nil {
-			err = e
-		}
+		err = errors.Join(err, os.Chdir(projectDir))
 	}()
 	var errorList strings.Builder
 	for _, wd := range params.workingDirs {
@@ -151,7 +224,7 @@ func doAudit(params *Params) (results []services.ScanResponse, isMultipleRoot bo
 	// Otherwise, run audit for requested technologies only.
 	technologies := params.Technologies()
 	if len(technologies) == 0 {
-		technologies, err = utils.DetectedTechnologies()
+		technologies, err = commandsutils.DetectedTechnologies()
 		if err != nil {
 			return
 		}
@@ -171,7 +244,7 @@ func doAudit(params *Params) (results []services.ScanResponse, isMultipleRoot bo
 			continue
 		}
 
-		scanGraphParams := utils.NewScanGraphParams().
+		scanGraphParams := commandsutils.NewScanGraphParams().
 			SetServerDetails(serverDetails).
 			SetXrayGraphScanParams(params.xrayGraphScanParams).
 			SetXrayVersion(params.xrayVersion).
@@ -198,7 +271,7 @@ func GetTechDependencyTree(params *clientUtils.GraphBasicParams, tech coreutils.
 	}
 	serverDetails, err := params.ServerDetails()
 	if err != nil {
-		return nil, err
+		return
 	}
 	var dependencyTrees []*xrayCmdUtils.GraphNode
 	switch tech {
diff --git a/xray/commands/audit/generic/generic.go b/xray/commands/audit/generic/generic.go
index c8acfc242..88192aec6 100644
--- a/xray/commands/audit/generic/generic.go
+++ b/xray/commands/audit/generic/generic.go
@@ -1,34 +1,32 @@
 package audit
 
 import (
-	"github.com/jfrog/jfrog-cli-core/v2/xray/audit/jas"
 	"os"
 
-	"github.com/jfrog/jfrog-cli-core/v2/artifactory/utils"
 	"github.com/jfrog/jfrog-cli-core/v2/utils/coreutils"
-	commandsutils "github.com/jfrog/jfrog-cli-core/v2/xray/commands/utils"
 	xrutils "github.com/jfrog/jfrog-cli-core/v2/xray/utils"
-	"github.com/jfrog/jfrog-client-go/utils/log"
 	"github.com/jfrog/jfrog-client-go/xray/services"
-	"golang.org/x/sync/errgroup"
 )
 
 type GenericAuditCommand struct {
 	watches                []string
-	workingDirs            []string
 	projectKey             string
 	targetRepoPath         string
-	minSeverityFilter      string
-	fixableOnly            bool
 	IncludeVulnerabilities bool
 	IncludeLicenses        bool
 	Fail                   bool
 	PrintExtendedTable     bool
-	*xrutils.GraphBasicParams
+	Params
+}
+
+type Results struct {
+	IsMultipleRootProject bool
+	AuditError            error
+	ExtendedScanResults   *xrutils.ExtendedScanResults
 }
 
 func NewGenericAuditCommand() *GenericAuditCommand {
-	return &GenericAuditCommand{GraphBasicParams: &xrutils.GraphBasicParams{}}
+	return &GenericAuditCommand{Params: *NewAuditParams()}
 }
 
 func (auditCmd *GenericAuditCommand) SetWatches(watches []string) *GenericAuditCommand {
@@ -36,11 +34,6 @@ func (auditCmd *GenericAuditCommand) SetWatches(watches []string) *GenericAuditC
 	return auditCmd
 }
 
-func (auditCmd *GenericAuditCommand) SetWorkingDirs(dirs []string) *GenericAuditCommand {
-	auditCmd.workingDirs = dirs
-	return auditCmd
-}
-
 func (auditCmd *GenericAuditCommand) SetProject(project string) *GenericAuditCommand {
 	auditCmd.projectKey = project
 	return auditCmd
@@ -71,16 +64,6 @@ func (auditCmd *GenericAuditCommand) SetPrintExtendedTable(printExtendedTable bo
 	return auditCmd
 }
 
-func (auditCmd *GenericAuditCommand) SetMinSeverityFilter(minSeverityFilter string) *GenericAuditCommand {
-	auditCmd.minSeverityFilter = minSeverityFilter
-	return auditCmd
-}
-
-func (auditCmd *GenericAuditCommand) SetFixableOnly(fixable bool) *GenericAuditCommand {
-	auditCmd.fixableOnly = fixable
-	return auditCmd
-}
-
 func (auditCmd *GenericAuditCommand) CreateXrayGraphScanParams() *services.XrayGraphScanParams {
 	params := &services.XrayGraphScanParams{
 		RepoPath: auditCmd.targetRepoPath,
@@ -102,78 +85,43 @@ func (auditCmd *GenericAuditCommand) Run() (err error) {
 		SetXrayGraphScanParams(auditCmd.CreateXrayGraphScanParams()).
 		SetWorkingDirs(auditCmd.workingDirs).
SetMinSeverityFilter(auditCmd.minSeverityFilter). - SetFixableOnly(auditCmd.fixableOnly) - auditParams.GraphBasicParams = auditCmd.GraphBasicParams - - serverDetails, err := auditParams.ServerDetails() - if err != nil { - return err - } - xrayManager, xrayVersion, err := commandsutils.CreateXrayServiceManagerAndGetVersion(serverDetails) + SetFixableOnly(auditCmd.fixableOnly). + SetGraphBasicParams(auditCmd.GraphBasicParams) + auditResults, err := RunAudit(auditParams) if err != nil { return err } - auditParams.xrayVersion = xrayVersion - var entitled bool - errGroup := new(errgroup.Group) - if err = coreutils.ValidateMinimumVersion(coreutils.Xray, xrayVersion, xrutils.EntitlementsMinVersion); err == nil { - entitled, err = xrayManager.IsEntitled(xrutils.ApplicabilityFeatureId) - if err != nil { - return err - } - } else { - entitled = false - log.Debug("Entitlements check for ‘Advanced Security’ package failed:\n" + err.Error()) - } - if entitled { - // Download (if needed) the analyzer manager in a background routine. - errGroup.Go(utils.DownloadAnalyzerManagerIfNeeded) - } - results, isMultipleRootProject, auditErr := GenericAudit(auditParams) - - // Wait for the Download of the AnalyzerManager to complete. - if err = errGroup.Wait(); err != nil { - return err - } - extendedScanResults := &xrutils.ExtendedScanResults{XrayResults: results, ApplicabilityScanResults: nil, EntitledForJas: false} - // Try to run contextual analysis only if the user is entitled for advance security - if entitled { - extendedScanResults, err = jas.GetExtendedScanResults(results, auditParams.FullDependenciesTree(), serverDetails) - if err != nil { - return err - } - } if auditCmd.Progress() != nil { if err = auditCmd.Progress().Quit(); err != nil { return } } var messages []string - if !entitled { - messages = []string{coreutils.PrintTitle("The ‘jf audit’ command also supports the ‘Contextual Analysis’ feature, which is included as part of the ‘Advanced Security’ package. This package isn't enabled on your system. Read more - ") + coreutils.PrintLink("https://jfrog.com/security-and-compliance")} + if !auditResults.ExtendedScanResults.EntitledForJas { + messages = []string{coreutils.PrintTitle("The ‘jf audit’ command also supports the ‘Contextual Analysis’ feature, which is included as part of the ‘Advanced Security’ package. This package isn't enabled on your system. Read more - ") + coreutils.PrintLink("https://jfrog.com/xray/")} } // Print Scan results on all cases except if errors accrued on Generic Audit command and no security/license issues found. - printScanResults := !(auditErr != nil && xrutils.IsEmptyScanResponse(results)) + printScanResults := !(auditResults.AuditError != nil && xrutils.IsEmptyScanResponse(auditResults.ExtendedScanResults.XrayResults)) if printScanResults { - err = xrutils.PrintScanResults(extendedScanResults, + err = xrutils.PrintScanResults(auditResults.ExtendedScanResults, nil, auditCmd.OutputFormat(), auditCmd.IncludeVulnerabilities, auditCmd.IncludeLicenses, - isMultipleRootProject, + auditResults.IsMultipleRootProject, auditCmd.PrintExtendedTable, false, messages, ) if err != nil { return } } - if auditErr != nil { - err = auditErr + if auditResults.AuditError != nil { + err = auditResults.AuditError return } - // Only in case Xray's context was given (!auditCmd.IncludeVulnerabilities) and the user asked to fail the build accordingly, do so. 
- if auditCmd.Fail && !auditCmd.IncludeVulnerabilities && xrutils.CheckIfFailBuild(results) { + // Only in case Xray's context was given (!auditCmd.IncludeVulnerabilities), and the user asked to fail the build accordingly, do so. + if auditCmd.Fail && !auditCmd.IncludeVulnerabilities && xrutils.CheckIfFailBuild(auditResults.ExtendedScanResults.XrayResults) { err = xrutils.NewFailBuildError() } return diff --git a/xray/commands/utils/utils_test.go b/xray/commands/utils/utils_test.go index 88487da59..de26cb7ba 100644 --- a/xray/commands/utils/utils_test.go +++ b/xray/commands/utils/utils_test.go @@ -72,7 +72,7 @@ func TestFilterResultIfNeeded(t *testing.T) { }, }, params: ScanGraphParams{ - severityLevel: 3, + severityLevel: 8, }, expected: services.ScanResponse{ Violations: []services.Violation{ diff --git a/xray/formats/conversion.go b/xray/formats/conversion.go index 570b1dd3b..88e2c2612 100644 --- a/xray/formats/conversion.go +++ b/xray/formats/conversion.go @@ -147,7 +147,6 @@ func ConvertToSecretsTableRow(rows []IacSecretsRow) (tableRows []secretsTableRow file: rows[i].File, lineColumn: rows[i].LineColumn, text: rows[i].Text, - secretType: rows[i].Type, }) } return @@ -160,7 +159,6 @@ func ConvertToIacTableRow(rows []IacSecretsRow) (tableRows []iacTableRow) { file: rows[i].File, lineColumn: rows[i].LineColumn, text: rows[i].Text, - iacType: rows[i].Type, }) } return diff --git a/xray/formats/table.go b/xray/formats/table.go index 2fc3c389c..44fe79f20 100644 --- a/xray/formats/table.go +++ b/xray/formats/table.go @@ -128,7 +128,6 @@ type secretsTableRow struct { file string `col-name:"File"` lineColumn string `col-name:"Line:Column"` text string `col-name:"Secret"` - secretType string `col-name:"Type"` } type iacTableRow struct { @@ -136,5 +135,4 @@ type iacTableRow struct { file string `col-name:"File"` lineColumn string `col-name:"Line:Column"` text string `col-name:"Finding"` - iacType string `col-name:"Scanner"` } diff --git a/xray/utils/analyzermanager.go b/xray/utils/analyzermanager.go index e9181084e..b21c24937 100644 --- a/xray/utils/analyzermanager.go +++ b/xray/utils/analyzermanager.go @@ -2,7 +2,6 @@ package utils import ( "errors" - "fmt" "github.com/jfrog/jfrog-cli-core/v2/utils/config" "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" "github.com/jfrog/jfrog-client-go/utils/errorutils" @@ -12,6 +11,7 @@ import ( "github.com/owenrumney/go-sarif/v2/sarif" "os" "os/exec" + "path" "path/filepath" "strconv" "strings" @@ -26,7 +26,8 @@ const ( EntitlementsMinVersion = "3.66.5" ApplicabilityFeatureId = "contextual_analysis" AnalyzerManagerZipName = "analyzerManager.zip" - analyzerManagerDownloadPath = "xsc-gen-exe-analyzer-manager-local/v1/[RELEASE]" + analyzerManagerVersion = "1.1.9.1786834" + analyzerManagerDownloadPath = "xsc-gen-exe-analyzer-manager-local/v1" analyzerManagerDirName = "analyzerManager" analyzerManagerExecutableName = "analyzerManager" analyzerManagerLogDirName = "analyzerManagerLogs" @@ -123,7 +124,7 @@ func GetAnalyzerManagerDownloadPath() (string, error) { if err != nil { return "", err } - return fmt.Sprintf("%s/%s/%s", analyzerManagerDownloadPath, osAndArc, AnalyzerManagerZipName), nil + return path.Join(analyzerManagerDownloadPath, analyzerManagerVersion, osAndArc, AnalyzerManagerZipName), nil } func GetAnalyzerManagerDirAbsolutePath() (string, error) { diff --git a/xray/utils/resultstable.go b/xray/utils/resultstable.go index 02a8644e1..39af61f15 100644 --- a/xray/utils/resultstable.go +++ b/xray/utils/resultstable.go @@ -153,14 +153,7 @@ func 
prepareViolations(violations []services.Violation, extendedResults *Extende } // Sort the rows by severity and whether the row contains fixed versions - sort.Slice(securityViolationsRows, func(i, j int) bool { - if securityViolationsRows[i].SeverityNumValue != securityViolationsRows[j].SeverityNumValue { - return securityViolationsRows[i].SeverityNumValue > securityViolationsRows[j].SeverityNumValue - } else if securityViolationsRows[i].Applicable != securityViolationsRows[j].Applicable { - return sortByApplicableValue(i, j, securityViolationsRows) - } - return len(securityViolationsRows[i].FixedVersions) > 0 && len(securityViolationsRows[j].FixedVersions) > 0 - }) + sortVulnerabilityOrViolationRows(securityViolationsRows) sort.Slice(licenseViolationsRows, func(i, j int) bool { return licenseViolationsRows[i].SeverityNumValue > licenseViolationsRows[j].SeverityNumValue }) @@ -231,15 +224,17 @@ func prepareVulnerabilities(vulnerabilities []services.Vulnerability, extendedRe } } - sort.Slice(vulnerabilitiesRows, func(i, j int) bool { - if vulnerabilitiesRows[i].SeverityNumValue != vulnerabilitiesRows[j].SeverityNumValue { - return vulnerabilitiesRows[i].SeverityNumValue > vulnerabilitiesRows[j].SeverityNumValue - } else if vulnerabilitiesRows[i].Applicable != vulnerabilitiesRows[j].Applicable { - sortByApplicableValue(i, j, vulnerabilitiesRows) + sortVulnerabilityOrViolationRows(vulnerabilitiesRows) + return vulnerabilitiesRows, nil +} + +func sortVulnerabilityOrViolationRows(rows []formats.VulnerabilityOrViolationRow) { + sort.Slice(rows, func(i, j int) bool { + if rows[i].SeverityNumValue != rows[j].SeverityNumValue { + return rows[i].SeverityNumValue > rows[j].SeverityNumValue } - return len(vulnerabilitiesRows[i].FixedVersions) > 0 && len(vulnerabilitiesRows[j].FixedVersions) > 0 + return len(rows[i].FixedVersions) > 0 && len(rows[j].FixedVersions) > 0 }) - return vulnerabilitiesRows, nil } // PrintLicensesTable prints the licenses in a table. 
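For reference, a standalone sketch of the ordering the new sortVulnerabilityOrViolationRows helper is meant to produce: higher severity value first and, within the same severity, rows that have fixed versions ahead of rows that do not. The row type below is a simplified stand-in for formats.VulnerabilityOrViolationRow, and the tie-break is spelled out as "i has a fix and j does not", which appears to be the intent of the fixed-versions check in the helper.

package main

import (
	"fmt"
	"sort"
)

// row is a minimal stand-in for formats.VulnerabilityOrViolationRow,
// keeping only the fields the comparator inspects.
type row struct {
	Name             string
	SeverityNumValue int
	FixedVersions    []string
}

// sortRows mirrors the intended ordering of sortVulnerabilityOrViolationRows.
func sortRows(rows []row) {
	sort.Slice(rows, func(i, j int) bool {
		if rows[i].SeverityNumValue != rows[j].SeverityNumValue {
			return rows[i].SeverityNumValue > rows[j].SeverityNumValue
		}
		// Equal severity: prefer the row that has a fix available.
		return len(rows[i].FixedVersions) > 0 && len(rows[j].FixedVersions) == 0
	})
}

func main() {
	rows := []row{
		{Name: "medium-no-fix", SeverityNumValue: 6},
		{Name: "critical-no-fix", SeverityNumValue: 12},
		{Name: "critical-fixed", SeverityNumValue: 12, FixedVersions: []string{"1.0.1"}},
	}
	sortRows(rows)
	fmt.Println(rows[0].Name, rows[1].Name, rows[2].Name) // critical-fixed critical-no-fix medium-no-fix
}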
@@ -350,8 +345,8 @@ func prepareIacs(iacs []IacOrSecretResult, isTable bool) []formats.IacSecretsRow func PrintIacTable(iacs []IacOrSecretResult, entitledForIacScan bool) error { if entitledForIacScan { iacRows := prepareIacs(iacs, true) - return coreutils.PrintTable(formats.ConvertToIacTableRow(iacRows), "Iac Violations", - "✨ No Iac violations were found ✨", false) + return coreutils.PrintTable(formats.ConvertToIacTableRow(iacRows), "Infrastructure as Code Vulnerabilities", + "✨ No Infrastructure as Code vulnerabilities were found ✨", false) } return nil } @@ -534,20 +529,24 @@ func (s *Severity) printableTitle(isTable bool) string { var Severities = map[string]map[string]*Severity{ "Critical": { - ApplicableStringValue: {emoji: "💀", title: "Critical", numValue: 4, style: color.New(color.BgLightRed, color.LightWhite)}, - NotApplicableStringValue: {emoji: "👌", title: "Critical", numValue: 4}, + ApplicableStringValue: {emoji: "💀", title: "Critical", numValue: 12, style: color.New(color.BgLightRed, color.LightWhite)}, + ApplicabilityUndeterminedStringValue: {emoji: "💀", title: "Critical", numValue: 11, style: color.New(color.BgLightRed, color.LightWhite)}, + NotApplicableStringValue: {emoji: "👌", title: "Critical", numValue: 10}, }, "High": { - ApplicableStringValue: {emoji: "🔥", title: "High", numValue: 3, style: color.New(color.Red)}, - NotApplicableStringValue: {emoji: "👌", title: "High", numValue: 3}, + ApplicableStringValue: {emoji: "🔥", title: "High", numValue: 9, style: color.New(color.Red)}, + ApplicabilityUndeterminedStringValue: {emoji: "🔥", title: "High", numValue: 8, style: color.New(color.Red)}, + NotApplicableStringValue: {emoji: "👌", title: "High", numValue: 7}, }, "Medium": { - ApplicableStringValue: {emoji: "🎃", title: "Medium", numValue: 2, style: color.New(color.Yellow)}, - NotApplicableStringValue: {emoji: "👌", title: "Medium", numValue: 2}, + ApplicableStringValue: {emoji: "🎃", title: "Medium", numValue: 6, style: color.New(color.Yellow)}, + ApplicabilityUndeterminedStringValue: {emoji: "🎃", title: "Medium", numValue: 5, style: color.New(color.Yellow)}, + NotApplicableStringValue: {emoji: "👌", title: "Medium", numValue: 4}, }, "Low": { - ApplicableStringValue: {emoji: "👻", title: "Low", numValue: 1}, - NotApplicableStringValue: {emoji: "👌", title: "Low", numValue: 1}, + ApplicableStringValue: {emoji: "👻", title: "Low", numValue: 3}, + ApplicabilityUndeterminedStringValue: {emoji: "👻", title: "Low", numValue: 2}, + NotApplicableStringValue: {emoji: "👌", title: "Low", numValue: 1}, }, } @@ -555,6 +554,10 @@ func (s *Severity) NumValue() int { return s.numValue } +func (s *Severity) Emoji() string { + return s.emoji +} + func GetSeveritiesFormat(severity string) (string, error) { formattedSeverity := cases.Title(language.Und).String(severity) if formattedSeverity != "" && Severities[formattedSeverity][ApplicableStringValue] == nil { @@ -568,10 +571,15 @@ func GetSeverity(severityTitle string, applicable string) *Severity { if Severities[severityTitle] == nil { return &Severity{title: severityTitle} } - if applicable == NotApplicableStringValue { + + switch applicable { + case NotApplicableStringValue: return Severities[severityTitle][NotApplicableStringValue] + case ApplicableStringValue: + return Severities[severityTitle][ApplicableStringValue] + default: + return Severities[severityTitle][ApplicabilityUndeterminedStringValue] } - return Severities[severityTitle][ApplicableStringValue] } type operationalRiskViolationReadableData struct { @@ -635,11 +643,13 @@ func 
simplifyVulnerabilities(scanVulnerabilities []services.Vulnerability, multi continue } uniqueVulnerabilities[packageKey] = &services.Vulnerability{ - Cves: vulnerability.Cves, - Severity: vulnerability.Severity, - Components: map[string]services.Component{vulnerableComponentId: vulnerability.Components[vulnerableComponentId]}, - IssueId: vulnerability.IssueId, - Technology: vulnerability.Technology, + Cves: vulnerability.Cves, + Severity: vulnerability.Severity, + Components: map[string]services.Component{vulnerableComponentId: vulnerability.Components[vulnerableComponentId]}, + IssueId: vulnerability.IssueId, + Technology: vulnerability.Technology, + ExtendedInformation: vulnerability.ExtendedInformation, + Summary: vulnerability.Summary, } } } @@ -793,7 +803,7 @@ func getUniqueKey(vulnerableDependency, vulnerableVersion string, cves []service // If at least one cve is applicable - final value is applicable // Else if at least one cve is undetermined - final value is undetermined -// Else (case when all cves are not applicable) -> final value is not applicable +// Else (case when all cves aren't applicable) -> final value is not applicable func getApplicableCveValue(extendedResults *ExtendedScanResults, xrayCves []formats.CveRow) string { if !extendedResults.EntitledForJas { return "" @@ -819,15 +829,6 @@ func getApplicableCveValue(extendedResults *ExtendedScanResults, xrayCves []form return ApplicabilityUndeterminedStringValue } -func getApplicableCveNumValue(stringValue string) int { - if stringValue == ApplicableStringValue { - return 3 - } else if stringValue == ApplicabilityUndeterminedStringValue { - return 2 - } - return 1 -} - func printApplicableCveValue(applicableValue string, isTable bool) string { if applicableValue == ApplicableStringValue && isTable && (log.IsStdOutTerminal() && log.IsColorsSupported() || os.Getenv("GITLAB_CI") != "") { @@ -835,8 +836,3 @@ func printApplicableCveValue(applicableValue string, isTable bool) string { } return applicableValue } - -func sortByApplicableValue(i int, j int, securityViolationsRows []formats.VulnerabilityOrViolationRow) bool { - return getApplicableCveNumValue(securityViolationsRows[i].Applicable) > - getApplicableCveNumValue(securityViolationsRows[j].Applicable) -} diff --git a/xray/utils/resultstable_test.go b/xray/utils/resultstable_test.go index cc16a108e..ec260c1d6 100644 --- a/xray/utils/resultstable_test.go +++ b/xray/utils/resultstable_test.go @@ -466,6 +466,108 @@ func TestGetApplicableCveValue(t *testing.T) { } } +func TestSortVulnerabilityOrViolationRows(t *testing.T) { + testCases := []struct { + name string + rows []formats.VulnerabilityOrViolationRow + expectedOrder []string + }{ + { + name: "Sort by severity with different severity values", + rows: []formats.VulnerabilityOrViolationRow{ + { + Summary: "Summary 1", + Severity: "High", + SeverityNumValue: 9, + FixedVersions: []string{}, + ImpactedDependencyName: "Dependency 1", + ImpactedDependencyVersion: "1.0.0", + }, + { + Summary: "Summary 2", + Severity: "Critical", + SeverityNumValue: 12, + FixedVersions: []string{"1.0.0"}, + ImpactedDependencyName: "Dependency 2", + ImpactedDependencyVersion: "2.0.0", + }, + { + Summary: "Summary 3", + Severity: "Medium", + SeverityNumValue: 6, + FixedVersions: []string{}, + ImpactedDependencyName: "Dependency 3", + ImpactedDependencyVersion: "3.0.0", + }, + }, + expectedOrder: []string{"Dependency 2", "Dependency 1", "Dependency 3"}, + }, + { + name: "Sort by severity with same severity values, but different fixed versions", + 
rows: []formats.VulnerabilityOrViolationRow{ + { + Summary: "Summary 1", + Severity: "Critical", + SeverityNumValue: 12, + FixedVersions: []string{"1.0.0"}, + ImpactedDependencyName: "Dependency 1", + ImpactedDependencyVersion: "1.0.0", + }, + { + Summary: "Summary 2", + Severity: "Critical", + SeverityNumValue: 12, + FixedVersions: []string{}, + ImpactedDependencyName: "Dependency 2", + ImpactedDependencyVersion: "2.0.0", + }, + }, + expectedOrder: []string{"Dependency 1", "Dependency 2"}, + }, + { + name: "Sort by severity with same severity values different applicability", + rows: []formats.VulnerabilityOrViolationRow{ + { + Summary: "Summary 1", + Severity: "Critical", + Applicable: ApplicableStringValue, + SeverityNumValue: 13, + FixedVersions: []string{"1.0.0"}, + ImpactedDependencyName: "Dependency 1", + ImpactedDependencyVersion: "1.0.0", + }, + { + Summary: "Summary 2", + Applicable: NotApplicableStringValue, + Severity: "Critical", + SeverityNumValue: 11, + ImpactedDependencyName: "Dependency 2", + ImpactedDependencyVersion: "2.0.0", + }, + { + Summary: "Summary 3", + Applicable: ApplicabilityUndeterminedStringValue, + Severity: "Critical", + SeverityNumValue: 12, + ImpactedDependencyName: "Dependency 3", + ImpactedDependencyVersion: "2.0.0", + }, + }, + expectedOrder: []string{"Dependency 1", "Dependency 3", "Dependency 2"}, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + sortVulnerabilityOrViolationRows(tc.rows) + + for i, row := range tc.rows { + assert.Equal(t, tc.expectedOrder[i], row.ImpactedDependencyName) + } + }) + } +} + func newBoolPtr(v bool) *bool { return &v } diff --git a/xray/utils/resultwriter.go b/xray/utils/resultwriter.go index 3ed4116eb..90cc2fafc 100644 --- a/xray/utils/resultwriter.go +++ b/xray/utils/resultwriter.go @@ -4,6 +4,7 @@ import ( "bytes" "encoding/json" "fmt" + "os" "strconv" "strings" @@ -35,10 +36,16 @@ var OutputFormats = []string{string(Table), string(Json), string(SimpleJson), st var CurationOutputFormats = []string{string(Table), string(Json)} type sarifProperties struct { - Cves string - Headline string - Severity string - Description string + Applicable string + Cves string + Headline string + Severity string + Description string + MarkdownDescription string + XrayID string + File string + LineColumn string + SecretsOrIacType string } // PrintScanResults prints the scan results in the specified format. 
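A sketch of how a caller outside the package might drive the reworked SARIF entry point described below, assuming the signature and module layout shown in this diff; the scan response, tool name, and information URI are placeholders.

package main

import (
	"fmt"
	"log"

	xrayutils "github.com/jfrog/jfrog-cli-core/v2/xray/utils"
	"github.com/jfrog/jfrog-client-go/xray/services"
)

func main() {
	// Placeholder results; in practice these come from an Xray graph scan
	// and, when entitled, from the JAS scanners as well.
	results := &xrayutils.ExtendedScanResults{
		XrayResults: []services.ScanResponse{{ScanId: "example-scan-id"}},
	}
	// The scanning tool name and information URI are now parameters rather than
	// hard-coded values; markdownOutput=true renders the table-style help text.
	report, err := xrayutils.GenerateSarifFileFromScan(results, false, true, "JFrog Security", "https://jfrog.com/xray/")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(report)
}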
@@ -58,7 +65,7 @@ func PrintScanResults(results *ExtendedScanResults, simpleJsonError []formats.Si case Table: return printScanResultsTables(results, scan, includeVulnerabilities, includeLicenses, isMultipleRoots, printExtended, messages) case SimpleJson: - jsonTable, err := convertScanToSimpleJson(results.getXrayScanResults(), results, simpleJsonError, isMultipleRoots, includeLicenses, false) + jsonTable, err := convertScanToSimpleJson(results, simpleJsonError, isMultipleRoots, includeLicenses, false) if err != nil { return err } @@ -66,7 +73,7 @@ func PrintScanResults(results *ExtendedScanResults, simpleJsonError []formats.Si case Json: return PrintJson(results.getXrayScanResults()) case Sarif: - sarifFile, err := GenerateSarifFileFromScan(results.getXrayScanResults(), results, isMultipleRoots, false) + sarifFile, err := GenerateSarifFileFromScan(results, isMultipleRoots, false, "JFrog Security", coreutils.JFrogComUrl+"xray/") if err != nil { return err } @@ -117,13 +124,13 @@ func printMessage(message string) { log.Output("💬", message) } -func GenerateSarifFileFromScan(currentScan []services.ScanResponse, extendedResults *ExtendedScanResults, isMultipleRoots, simplifiedOutput bool) (string, error) { +func GenerateSarifFileFromScan(extendedResults *ExtendedScanResults, isMultipleRoots, markdownOutput bool, scanningTool, toolURI string) (string, error) { report, err := sarif.New(sarif.Version210) if err != nil { return "", errorutils.CheckError(err) } - run := sarif.NewRunWithInformationURI("JFrog Xray", coreutils.JFrogComUrl+"xray/") - if err = convertScanToSarif(run, currentScan, extendedResults, isMultipleRoots, simplifiedOutput); err != nil { + run := sarif.NewRunWithInformationURI(scanningTool, toolURI) + if err = convertScanToSarif(run, extendedResults, isMultipleRoots, markdownOutput); err != nil { return "", err } report.AddRun(run) @@ -135,8 +142,8 @@ func GenerateSarifFileFromScan(currentScan []services.ScanResponse, extendedResu return clientUtils.IndentJson(out), nil } -func convertScanToSimpleJson(results []services.ScanResponse, extendedResults *ExtendedScanResults, errors []formats.SimpleJsonError, isMultipleRoots, includeLicenses, simplifiedOutput bool) (formats.SimpleJsonResults, error) { - violations, vulnerabilities, licenses := SplitScanResults(results) +func convertScanToSimpleJson(extendedResults *ExtendedScanResults, errors []formats.SimpleJsonError, isMultipleRoots, includeLicenses, simplifiedOutput bool) (formats.SimpleJsonResults, error) { + violations, vulnerabilities, licenses := SplitScanResults(extendedResults.XrayResults) jsonTable := formats.SimpleJsonResults{} if len(vulnerabilities) > 0 { vulJsonTable, err := PrepareVulnerabilities(vulnerabilities, extendedResults, isMultipleRoots, simplifiedOutput) @@ -174,16 +181,77 @@ func convertScanToSimpleJson(results []services.ScanResponse, extendedResults *E return jsonTable, nil } -func convertScanToSarif(run *sarif.Run, currentScan []services.ScanResponse, extendedResults *ExtendedScanResults, isMultipleRoots, simplifiedOutput bool) error { +func convertScanToSarif(run *sarif.Run, extendedResults *ExtendedScanResults, isMultipleRoots, markdownOutput bool) error { var errors []formats.SimpleJsonError - jsonTable, err := convertScanToSimpleJson(currentScan, extendedResults, errors, isMultipleRoots, false, simplifiedOutput) + jsonTable, err := convertScanToSimpleJson(extendedResults, errors, isMultipleRoots, true, markdownOutput) if err != nil { return err } + if len(jsonTable.Vulnerabilities) > 0 || 
len(jsonTable.SecurityViolations) > 0 { + if err = convertToVulnerabilityOrViolationSarif(run, &jsonTable, markdownOutput); err != nil { + return err + } + } + return convertToIacOrSecretsSarif(run, &jsonTable, markdownOutput) +} + +func convertToVulnerabilityOrViolationSarif(run *sarif.Run, jsonTable *formats.SimpleJsonResults, markdownOutput bool) error { if len(jsonTable.SecurityViolations) > 0 { - return convertViolations(jsonTable, run, simplifiedOutput) + return convertViolationsToSarif(jsonTable, run, markdownOutput) + } + return convertVulnerabilitiesToSarif(jsonTable, run, markdownOutput) +} + +func convertToIacOrSecretsSarif(run *sarif.Run, jsonTable *formats.SimpleJsonResults, markdownOutput bool) error { + var err error + for _, secret := range jsonTable.Secrets { + properties := getIacOrSecretsProperties(secret, markdownOutput, true) + if err = addPropertiesToSarifRun(run, &properties); err != nil { + return err + } + } + + for _, iac := range jsonTable.Iacs { + properties := getIacOrSecretsProperties(iac, markdownOutput, false) + if err = addPropertiesToSarifRun(run, &properties); err != nil { + return err + } + } + return err +} + +func getIacOrSecretsProperties(secretOrIac formats.IacSecretsRow, markdownOutput, isSecret bool) sarifProperties { + file := strings.TrimPrefix(secretOrIac.File, string(os.PathSeparator)) + mapSeverityToScore := map[string]string{ + "": "0.0", + "low": "3.9", + "medium": "6.9", + "high": "8.9", + "critical": "10", + } + severity := mapSeverityToScore[strings.ToLower(secretOrIac.Severity)] + markdownDescription := "" + headline := "Infrastructure as Code Vulnerability" + secretOrFinding := "Finding" + if isSecret { + secretOrFinding = "Secret" + headline = "Potential Secret Exposed" + } + if markdownOutput { + headerRow := fmt.Sprintf("| Severity | File | Line:Column | %s |\n", secretOrFinding) + separatorRow := "| :---: | :---: | :---: | :---: |\n" + tableHeader := headerRow + separatorRow + markdownDescription = tableHeader + fmt.Sprintf("| %s | %s | %s | %s |", secretOrIac.Severity, file, secretOrIac.LineColumn, secretOrIac.Text) + } + return sarifProperties{ + Headline: headline, + Severity: severity, + Description: secretOrIac.Text, + MarkdownDescription: markdownDescription, + File: file, + LineColumn: secretOrIac.LineColumn, + SecretsOrIacType: secretOrIac.Type, } - return convertVulnerabilities(jsonTable, run, simplifiedOutput) } func getCves(cvesRow []formats.CveRow, issueId string) string { @@ -202,28 +270,25 @@ func getCves(cvesRow []formats.CveRow, issueId string) string { return cvesStr } -func getHeadline(impactedPackage, version, key, fixVersion string) string { - if fixVersion != "" { - return fmt.Sprintf("[%s] Upgrade %s:%s to %s", key, impactedPackage, version, fixVersion) - } - return fmt.Sprintf("[%s] %s:%s", key, impactedPackage, version) +func getVulnerabilityOrViolationSarifHeadline(depName, version, key string) string { + return fmt.Sprintf("[%s] %s %s", key, depName, version) } -func convertViolations(jsonTable formats.SimpleJsonResults, run *sarif.Run, simplifiedOutput bool) error { +func convertViolationsToSarif(jsonTable *formats.SimpleJsonResults, run *sarif.Run, markdownOutput bool) error { for _, violation := range jsonTable.SecurityViolations { - sarifProperties, err := getSarifProperties(violation, simplifiedOutput) + properties, err := getViolatedDepsSarifProps(violation, markdownOutput) if err != nil { return err } - err = addScanResultsToSarifRun(run, sarifProperties.Severity, violation.IssueId, 
sarifProperties.Headline, sarifProperties.Description, violation.Technology) - if err != nil { + if err = addPropertiesToSarifRun(run, &properties); err != nil { return err } } for _, license := range jsonTable.LicensesViolations { - impactedPackageFull := getHeadline(license.ImpactedDependencyName, license.ImpactedDependencyVersion, license.LicenseKey, "") - err := addScanResultsToSarifRun(run, "", license.ImpactedDependencyVersion, impactedPackageFull, license.LicenseKey, coreutils.Technology(strings.ToLower(license.ImpactedDependencyType))) - if err != nil { + if err := addPropertiesToSarifRun(run, + &sarifProperties{ + Severity: license.Severity, + Headline: getVulnerabilityOrViolationSarifHeadline(license.LicenseKey, license.ImpactedDependencyName, license.ImpactedDependencyVersion)}); err != nil { return err } } @@ -231,35 +296,39 @@ func convertViolations(jsonTable formats.SimpleJsonResults, run *sarif.Run, simp return nil } -func getSarifProperties(vulnerabilityRow formats.VulnerabilityOrViolationRow, simplifiedOutput bool) (sarifProperties, error) { +func getViolatedDepsSarifProps(vulnerabilityRow formats.VulnerabilityOrViolationRow, markdownOutput bool) (sarifProperties, error) { cves := getCves(vulnerabilityRow.Cves, vulnerabilityRow.IssueId) - fixVersion := getMinimalFixVersion(vulnerabilityRow.FixedVersions) - headline := getHeadline(vulnerabilityRow.ImpactedDependencyName, vulnerabilityRow.ImpactedDependencyVersion, cves, fixVersion) + headline := getVulnerabilityOrViolationSarifHeadline(vulnerabilityRow.ImpactedDependencyName, vulnerabilityRow.ImpactedDependencyVersion, cves) maxCveScore, err := findMaxCVEScore(vulnerabilityRow.Cves) if err != nil { return sarifProperties{}, err } - formattedDirectDependecies := getDirectDependenciesFormatted(vulnerabilityRow.Components) - description := vulnerabilityRow.Summary - if simplifiedOutput { - description = getDescription(formattedDirectDependecies, maxCveScore, vulnerabilityRow.FixedVersions) + formattedDirectDependencies, err := getDirectDependenciesFormatted(vulnerabilityRow.Components) + if err != nil { + return sarifProperties{}, err + } + markdownDescription := "" + if markdownOutput { + markdownDescription = getSarifTableDescription(formattedDirectDependencies, maxCveScore, vulnerabilityRow.Applicable, vulnerabilityRow.FixedVersions) + "\n" } return sarifProperties{ - Cves: cves, - Headline: headline, - Severity: maxCveScore, - Description: description, + Applicable: vulnerabilityRow.Applicable, + Cves: cves, + Headline: headline, + Severity: maxCveScore, + Description: vulnerabilityRow.Summary, + MarkdownDescription: markdownDescription, + File: vulnerabilityRow.Technology.GetPackageDescriptor(), }, err } -func convertVulnerabilities(jsonTable formats.SimpleJsonResults, run *sarif.Run, simplifiedOutput bool) error { +func convertVulnerabilitiesToSarif(jsonTable *formats.SimpleJsonResults, run *sarif.Run, simplifiedOutput bool) error { for _, vulnerability := range jsonTable.Vulnerabilities { - sarifProperties, err := getSarifProperties(vulnerability, simplifiedOutput) + properties, err := getViolatedDepsSarifProps(vulnerability, simplifiedOutput) if err != nil { return err } - err = addScanResultsToSarifRun(run, sarifProperties.Severity, vulnerability.IssueId, sarifProperties.Headline, sarifProperties.Description, vulnerability.Technology) - if err != nil { + if err = addPropertiesToSarifRun(run, &properties); err != nil { return err } } @@ -267,54 +336,83 @@ func convertVulnerabilities(jsonTable formats.SimpleJsonResults, 
run *sarif.Run, return nil } -func getDirectDependenciesFormatted(directDependencies []formats.ComponentRow) string { +func getDirectDependenciesFormatted(directDependencies []formats.ComponentRow) (string, error) { var formattedDirectDependencies strings.Builder for _, dependency := range directDependencies { - formattedDirectDependencies.WriteString(fmt.Sprintf("`%s:%s`, ", dependency.Name, dependency.Version)) + if _, err := formattedDirectDependencies.WriteString(fmt.Sprintf("`%s %s`
", dependency.Name, dependency.Version)); err != nil { + return "", err + } } - return strings.TrimSuffix(formattedDirectDependencies.String(), ", ") + return strings.TrimSuffix(formattedDirectDependencies.String(), "
"), nil } -func getDescription(formattedDirectDependencies, maxCveScore string, fixedVersions []string) string { +func getSarifTableDescription(formattedDirectDependencies, maxCveScore, applicable string, fixedVersions []string) string { descriptionFixVersions := "No fix available" if len(fixedVersions) > 0 { descriptionFixVersions = strings.Join(fixedVersions, ", ") } - return fmt.Sprintf("| Severity Score | Direct Dependencies | Fixed Versions |\n| :--- | :----: | ---: |\n| %s | %s | %s |", - maxCveScore, formattedDirectDependencies, descriptionFixVersions) -} - -func getMinimalFixVersion(fixVersions []string) string { - if len(fixVersions) > 0 { - return fixVersions[0] + if applicable == "" { + return fmt.Sprintf("| Severity Score | Direct Dependencies | Fixed Versions |\n| :---: | :----: | :---: |\n| %s | %s | %s |", + maxCveScore, formattedDirectDependencies, descriptionFixVersions) } - return "" + return fmt.Sprintf("| Severity Score | Contextual Analysis | Direct Dependencies | Fixed Versions |\n| :---: | :---: | :---: | :---: |\n| %s | %s | %s | %s |", + maxCveScore, applicable, formattedDirectDependencies, descriptionFixVersions) } // Adding the Xray scan results details to the sarif struct, for each issue found in the scan -func addScanResultsToSarifRun(run *sarif.Run, severity, issueId, impactedPackage, description string, technology coreutils.Technology) error { - techPackageDescriptor := technology.GetPackageDescriptor() +func addPropertiesToSarifRun(run *sarif.Run, properties *sarifProperties) error { pb := sarif.NewPropertyBag() - if severity != missingCveScore { - pb.Add("security-severity", severity) + if properties.Severity != missingCveScore { + pb.Add("security-severity", properties.Severity) + } + description := properties.Description + markdownDescription := properties.MarkdownDescription + if markdownDescription != "" { + description = "" + } + line := 0 + column := 0 + var err error + if properties.LineColumn != "" { + lineColumn := strings.Split(properties.LineColumn, ":") + if line, err = strconv.Atoi(lineColumn[0]); err != nil { + return err + } + if column, err = strconv.Atoi(lineColumn[1]); err != nil { + return err + } } - run.AddRule(issueId). + ruleID := generateSarifRuleID(properties) + run.AddRule(ruleID). + WithDescription(description). WithProperties(pb.Properties). - WithMarkdownHelp(description) - run.CreateResultForRule(issueId). - WithMessage(sarif.NewTextMessage(impactedPackage)). + WithMarkdownHelp(markdownDescription) + run.CreateResultForRule(ruleID). + WithMessage(sarif.NewTextMessage(properties.Headline)). AddLocation( sarif.NewLocationWithPhysicalLocation( sarif.NewPhysicalLocation(). WithArtifactLocation( - sarif.NewSimpleArtifactLocation(techPackageDescriptor), - ), + sarif.NewSimpleArtifactLocation(properties.File), + ).WithRegion( + sarif.NewSimpleRegion(line, line). 
+ WithStartColumn(column)), ), ) - return nil } +func generateSarifRuleID(properties *sarifProperties) string { + switch { + case properties.Cves != "": + return properties.Cves + case properties.XrayID != "": + return properties.XrayID + default: + return properties.File + } +} + func findMaxCVEScore(cves []formats.CveRow) (string, error) { maxCve := 0.0 for _, cve := range cves { diff --git a/xray/utils/resultwriter_test.go b/xray/utils/resultwriter_test.go index f27b3cad2..634065976 100644 --- a/xray/utils/resultwriter_test.go +++ b/xray/utils/resultwriter_test.go @@ -1,52 +1,90 @@ package utils import ( + "fmt" + "github.com/jfrog/jfrog-cli-core/v2/utils/coreutils" "github.com/jfrog/jfrog-cli-core/v2/xray/formats" "github.com/jfrog/jfrog-client-go/xray/services" "github.com/stretchr/testify/assert" + "path" "testing" ) func TestGenerateSarifFileFromScan(t *testing.T) { - currentScan := services.ScanResponse{ - Vulnerabilities: []services.Vulnerability{ + extendedResults := &ExtendedScanResults{ + XrayResults: []services.ScanResponse{ { - IssueId: "XRAY-1", - Summary: "summary-1", - Cves: []services.Cve{ + Vulnerabilities: []services.Vulnerability{ { - Id: "CVE-2022-0000", - CvssV3Score: "9", + Cves: []services.Cve{{Id: "CVE-2022-1234", CvssV3Score: "8.0"}, {Id: "CVE-2023-1234", CvssV3Score: "7.1"}}, + Summary: "A test vulnerability the harms nothing", + Severity: "High", + Components: map[string]services.Component{ + "vulnerability1": {FixedVersions: []string{"1.2.3"}}, + }, + Technology: coreutils.Go.ToString(), }, }, - Components: map[string]services.Component{ - "component-G": { - FixedVersions: []string{"[2.1.3]"}, - ImpactPaths: nil, - }, - }, - Technology: "go", }, }, - ScannedPackageType: "Go", + SecretsScanResults: []IacOrSecretResult{ + { + Severity: "Medium", + File: "found_secrets.js", + LineColumn: "1:18", + Type: "entropy", + Text: "AAA************", + }, + }, + IacScanResults: []IacOrSecretResult{ + { + Severity: "Medium", + File: "plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json", + LineColumn: "229:38", + Type: "entropy", + Text: "BBB************", + }, + }, + } + testCases := []struct { + name string + extendedResults *ExtendedScanResults + isMultipleRoots bool + markdownOutput bool + expectedSarifOutput string + }{ + { + name: "Scan results with vulnerabilities, secrets and IaC", + extendedResults: extendedResults, + expectedSarifOutput: "{\n \"version\": \"2.1.0\",\n \"$schema\": \"https://json.schemastore.org/sarif-2.1.0-rtm.5.json\",\n \"runs\": [\n {\n \"tool\": {\n \"driver\": {\n \"informationUri\": \"https://example.com/\",\n \"name\": \"JFrog Security\",\n \"rules\": [\n {\n \"id\": \"CVE-2022-1234, CVE-2023-1234\",\n \"shortDescription\": {\n \"text\": \"A test vulnerability the harms nothing\"\n },\n \"help\": {\n \"markdown\": \"\"\n },\n \"properties\": {\n \"security-severity\": \"8.0\"\n }\n },\n {\n \"id\": \"found_secrets.js\",\n \"shortDescription\": {\n \"text\": \"AAA************\"\n },\n \"help\": {\n \"markdown\": \"\"\n },\n \"properties\": {\n \"security-severity\": \"6.9\"\n }\n },\n {\n \"id\": \"plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json\",\n \"shortDescription\": {\n \"text\": \"BBB************\"\n },\n \"help\": {\n \"markdown\": \"\"\n },\n \"properties\": {\n \"security-severity\": \"6.9\"\n }\n }\n ]\n }\n },\n \"results\": [\n {\n \"ruleId\": \"CVE-2022-1234, CVE-2023-1234\",\n \"ruleIndex\": 0,\n \"message\": {\n \"text\": \"[CVE-2022-1234, CVE-2023-1234] vulnerability1 \"\n },\n 
\"locations\": [\n {\n \"physicalLocation\": {\n \"artifactLocation\": {\n \"uri\": \"go.mod\"\n },\n \"region\": {\n \"startLine\": 0,\n \"startColumn\": 0,\n \"endLine\": 0\n }\n }\n }\n ]\n },\n {\n \"ruleId\": \"found_secrets.js\",\n \"ruleIndex\": 1,\n \"message\": {\n \"text\": \"Potential Secret Exposed\"\n },\n \"locations\": [\n {\n \"physicalLocation\": {\n \"artifactLocation\": {\n \"uri\": \"found_secrets.js\"\n },\n \"region\": {\n \"startLine\": 1,\n \"startColumn\": 18,\n \"endLine\": 1\n }\n }\n }\n ]\n },\n {\n \"ruleId\": \"plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json\",\n \"ruleIndex\": 2,\n \"message\": {\n \"text\": \"Infrastructure as Code Vulnerability\"\n },\n \"locations\": [\n {\n \"physicalLocation\": {\n \"artifactLocation\": {\n \"uri\": \"plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json\"\n },\n \"region\": {\n \"startLine\": 229,\n \"startColumn\": 38,\n \"endLine\": 229\n }\n }\n }\n ]\n }\n ]\n }\n ]\n}", + }, + { + name: "Scan results with vulnerabilities, secrets and IaC as Markdown", + extendedResults: extendedResults, + markdownOutput: true, + expectedSarifOutput: "{\n \"version\": \"2.1.0\",\n \"$schema\": \"https://json.schemastore.org/sarif-2.1.0-rtm.5.json\",\n \"runs\": [\n {\n \"tool\": {\n \"driver\": {\n \"informationUri\": \"https://example.com/\",\n \"name\": \"JFrog Security\",\n \"rules\": [\n {\n \"id\": \"CVE-2022-1234, CVE-2023-1234\",\n \"shortDescription\": {\n \"text\": \"\"\n },\n \"help\": {\n \"markdown\": \"| Severity Score | Direct Dependencies | Fixed Versions |\\n| :---: | :----: | :---: |\\n| 8.0 | | 1.2.3 |\\n\"\n },\n \"properties\": {\n \"security-severity\": \"8.0\"\n }\n },\n {\n \"id\": \"found_secrets.js\",\n \"shortDescription\": {\n \"text\": \"\"\n },\n \"help\": {\n \"markdown\": \"| Severity | File | Line:Column | Secret |\\n| :---: | :---: | :---: | :---: |\\n| Medium | found_secrets.js | 1:18 | AAA************ |\"\n },\n \"properties\": {\n \"security-severity\": \"6.9\"\n }\n },\n {\n \"id\": \"plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json\",\n \"shortDescription\": {\n \"text\": \"\"\n },\n \"help\": {\n \"markdown\": \"| Severity | File | Line:Column | Finding |\\n| :---: | :---: | :---: | :---: |\\n| Medium | plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json | 229:38 | BBB************ |\"\n },\n \"properties\": {\n \"security-severity\": \"6.9\"\n }\n }\n ]\n }\n },\n \"results\": [\n {\n \"ruleId\": \"CVE-2022-1234, CVE-2023-1234\",\n \"ruleIndex\": 0,\n \"message\": {\n \"text\": \"[CVE-2022-1234, CVE-2023-1234] vulnerability1 \"\n },\n \"locations\": [\n {\n \"physicalLocation\": {\n \"artifactLocation\": {\n \"uri\": \"go.mod\"\n },\n \"region\": {\n \"startLine\": 0,\n \"startColumn\": 0,\n \"endLine\": 0\n }\n }\n }\n ]\n },\n {\n \"ruleId\": \"found_secrets.js\",\n \"ruleIndex\": 1,\n \"message\": {\n \"text\": \"Potential Secret Exposed\"\n },\n \"locations\": [\n {\n \"physicalLocation\": {\n \"artifactLocation\": {\n \"uri\": \"found_secrets.js\"\n },\n \"region\": {\n \"startLine\": 1,\n \"startColumn\": 18,\n \"endLine\": 1\n }\n }\n }\n ]\n },\n {\n \"ruleId\": \"plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json\",\n \"ruleIndex\": 2,\n \"message\": {\n \"text\": \"Infrastructure as Code Vulnerability\"\n },\n \"locations\": [\n {\n \"physicalLocation\": {\n \"artifactLocation\": {\n \"uri\": \"plan/nonapplicable/req_sw_terraform_azure_compute_no_pass_auth.json\"\n },\n \"region\": {\n 
\"startLine\": 229,\n \"startColumn\": 38,\n \"endLine\": 229\n }\n }\n }\n ]\n }\n ]\n }\n ]\n}", + }, + { + name: "Scan results without vulnerabilities", + extendedResults: &ExtendedScanResults{}, + isMultipleRoots: true, + markdownOutput: true, + expectedSarifOutput: "{\n \"version\": \"2.1.0\",\n \"$schema\": \"https://json.schemastore.org/sarif-2.1.0-rtm.5.json\",\n \"runs\": [\n {\n \"tool\": {\n \"driver\": {\n \"informationUri\": \"https://example.com/\",\n \"name\": \"JFrog Security\",\n \"rules\": []\n }\n },\n \"results\": []\n }\n ]\n}", + }, } - var scanResults []services.ScanResponse - extendedResults := &ExtendedScanResults{XrayResults: scanResults} - scanResults = append(scanResults, currentScan) - sarif, err := GenerateSarifFileFromScan(scanResults, extendedResults, false, false) - assert.NoError(t, err) - expected := "{\"version\":\"2.1.0\",\"$schema\":\"https://json.schemastore.org/sarif-2.1.0-rtm.5.json\",\"runs\":[{\"tool\":{\"driver\":{\"informationUri\":\"https://jfrog.com/xray/\",\"name\":\"JFrog Xray\",\"rules\":[{\"id\":\"XRAY-1\",\"shortDescription\":null,\"help\":{\"markdown\":\"summary-1\"},\"properties\":{\"security-severity\":\"9.0\"}}]}},\"results\":[{\"ruleId\":\"XRAY-1\",\"ruleIndex\":0,\"message\":{\"text\":\"[CVE-2022-0000] Upgrade component-G: to [2.1.3]\"},\"locations\":[{\"physicalLocation\":{\"artifactLocation\":{\"uri\":\"go.mod\"}}}]}]}]}" - assert.JSONEq(t, expected, sarif) - sarif, err = GenerateSarifFileFromScan(scanResults, extendedResults, false, true) - assert.NoError(t, err) - expected = "{\n \"version\": \"2.1.0\",\n \"$schema\": \"https://json.schemastore.org/sarif-2.1.0-rtm.5.json\",\n \"runs\": [\n {\n \"tool\": {\n \"driver\": {\n \"informationUri\": \"https://jfrog.com/xray/\",\n \"name\": \"JFrog Xray\",\n \"rules\": [\n {\n \"id\": \"XRAY-1\",\n \"shortDescription\": null,\n \"help\": {\n \"markdown\": \"| Severity Score | Direct Dependencies | Fixed Versions |\\n| :--- | :----: | ---: |\\n| 9.0 | | [2.1.3] |\"\n },\n \"properties\": {\n \"security-severity\": \"9.0\"\n }\n }\n ]\n }\n },\n \"results\": [\n {\n \"ruleId\": \"XRAY-1\",\n \"ruleIndex\": 0,\n \"message\": {\n \"text\": \"[CVE-2022-0000] Upgrade component-G: to [2.1.3]\"\n },\n \"locations\": [\n {\n \"physicalLocation\": {\n \"artifactLocation\": {\n \"uri\": \"go.mod\"\n }\n }\n }\n ]\n }\n ]\n }\n ]\n}" - assert.JSONEq(t, expected, sarif) + for _, testCase := range testCases { + t.Run(testCase.name, func(t *testing.T) { + sarifOutput, err := GenerateSarifFileFromScan(testCase.extendedResults, testCase.isMultipleRoots, testCase.markdownOutput, "JFrog Security", "https://example.com/") + assert.NoError(t, err) + assert.Equal(t, testCase.expectedSarifOutput, sarifOutput) + }) + } } -func TestGetHeadline(t *testing.T) { - assert.Equal(t, "[CVE-2022-1234] Upgrade loadsh:1.4.1 to 2.0.0", getHeadline("loadsh", "1.4.1", "CVE-2022-1234", "2.0.0")) - assert.NotEqual(t, "[CVE-2022-1234] Upgrade loadsh:1.4.1 to 2.0.0", getHeadline("loadsh", "1.2.1", "CVE-2022-1234", "2.0.0")) +func TestGetVulnerabilityOrViolationSarifHeadline(t *testing.T) { + assert.Equal(t, "[CVE-2022-1234] loadsh 1.4.1", getVulnerabilityOrViolationSarifHeadline("loadsh", "1.4.1", "CVE-2022-1234")) + assert.NotEqual(t, "[CVE-2022-1234] loadsh 1.4.1", getVulnerabilityOrViolationSarifHeadline("loadsh", "1.2.1", "CVE-2022-1234")) } func TestGetCves(t *testing.T) { @@ -57,3 +95,273 @@ func TestGetCves(t *testing.T) { assert.Equal(t, "CVE-2022-1234, CVE-2019-1234", getCves(cvesRow, issueId)) assert.Equal(t, issueId, 
getCves(nil, issueId)) } + +func TestGetIacOrSecretsProperties(t *testing.T) { + testCases := []struct { + name string + secretOrIac formats.IacSecretsRow + markdownOutput bool + isSecret bool + expectedOutput sarifProperties + }{ + { + name: "Infrastructure as Code vulnerability without markdown output", + secretOrIac: formats.IacSecretsRow{ + Severity: "high", + File: path.Join("path", "to", "file"), + LineColumn: "10:5", + Text: "Vulnerable code", + Type: "Terraform", + }, + markdownOutput: false, + isSecret: false, + expectedOutput: sarifProperties{ + Applicable: "", + Cves: "", + Headline: "Infrastructure as Code Vulnerability", + Severity: "8.9", + Description: "Vulnerable code", + MarkdownDescription: "", + XrayID: "", + File: path.Join("path", "to", "file"), + LineColumn: "10:5", + SecretsOrIacType: "Terraform", + }, + }, + { + name: "Potential secret exposed with markdown output", + secretOrIac: formats.IacSecretsRow{ + Severity: "medium", + File: path.Join("path", "to", "file"), + LineColumn: "5:3", + Text: "Potential secret", + Type: "AWS Secret Manager", + }, + markdownOutput: true, + isSecret: true, + expectedOutput: sarifProperties{ + Applicable: "", + Cves: "", + Headline: "Potential Secret Exposed", + Severity: "6.9", + Description: "Potential secret", + MarkdownDescription: fmt.Sprintf("| Severity | File | Line:Column | Secret |\n| :---: | :---: | :---: | :---: |\n| medium | %s | 5:3 | Potential secret |", path.Join("path", "to", "file")), + XrayID: "", + File: path.Join("path", "to", "file"), + LineColumn: "5:3", + SecretsOrIacType: "AWS Secret Manager", + }, + }, + } + + for _, testCase := range testCases { + t.Run(testCase.name, func(t *testing.T) { + output := getIacOrSecretsProperties(testCase.secretOrIac, testCase.markdownOutput, testCase.isSecret) + assert.Equal(t, testCase.expectedOutput.Applicable, output.Applicable) + assert.Equal(t, testCase.expectedOutput.Cves, output.Cves) + assert.Equal(t, testCase.expectedOutput.Headline, output.Headline) + assert.Equal(t, testCase.expectedOutput.Severity, output.Severity) + assert.Equal(t, testCase.expectedOutput.Description, output.Description) + assert.Equal(t, testCase.expectedOutput.MarkdownDescription, output.MarkdownDescription) + assert.Equal(t, testCase.expectedOutput.XrayID, output.XrayID) + assert.Equal(t, testCase.expectedOutput.File, output.File) + assert.Equal(t, testCase.expectedOutput.LineColumn, output.LineColumn) + assert.Equal(t, testCase.expectedOutput.SecretsOrIacType, output.SecretsOrIacType) + }) + } +} + +func TestGetViolatedDepsSarifProps(t *testing.T) { + testCases := []struct { + name string + vulnerability formats.VulnerabilityOrViolationRow + markdownOutput bool + expectedOutput sarifProperties + }{ + { + name: "Vulnerability with markdown output", + vulnerability: formats.VulnerabilityOrViolationRow{ + Summary: "Vulnerable dependency", + Severity: "high", + Applicable: "Applicable", + ImpactedDependencyName: "example-package", + ImpactedDependencyVersion: "1.0.0", + ImpactedDependencyType: "npm", + FixedVersions: []string{"1.0.1", "1.0.2"}, + Components: []formats.ComponentRow{ + {Name: "example-package", Version: "1.0.0"}, + }, + Cves: []formats.CveRow{ + {Id: "CVE-2021-1234", CvssV3: "7.2"}, + {Id: "CVE-2021-5678", CvssV3: "7.2"}, + }, + IssueId: "XRAY-12345", + }, + markdownOutput: true, + expectedOutput: sarifProperties{ + Applicable: "Applicable", + Cves: "CVE-2021-1234, CVE-2021-5678", + Headline: "[CVE-2021-1234, CVE-2021-5678] example-package 1.0.0", + Severity: "7.2", + Description: 
"Vulnerable dependency", + MarkdownDescription: "| Severity Score | Contextual Analysis | Direct Dependencies | Fixed Versions |\n| :---: | :---: | :---: | :---: |\n| 7.2 | Applicable | `example-package 1.0.0` | 1.0.1, 1.0.2 |\n", + }, + }, + { + name: "Vulnerability without markdown output", + vulnerability: formats.VulnerabilityOrViolationRow{ + Summary: "Vulnerable dependency", + Severity: "high", + Applicable: "Applicable", + ImpactedDependencyName: "example-package", + ImpactedDependencyVersion: "1.0.0", + ImpactedDependencyType: "npm", + FixedVersions: []string{"1.0.1", "1.0.2"}, + Components: []formats.ComponentRow{ + {Name: "example-package", Version: "1.0.0"}, + }, + Cves: []formats.CveRow{ + {Id: "CVE-2021-1234", CvssV3: "7.2"}, + {Id: "CVE-2021-5678", CvssV3: "7.2"}, + }, + IssueId: "XRAY-12345", + }, + expectedOutput: sarifProperties{ + Applicable: "Applicable", + Cves: "CVE-2021-1234, CVE-2021-5678", + Headline: "[CVE-2021-1234, CVE-2021-5678] example-package 1.0.0", + Severity: "7.2", + Description: "Vulnerable dependency", + MarkdownDescription: "", + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + output, err := getViolatedDepsSarifProps(tc.vulnerability, tc.markdownOutput) + assert.NoError(t, err) + assert.Equal(t, tc.expectedOutput.Cves, output.Cves) + assert.Equal(t, tc.expectedOutput.Severity, output.Severity) + assert.Equal(t, tc.expectedOutput.XrayID, output.XrayID) + assert.Equal(t, tc.expectedOutput.MarkdownDescription, output.MarkdownDescription) + assert.Equal(t, tc.expectedOutput.Applicable, output.Applicable) + assert.Equal(t, tc.expectedOutput.Description, output.Description) + assert.Equal(t, tc.expectedOutput.Headline, output.Headline) + }) + } +} + +func TestGetDirectDependenciesFormatted(t *testing.T) { + testCases := []struct { + name string + directDeps []formats.ComponentRow + expectedOutput string + }{ + { + name: "Single direct dependency", + directDeps: []formats.ComponentRow{ + {Name: "example-package", Version: "1.0.0"}, + }, + expectedOutput: "`example-package 1.0.0`", + }, + { + name: "Multiple direct dependencies", + directDeps: []formats.ComponentRow{ + {Name: "dependency1", Version: "1.0.0"}, + {Name: "dependency2", Version: "2.0.0"}, + }, + expectedOutput: "`dependency1 1.0.0`
<br/>`dependency2 2.0.0`", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + output, err := getDirectDependenciesFormatted(tc.directDeps) + assert.NoError(t, err) + assert.Equal(t, tc.expectedOutput, output) + }) + } +} + +func TestGetSarifTableDescription(t *testing.T) { + testCases := []struct { + name string + formattedDeps string + maxCveScore string + applicable string + fixedVersions []string + expectedDescription string + }{ + { + name: "Applicable vulnerability", + formattedDeps: "`example-package 1.0.0`", + maxCveScore: "7.5", + applicable: "Applicable", + fixedVersions: []string{"1.0.1", "1.0.2"}, + expectedDescription: "| Severity Score | Contextual Analysis | Direct Dependencies | Fixed Versions |\n| :---: | :---: | :---: | :---: |\n| 7.5 | Applicable | `example-package 1.0.0` | 1.0.1, 1.0.2 |", + }, + { + name: "Non-applicable vulnerability", + formattedDeps: "`example-package 2.0.0`", + maxCveScore: "6.2", + applicable: "", + fixedVersions: []string{"2.0.1"}, + expectedDescription: "| Severity Score | Direct Dependencies | Fixed Versions |\n| :---: | :----: | :---: |\n| 6.2 | `example-package 2.0.0` | 2.0.1 |", + }, + { + name: "No fixed versions", + formattedDeps: "`example-package 3.0.0`", + maxCveScore: "3.0", + applicable: "", + fixedVersions: []string{}, + expectedDescription: "| Severity Score | Direct Dependencies | Fixed Versions |\n| :---: | :----: | :---: |\n| 3.0 | `example-package 3.0.0` | No fix available |", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + output := getSarifTableDescription(tc.formattedDeps, tc.maxCveScore, tc.applicable, tc.fixedVersions) + assert.Equal(t, tc.expectedDescription, output) + }) + } +} + +func TestFindMaxCVEScore(t *testing.T) { + testCases := []struct { + name string + cves []formats.CveRow + expectedOutput string + expectedError bool + }{ + { + name: "CVEScore with valid float values", + cves: []formats.CveRow{ + {Id: "CVE-2021-1234", CvssV3: "7.5"}, + {Id: "CVE-2021-5678", CvssV3: "9.2"}, + }, + expectedOutput: "9.2", + }, + { + name: "CVEScore with invalid float value", + cves: []formats.CveRow{ + {Id: "CVE-2022-4321", CvssV3: "invalid"}, + }, + expectedOutput: "", + expectedError: true, + }, + { + name: "CVEScore without values", + cves: []formats.CveRow{}, + expectedOutput: "0.0", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + output, err := findMaxCVEScore(tc.cves) + assert.False(t, tc.expectedError && err == nil) + assert.Equal(t, tc.expectedOutput, output) + }) + } +}
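The tests above exercise the SARIF helpers, while the ordering itself comes from the revised Severities map earlier in this change, which folds applicability into the numeric severity value. A standalone sketch of that ranking follows; the inner map keys are simplified stand-ins for the ApplicableStringValue, ApplicabilityUndeterminedStringValue, and NotApplicableStringValue constants, and the numbers are copied from the diff.

package main

import "fmt"

// severityRank mirrors the numeric values of the updated Severities map:
// one integer per (severity, applicability) pair, so a single comparison
// orders rows by severity first and applicability second.
var severityRank = map[string]map[string]int{
	"Critical": {"Applicable": 12, "Undetermined": 11, "Not Applicable": 10},
	"High":     {"Applicable": 9, "Undetermined": 8, "Not Applicable": 7},
	"Medium":   {"Applicable": 6, "Undetermined": 5, "Not Applicable": 4},
	"Low":      {"Applicable": 3, "Undetermined": 2, "Not Applicable": 1},
}

func main() {
	// A not-applicable Critical (10) still outranks an applicable High (9)...
	fmt.Println(severityRank["Critical"]["Not Applicable"] > severityRank["High"]["Applicable"]) // true
	// ...while within one severity, Applicable > Undetermined > Not Applicable.
	fmt.Println(severityRank["High"]["Applicable"] > severityRank["High"]["Undetermined"]) // true
}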