diff --git a/.gitattributes b/.gitattributes index e53d317832e..fb4bf66a4d0 100644 --- a/.gitattributes +++ b/.gitattributes @@ -47,3 +47,13 @@ ############################################################################### *.png binary *.snk binary + +############################################################################### +# Define branch specific files by overriding the merge driver +############################################################################### +global.json merge=ours +eng/branch-vscode-config merge=ours +eng/common merge=ours +eng/Common.props merge=ours +eng/Versions.props merge=ours +eng/Version.Details.xml merge=ours diff --git a/.github/actions/action-utils.js b/.github/actions/action-utils.js index 88491c7b226..b401cf2a1ba 100644 --- a/.github/actions/action-utils.js +++ b/.github/actions/action-utils.js @@ -51,4 +51,4 @@ module.exports.friendlyDateFromISODate = function(isoDate) { module.exports.splitVersionTag = splitVersionTag; module.exports.readFile = (fileName) => util.promisify(fs.readFile)(fileName, 'utf8'); -module.exports.writeFile = (fileName, contents) => util.promisify(fs.writeFile)(fileName, contents); \ No newline at end of file +module.exports.writeFile = (fileName, contents) => util.promisify(fs.writeFile)(fileName, contents); diff --git a/.github/actions/generate-release-notes/index.js b/.github/actions/generate-release-notes/index.js index 739522a8d7c..f981e769965 100644 --- a/.github/actions/generate-release-notes/index.js +++ b/.github/actions/generate-release-notes/index.js @@ -155,6 +155,10 @@ async function generateChangelog(octokit, branch, repoOwner, repoName, minMergeD changelog.push(entry); } + if (changelog.length === 0) { + changelog.push("- Updated dependencies"); + } + return changelog.join("\n"); } diff --git a/.github/actions/learning-path-staleness-check/action.yml b/.github/actions/learning-path-staleness-check/action.yml new file mode 100644 index 00000000000..4e651ad6ee8 --- /dev/null +++ 
b/.github/actions/learning-path-staleness-check/action.yml @@ -0,0 +1,30 @@ +name: 'CheckLearningPathLinks Action' +description: 'Checks the links in the learning paths for staleness' +inputs: + repoURLToSearch: + description: 'The repo referenced in the learning path docs (in URL form)' + required: true + learningPathsDirectory: + description: 'The directory where the learning paths reside (e.g. documentation/learningPath)' + required: true + changedFilePaths: + description: 'Paths to the changed files' + required: false + sourceDirectoryName: + description: 'The name of the top-level directory (e.g. src) - only files inside this directory will be included in the search' + required: true + oldHash: + description: 'The hash currently being used by the learning path' + required: true + newHash: + description: 'The hash to be inserted into the updated learning path' + required: true + learningPathHashFile: + description: 'The file where the commit hash being used for the learning paths is kept' + required: true + excludeLinks: + description: 'Comma separated strings to exclude from processing' + required: false +runs: + using: 'node16' + main: 'index.js' diff --git a/.github/actions/learning-path-staleness-check/index.js b/.github/actions/learning-path-staleness-check/index.js new file mode 100644 index 00000000000..0b8e3891711 --- /dev/null +++ b/.github/actions/learning-path-staleness-check/index.js @@ -0,0 +1,274 @@ +const actionUtils = require('../action-utils.js'); +const fs = require('fs'); +const path = require('path'); +const prevPathPrefix = "prev/"; +const linePrefix = "#L"; +const separator = " | "; + +modifiedFilesPathToLearningPathFile = {}; +modifiedFilesUrlToFileName = {}; + +var outOfSync = new Set(); +var manuallyReview = new Set(); +var suggestions = new Set(); +var modifiedFiles = new Set(); + +const oldNewLinkSeparator = ' -> '; + +function ReplaceOldWithNewText(content, oldText, newText) +{ + return content.replaceAll(oldText, newText); +} + 
+function UpdateModifiedFiles(fileName, path, learningPathFile) +{ + modifiedFilesUrlToFileName[path] = fileName; + + modifiedFilesPathToLearningPathFile[path] = modifiedFilesPathToLearningPathFile[path] ? modifiedFilesPathToLearningPathFile[path] : new Set();; + modifiedFilesPathToLearningPathFile[path].add(learningPathFile); + + modifiedFiles = new Set(); + for (currPath in modifiedFilesPathToLearningPathFile) + { + const fileName = modifiedFilesUrlToFileName[currPath]; + modifiedFiles.add(AssembleModifiedFilesOutput(fileName, currPath, Array.from(modifiedFilesPathToLearningPathFile[currPath]))); + } +} + +function AssembleModifiedFilesOutput(fileName, path, learningPathFiles) +{ + return CreateLink(fileName, path, undefined) + separator + BoldedText(learningPathFiles.join(" ")); +} + +function BoldedText(text) +{ + return "**" + text + "**"; +} + +function UpdateManuallyReview(fileName, path, learningPathFile, learningPathLineNumber, lineNumber = undefined) +{ + manuallyReview.add(AssembleOutput(fileName, path, undefined, lineNumber, undefined, learningPathFile, learningPathLineNumber)) +} + +function UpdateOutOfSync(link, learningPathFile) +{ + outOfSync.add(link + separator + BoldedText(learningPathFile)) +} + +function UpdateSuggestions(fileName, oldPath, newPath, learningPathFile, learningPathLineNumber, oldLineNumber, newLineNumber) +{ + suggestions.add(AssembleOutput(fileName, oldPath, newPath, oldLineNumber, newLineNumber, learningPathFile, learningPathLineNumber)) +} + +function CreateLink(fileName, path, lineNumber) +{ + var codeFileLink = "[" + fileName + "]" + "(" + path + ")" + return AppendLineNumber(codeFileLink, lineNumber) +} + +function AssembleOutput(fileName, oldPath, newPath, oldLineNumber, newLineNumber, learningPathFile, learningPathLineNumber) +{ + var codeFileLink = CreateLink(fileName, oldPath, oldLineNumber) + + if (newPath && newLineNumber) { + codeFileLink += oldNewLinkSeparator + CreateLink(fileName, newPath, newLineNumber) + } + + 
return codeFileLink + separator + BoldedText(AppendLineNumber(learningPathFile, learningPathLineNumber, undefined)); +} + +function AppendLineNumber(text, lineNumber) +{ + if (!lineNumber) { return text } + + return text + " " + linePrefix + lineNumber +} + +function CheckForEndOfLink(str, startIndex) +{ + const illegalCharIndex = str.substr(startIndex).search(/[\>\])\s]|$|.$|.\s/m); // This regex isn't perfect, but should cover most cases. + return illegalCharIndex; +} + +function StripLineNumber(link, linePrefixIndex) +{ + return link.substring(0, linePrefixIndex); +} + +function GetContent(path) { + try { + return fs.readFileSync(path, 'utf8'); + } + catch (error) {} + + return undefined; +} + +function ConstructOutputText(core) +{ + var body = ""; + + if (manuallyReview.size > 0) { body += "

Manually Review:

" + Array.from(manuallyReview).join("
") + "
"; } + + if (outOfSync.size > 0) { body += "

Links With Out Of Sync Commit Hashes:

" + Array.from(outOfSync).join("
") + "
"; } + + if (suggestions.size > 0) { body += "

Auto-Applied Suggestions:

" + Array.from(suggestions).join("
") + "
"; } + + if (modifiedFiles.size > 0) { body += "

Modified Files:

" + Array.from(modifiedFiles).join("
") + "
"; } + + console.log("body=" + body); + core.setOutput('outputText', body); +} + +function ValidateLinks(learningPathContents, repoURLToSearch, modifiedPRFiles, learningPathFile, oldHash, newHash, sourceDirectoryName, excludeLinksArray) +{ + // Get all indices where a link to the repo is found within the current learning path file + var linkIndices = []; + for(var pos = learningPathContents.indexOf(repoURLToSearch); pos !== -1; pos = learningPathContents.indexOf(repoURLToSearch, pos + 1)) { + linkIndices.push(pos); + } + + for(let startOfLink of linkIndices) + { + // Clean up the link, determine if it has a line number suffix + let endOfLink = startOfLink + CheckForEndOfLink(learningPathContents, startOfLink) + if (endOfLink < startOfLink) { endOfLink = learningPathContents.length; } // If no illegal characters are found, the link is at the end of the file + + const link = learningPathContents.substring(startOfLink, endOfLink); + + if (excludeLinksArray.some(excludeLink => link.toLowerCase().includes(excludeLink))) { continue; } + + const pathStartIndex = link.indexOf(sourceDirectoryName); + + if (pathStartIndex === -1) { continue } + + if (!link.includes(oldHash)) + { + UpdateOutOfSync(link, learningPathFile); + continue + } + + const linePrefixIndex = link.indexOf(linePrefix); + const linkHasLineNumber = linePrefixIndex !== -1; + const pathEndIndex = linkHasLineNumber ? linePrefixIndex : endOfLink; + + // Check if the file being referenced by the link is one of the modified files in the PR + const linkFilePath = link.substring(pathStartIndex, pathEndIndex); + if (modifiedPRFiles.includes(linkFilePath)) + { + const fileName = linkFilePath.substring(linkFilePath.lastIndexOf('/') + 1); + + UpdateModifiedFiles(fileName, linkHasLineNumber ? 
StripLineNumber(link, linePrefixIndex) : link, learningPathFile); + + // This is the line number in the learning path file that contains the link - not the #L line number in the link itself + const learningPathLineNumber = learningPathContents.substring(0, startOfLink).split("\n").length; + + var headContent = GetContent(linkFilePath) + if (!headContent) { + UpdateManuallyReview(fileName, link, learningPathFile, learningPathLineNumber); + continue + } + const headContentLines = headContent.toString().split("\n"); + + if (!linkHasLineNumber) { continue; } + const oldLineNumber = Number(link.substring(linePrefixIndex + linePrefix.length, link.length)); + + var prevContent = GetContent(prevPathPrefix + linkFilePath) + if (!prevContent) { continue; } + const prevContentLines = prevContent.toString().split("\n"); + + if (prevContentLines.length < oldLineNumber) + { + UpdateManuallyReview(fileName, link, learningPathFile, learningPathLineNumber, oldLineNumber); + } + else if (headContentLines.length < oldLineNumber || prevContentLines[oldLineNumber - 1].trim() !== headContentLines[oldLineNumber - 1].trim()) + { + const newLineNumberLast = headContentLines.lastIndexOf(prevContentLines[oldLineNumber - 1]) + 1; + const newLineNumberFirst = headContentLines.indexOf(prevContentLines[oldLineNumber - 1]) + 1; + + if (newLineNumberLast !== newLineNumberFirst) // Multiple matches found in the file + { + UpdateManuallyReview(fileName, link, learningPathFile, learningPathLineNumber, oldLineNumber); + } + else + { + let updatedLink = StripLineNumber(link.replace(oldHash, newHash), linePrefixIndex) + linePrefix + newLineNumberFirst; + UpdateSuggestions(fileName, link, updatedLink, learningPathFile, learningPathLineNumber, oldLineNumber, newLineNumberFirst); + } + } + } + } +} + +const main = async () => { + + const [core] = await actionUtils.installAndRequirePackages("@actions/core"); + + try { + const learningPathDirectory = core.getInput('learningPathsDirectory', { required: true 
}); + const repoURLToSearch = core.getInput('repoURLToSearch', { required: true }); + const changedFilePaths = core.getInput('changedFilePaths', {required: false}); + const learningPathHashFile = core.getInput('learningPathHashFile', { required: true }); + const sourceDirectoryName = core.getInput('sourceDirectoryName', { required: true }); + const oldHash = core.getInput('oldHash', { required: true }); + const newHash = core.getInput('newHash', { required: true }); + const excludeLinks = core.getInput('excludeLinks', { required: false }); + const excludeLinksArray = excludeLinks ? excludeLinks.split(',').map(function(item) { return item.toLowerCase().trim() }) : []; + + if (changedFilePaths === null || changedFilePaths.trim() === "") { return } + + // Scan each file in the learningPaths directory + fs.readdir(learningPathDirectory, (_, files) => { + files.forEach(learningPathFile => { + try { + const learningPathContents = GetContent(path.join(learningPathDirectory, learningPathFile)) + if (learningPathContents) + { + ValidateLinks(learningPathContents, repoURLToSearch, changedFilePaths.split(' '), learningPathFile, oldHash, newHash, sourceDirectoryName, excludeLinksArray) + ConstructOutputText(core); + } + } catch (error) { + console.log("Error: " + error) + console.log("Could not find learning path file: " + learningPathFile) + } + }); + }); + + fs.writeFileSync(learningPathHashFile, newHash); + + // Scan each file in the learningPaths directory + fs.readdir(learningPathDirectory, (_, files) => { + + files.forEach(learningPathFile => { + try { + const fullPath = path.join(learningPathDirectory, learningPathFile) + let content = fs.readFileSync(fullPath, 'utf8') + + let suggestionsArray = Array.from(suggestions); + if (suggestionsArray && suggestionsArray.length > 0) { + suggestionsArray.forEach(suggestion => { + const suggestionArray = suggestion.split(oldNewLinkSeparator) + var oldLink = suggestionArray[0] + var newLink = suggestionArray[1] + oldLink = 
oldLink.substring(oldLink.indexOf('(') + 1, oldLink.lastIndexOf(')')) + newLink = newLink.substring(newLink.indexOf('(') + 1, newLink.lastIndexOf(')')) + content = ReplaceOldWithNewText(content, oldLink, newLink) + }) + } + + content = ReplaceOldWithNewText(content, oldHash, newHash) + fs.writeFileSync(fullPath, content); + } catch (error) { + console.log("Error: " + error) + console.log("Could not find learning path file: " + learningPathFile) + } + }); + }); + + } catch (error) { + core.setFailed(error.message); + } +} + +// Call the main function to run the action +main(); diff --git a/.github/actions/open-pr/action.yml b/.github/actions/open-pr/action.yml index 565c0b17a4d..40a0fb84292 100644 --- a/.github/actions/open-pr/action.yml +++ b/.github/actions/open-pr/action.yml @@ -25,6 +25,9 @@ inputs: fail_if_files_unchanged: description: 'Fails the action if all of the specified files_to_commit are unchanged.' required: false + always_create_pr: + description: 'Always try to create a PR, even if there are no uncommitted changes.' + required: false update_if_already_exists: description: 'Allows updating an existing PR, or re-opening it if closed.' 
required: false @@ -49,8 +52,8 @@ runs: base_branch_name=${BASE_BRANCH_NAME:-${current_branch_name}} - git config user.name "github-actions" - git config user.email "github-actions@github.com" + git config user.name "github-actions[bot]" + git config user.email "41898282+github-actions[bot]@users.noreply.github.com" git add ${{ inputs.files_to_commit }} are_files_changed="" @@ -61,10 +64,13 @@ runs: exit 1 fi - exit 0 + if [[ "$ALWAYS_CREATE_PR" != "true" ]]; then + exit 0 + fi + else + git commit -m "$COMMIT_MESSAGE" fi - git commit -m "$COMMIT_MESSAGE" git push --force --set-upstream origin "HEAD:$pr_branch_name" extraArgs="" @@ -81,6 +87,7 @@ runs: COMMIT_MESSAGE: ${{ inputs.commit_message }} DRAFT: ${{ inputs.draft }} LABELS: ${{ inputs.labels }} + ALWAYS_CREATE_PR: ${{ inputs.always_create_pr }} FAIL_IF_FILES_UNCHANGED: ${{ inputs.fail_if_files_unchanged }} UPDATE_IF_ALREADY_EXISTS: ${{ inputs.update_if_already_exists }} TITLE: ${{ inputs.title }} diff --git a/.github/dependabot.template.yml b/.github/dependabot.template.yml index 30abd10b697..f16b4a6a91d 100644 --- a/.github/dependabot.template.yml +++ b/.github/dependabot.template.yml @@ -19,6 +19,7 @@ #@ def getBranches(): #@ return [ #@ struct.encode({"name": "main", "majorVersion": "8"}), +#@ struct.encode({"name": "release/8.0", "majorVersion": "8"}), #@ struct.encode({"name": "release/7.x", "majorVersion": "7"}), #@ struct.encode({"name": "release/6.x", "majorVersion": "6"}), #@ ] @@ -34,13 +35,14 @@ updates: #@ for branch in getBranches(): #@ commit_prefix = "[" + branch.name + "] " - package-ecosystem: "nuget" - directory: "/eng/dependabot" + directory: "/eng/dependabot/independent" schedule: interval: "daily" target-branch: #@ branch.name ignore: - dependency-name: "Microsoft.Extensions.*" update-types: [ "version-update:semver-major" ] + - dependency-name: "Moq" commit-message: prefix: #@ commit_prefix groups: diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 
90693540c96..f11506ebd96 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -6,7 +6,7 @@ updates: interval: daily target-branch: main - package-ecosystem: nuget - directory: /eng/dependabot + directory: /eng/dependabot/independent schedule: interval: daily target-branch: main @@ -14,6 +14,7 @@ updates: - dependency-name: Microsoft.Extensions.* update-types: - version-update:semver-major + - dependency-name: Moq commit-message: prefix: '[main] ' groups: @@ -81,7 +82,83 @@ updates: - Microsoft.Extensions.* - Microsoft.NETCore.App.Runtime.* - package-ecosystem: nuget - directory: /eng/dependabot + directory: /eng/dependabot/independent + schedule: + interval: daily + target-branch: release/8.0 + ignore: + - dependency-name: Microsoft.Extensions.* + update-types: + - version-update:semver-major + - dependency-name: Moq + commit-message: + prefix: '[release/8.0] ' + groups: + azure-storage-dependencies: + patterns: + - Azure.Storage.* + identity-dependencies: + patterns: + - Azure.Identity + - Microsoft.Identity.* + - Microsoft.IdentityModel.* +- package-ecosystem: nuget + directory: /eng/dependabot/nuget.org + schedule: + interval: daily + target-branch: release/8.0 + commit-message: + prefix: '[release/8.0] ' +- package-ecosystem: nuget + directory: /eng/dependabot/net8.0 + schedule: + interval: daily + target-branch: release/8.0 + ignore: + - dependency-name: '*' + update-types: + - version-update:semver-major + commit-message: + prefix: '[release/8.0] ' + groups: + runtime-dependencies: + patterns: + - Microsoft.Extensions.* + - Microsoft.NETCore.App.Runtime.* +- package-ecosystem: nuget + directory: /eng/dependabot/net7.0 + schedule: + interval: daily + target-branch: release/8.0 + ignore: + - dependency-name: '*' + update-types: + - version-update:semver-major + commit-message: + prefix: '[release/8.0] ' + groups: + runtime-dependencies: + patterns: + - Microsoft.Extensions.* + - Microsoft.NETCore.App.Runtime.* +- package-ecosystem: nuget + 
directory: /eng/dependabot/net6.0 + schedule: + interval: daily + target-branch: release/8.0 + ignore: + - dependency-name: '*' + update-types: + - version-update:semver-major + commit-message: + prefix: '[release/8.0] ' + groups: + runtime-dependencies: + patterns: + - Microsoft.Extensions.* + - Microsoft.NETCore.App.Runtime.* +- package-ecosystem: nuget + directory: /eng/dependabot/independent schedule: interval: daily target-branch: release/7.x @@ -89,6 +166,7 @@ updates: - dependency-name: Microsoft.Extensions.* update-types: - version-update:semver-major + - dependency-name: Moq commit-message: prefix: '[release/7.x] ' groups: @@ -140,7 +218,7 @@ updates: - Microsoft.Extensions.* - Microsoft.NETCore.App.Runtime.* - package-ecosystem: nuget - directory: /eng/dependabot + directory: /eng/dependabot/independent schedule: interval: daily target-branch: release/6.x @@ -148,6 +226,7 @@ updates: - dependency-name: Microsoft.Extensions.* update-types: - version-update:semver-major + - dependency-name: Moq commit-message: prefix: '[release/6.x] ' groups: diff --git a/.github/learning-path-sha.txt b/.github/learning-path-sha.txt new file mode 100644 index 00000000000..908276e089a --- /dev/null +++ b/.github/learning-path-sha.txt @@ -0,0 +1 @@ +963f5824e2e4cef8fb6a969a03abb7d26e7eec56 \ No newline at end of file diff --git a/.github/policies/resourceManagement.yml b/.github/policies/resourceManagement.yml index 1a4a75b9ce4..db66955e47d 100644 --- a/.github/policies/resourceManagement.yml +++ b/.github/policies/resourceManagement.yml @@ -93,7 +93,7 @@ configuration: - isAction: action: Edited - isAction: - action: Null + action: Closed - not: isActivitySender: user: msftbot @@ -115,8 +115,6 @@ configuration: action: Reopened - isAction: action: Opened - - isAction: - action: Null - not: hasLabel: label: needs-review @@ -158,14 +156,11 @@ configuration: - payloadType: Pull_Request - hasLabel: label: needs-review - - or: - - isAction: - action: Closed - - isAction: - 
action: Null + - isAction: + action: Closed then: - removeLabel: label: needs-review - description: Remove needs-review Label On Merge Or Close + description: Remove needs-review Label On Close (merged or not) onFailure: -onSuccess: +onSuccess: diff --git a/.github/releases.json b/.github/releases.json index 496755acdc3..5385f66d27d 100644 --- a/.github/releases.json +++ b/.github/releases.json @@ -3,7 +3,9 @@ "additionalMonthsOfSupportOnNewMinorRelease": 3, "cleanupUnsupportedReleasesAfterMonths": 12 }, - "preview": [], + "preview": [ + "9.0" + ], "supported": [ "8.0", "7.3", @@ -16,9 +18,9 @@ ], "releases": { "6.3": { - "tag": "v6.3.4", + "tag": "v6.3.5", "minorReleaseDate": "2022-10-11T00:00:00.000Z", - "patchReleaseDate": "2023-11-14T00:00:00.000Z", + "patchReleaseDate": "2024-02-13T00:00:00.000Z", "supportedFrameworks": [ "net6.0", "netcoreapp3.1" @@ -45,9 +47,9 @@ "outOfSupportDate": "2023-09-13T00:00:00.000Z" }, "8.0": { - "tag": "v8.0.0", + "tag": "v8.0.1", "minorReleaseDate": "2023-11-14T00:00:00.000Z", - "patchReleaseDate": "2023-11-14T00:00:00.000Z", + "patchReleaseDate": "2024-02-13T00:00:00.000Z", "supportedFrameworks": [ "net8.0" ] @@ -63,13 +65,21 @@ "outOfSupportDate": "2023-11-08T00:00:00.000Z" }, "7.3": { - "tag": "v7.3.2", + "tag": "v7.3.3", "minorReleaseDate": "2023-08-08T00:00:00.000Z", - "patchReleaseDate": "2023-11-14T00:00:00.000Z", + "patchReleaseDate": "2024-02-13T00:00:00.000Z", "supportedFrameworks": [ "net6.0", "net7.0" ] + }, + "9.0": { + "tag": "v9.0.0-preview.1.24106.4", + "minorReleaseDate": "2024-02-13T00:00:00.000Z", + "patchReleaseDate": "2024-02-13T00:00:00.000Z", + "supportedFrameworks": [ + "net9.0" + ] } } } \ No newline at end of file diff --git a/.github/workflows/add-markdown-feedback.yml b/.github/workflows/add-markdown-feedback.yml index 2eba99b19ef..d533cd593ae 100644 --- a/.github/workflows/add-markdown-feedback.yml +++ b/.github/workflows/add-markdown-feedback.yml @@ -28,7 +28,7 @@ jobs: - name: Get changed files run: 
| - changed_source_files=$(git diff-tree --no-commit-id --name-only -r "$base_sha" "$GITHUB_SHA" -- documentation ':!documentation/releaseNotes/*' | { grep "**.md$" || test $? = 1; }) + changed_source_files=$(git diff-tree --no-commit-id --name-only --diff-filter=d -r "$base_sha" "$GITHUB_SHA" -- documentation ':!documentation/releaseNotes/*' | { grep "**.md$" || test $? = 1; }) echo "Files to validate: '${changed_source_files}'" changed_source_files=$(echo "$changed_source_files" | xargs | sed 's/ documentation/,documentation/g') echo "updated_files=$(echo ${changed_source_files})" >> $GITHUB_ENV @@ -47,7 +47,7 @@ jobs: git diff > ./pr/linter.diff - name: Upload artifacts - uses: actions/upload-artifact@26f96dfa697d77e81fd5907df203aa23a56210a8 + uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 with: name: pr-linter path: pr/ diff --git a/.github/workflows/check-learning-path-links.yml b/.github/workflows/check-learning-path-links.yml new file mode 100644 index 00000000000..dfe9d2d5e2d --- /dev/null +++ b/.github/workflows/check-learning-path-links.yml @@ -0,0 +1,66 @@ +name: 'Check Learning Path Links' +on: + schedule: # Run once a month + - cron: '0 0 1 * *' + workflow_dispatch: + +permissions: {} + +jobs: + check-learning-path-links: + if: github.repository == 'dotnet/dotnet-monitor' + name: 'Check Learning Path Links' + runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: write + + steps: + - name: Checkout head + uses: actions/checkout@v4 + with: + persist-credentials: true # need this for opening a PR + fetch-depth: 0 + ref: main + + - name: Get previous update SHA + id: get_sha + run: | + prev_sha=$(cat .github/learning-path-sha.txt) + echo "prev_sha=$prev_sha" >> $GITHUB_ENV + + - name: Checkout previous update + uses: actions/checkout@v4 + with: + persist-credentials: false + ref: ${{ env.prev_sha }} + path: prev + + - name: Get changed files + run: | + changed_source_files=$(git diff-tree --no-commit-id 
--name-only -r "$prev_sha" "$GITHUB_SHA" | { grep "**.cs$" || test $? = 1; }) + echo "Files to validate: '${changed_source_files}'" + echo "updated_files=$(echo ${changed_source_files})" >> $GITHUB_ENV + + - name: Check Learning Path Links + id: check-links + uses: ./.github/actions/learning-path-staleness-check + with: + repoURLToSearch: 'https://github.com/dotnet/dotnet-monitor' + learningPathsDirectory: 'documentation/learningPath' + changedFilePaths: ${{ env.updated_files }} + sourceDirectoryName: 'src' + oldHash: ${{ env.prev_sha }} + newHash: ${{ github.sha }} + learningPathHashFile: '.github/learning-path-sha.txt' + + - name: Open PR + uses: ./.github/actions/open-pr + with: + files_to_commit: --all -- :!prev + title: "[REQUIRES MANUAL REVIEW] Update Learning Paths" + commit_message: Update Learning Paths + body: This PR was auto generated and will not be automatically merged in - adjustments should be made manually as-needed.
${{ steps.check-links.outputs.outputText }} + branch_name: learningPathUpdates/${{ github.sha }} + fail_if_files_unchanged: true + auth_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/lint-csharp.yml b/.github/workflows/lint-csharp.yml index 5dd9a02d517..3fd19cffec7 100644 --- a/.github/workflows/lint-csharp.yml +++ b/.github/workflows/lint-csharp.yml @@ -48,7 +48,7 @@ jobs: git diff > ./pr/linter.diff - name: Upload artifacts - uses: actions/upload-artifact@26f96dfa697d77e81fd5907df203aa23a56210a8 + uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 with: name: pr-linter path: pr/ diff --git a/.github/workflows/scan-for-to-do-comments.yml b/.github/workflows/scan-for-to-do-comments.yml index e22b7ecf6bf..6aa51353744 100644 --- a/.github/workflows/scan-for-to-do-comments.yml +++ b/.github/workflows/scan-for-to-do-comments.yml @@ -27,7 +27,7 @@ jobs: COMMENT_AUTHOR: ${{ github.event.comment.user.login }} - name: Upload artifacts - uses: actions/upload-artifact@26f96dfa697d77e81fd5907df203aa23a56210a8 + uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 with: name: issue-todo path: issue/ diff --git a/.github/workflows/spellcheck.yml b/.github/workflows/spellcheck.yml index c1b9773fa49..ef16257b7e8 100644 --- a/.github/workflows/spellcheck.yml +++ b/.github/workflows/spellcheck.yml @@ -17,7 +17,7 @@ jobs: with: persist-credentials: false - - uses: streetsidesoftware/cspell-action@61139534e14c5c19c4c12c9967771f3f20ce9072 + - uses: streetsidesoftware/cspell-action@214db1e3138f326d33b7a6a51c92852e89ab0618 name: Documentation spellcheck if: ${{ !cancelled() }} with: @@ -25,7 +25,7 @@ jobs: inline: error incremental_files_only: true - - uses: streetsidesoftware/cspell-action@61139534e14c5c19c4c12c9967771f3f20ce9072 + - uses: streetsidesoftware/cspell-action@214db1e3138f326d33b7a6a51c92852e89ab0618 name: Resx spellcheck if: ${{ !cancelled() }} with: @@ -33,7 +33,7 @@ jobs: inline: error incremental_files_only: true 
- - uses: streetsidesoftware/cspell-action@61139534e14c5c19c4c12c9967771f3f20ce9072 + - uses: streetsidesoftware/cspell-action@214db1e3138f326d33b7a6a51c92852e89ab0618 name: Source code spellcheck if: ${{ !cancelled() }} with: diff --git a/.github/workflows/submit-linter-suggestions.yml b/.github/workflows/submit-linter-suggestions.yml index 3dfff591640..bf8211c60b5 100644 --- a/.github/workflows/submit-linter-suggestions.yml +++ b/.github/workflows/submit-linter-suggestions.yml @@ -49,7 +49,7 @@ jobs: # The default artifact download action doesn't support cross-workflow # artifacts, so use a 3rd party one. - name: 'Download linting results' - uses: dawidd6/action-download-artifact@e7466d1a7587ed14867642c2ca74b5bcc1e19a2d + uses: dawidd6/action-download-artifact@71072fbb1229e1317f1a8de6b04206afb461bd67 with: workflow: ${{env.workflow_name}} run_id: ${{github.event.workflow_run.id }} @@ -57,7 +57,7 @@ jobs: path: ./pr-linter - name: 'Setup reviewdog' - uses: reviewdog/action-setup@4ada0d03764db369df4069af8fbc514388a3e4fe + uses: reviewdog/action-setup@1d18b2938261447f64c39f831d7395e90ef5a40e # Manually supply the triggering PR event information since when a PR is from a fork, # this workflow running in the base repo will not be given information about it. diff --git a/.github/workflows/submit-to-do-issue.yml b/.github/workflows/submit-to-do-issue.yml index 4732a600536..31b0793e112 100644 --- a/.github/workflows/submit-to-do-issue.yml +++ b/.github/workflows/submit-to-do-issue.yml @@ -35,7 +35,7 @@ jobs: # The default artifact download action doesn't support cross-workflow # artifacts, so use a 3rd party one. 
- name: 'Download linting results' - uses: dawidd6/action-download-artifact@e7466d1a7587ed14867642c2ca74b5bcc1e19a2d + uses: dawidd6/action-download-artifact@71072fbb1229e1317f1a8de6b04206afb461bd67 with: workflow: ${{env.workflow_name}} run_id: ${{github.event.workflow_run.id }} diff --git a/.github/workflows/sync-branches.yml b/.github/workflows/sync-branches.yml index 5840b059dd2..0a2e22e001a 100644 --- a/.github/workflows/sync-branches.yml +++ b/.github/workflows/sync-branches.yml @@ -10,12 +10,12 @@ env: base_branch: 'origin/main' jobs: - sync-branch: + sync-non-code: if: github.repository == 'dotnet/dotnet-monitor' strategy: matrix: - branch: ["release/6.x", "release/7.x"] - name: 'Sync ${{ matrix.branch }}' + branch: ["release/6.x", "release/7.x", "release/8.0"] + name: 'Sync non-code to ${{ matrix.branch }}' runs-on: ubuntu-latest permissions: contents: write @@ -31,7 +31,7 @@ jobs: - name: Sync branch run: | - git checkout "$base_branch" -- \ + git checkout --no-overlay "$base_branch" -- \ ".github" \ ".devcontainer" \ ".vscode" \ @@ -39,14 +39,61 @@ jobs: "cspell.json" \ "documentation/**.md" + - name: Open PR + uses: ./.github/actions/open-pr + with: + files_to_commit: "*" + title: '[${{ matrix.branch }}] Sync non-code with ${{ env.base_branch }}' + commit_message: sync non-code with ${{ env.base_branch }} + body: Sync non-code with ${{ env.base_branch }}. This PR was auto generated and will not be automatically merged in. 
+ branch_name: sync/${{ matrix.branch }} + fail_if_files_unchanged: false + labels: 'automatic-pr' + auth_token: ${{ secrets.GITHUB_TOKEN }} + + sync-code: + if: github.repository == 'dotnet/dotnet-monitor' + strategy: + matrix: + branch: ["feature/9.x"] + name: 'Sync branch with ${{ matrix.branch }}' + runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: write + + steps: + - name: Checkout + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 + with: + persist-credentials: true # We need to persist credentials to push the resulting changes upstream. + fetch-depth: 0 # Fetch the entire repo for the below git operations + ref: ${{ matrix.branch }} + + - name: Sync branch + run: | + git config user.name "github-actions[bot]" + git config user.email "41898282+github-actions[bot]@users.noreply.github.com" + # Activate the ours merge driver to respect any branch specific files + git config merge.ours.driver true + git merge "$base_branch" --strategy=ort --strategy-option=theirs + + matches=$(perl -ne '/^([^\s]+)\s+merge=ours/gm && print "$1 "' .gitattributes) + for match in $matches; do + git checkout --no-overlay "$target_branch" -- "$match" + done + env: + target_branch: origin/${{ matrix.branch }} + - name: Open PR uses: ./.github/actions/open-pr with: files_to_commit: "*" title: '[${{ matrix.branch }}] Sync branch with ${{ env.base_branch }}' - commit_message: sync branch with ${{ env.base_branch }} + commit_message: Restore branch-specific files body: Sync branch with ${{ env.base_branch }}. This PR was auto generated and will not be automatically merged in. 
branch_name: sync/${{ matrix.branch }} fail_if_files_unchanged: false + always_create_pr: true labels: 'automatic-pr' auth_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/Directory.Packages.props b/Directory.Packages.props index 588553accaa..265fb7bef1d 100644 --- a/Directory.Packages.props +++ b/Directory.Packages.props @@ -22,8 +22,8 @@ + - diff --git a/documentation/README.md b/documentation/README.md index 1f9febac155..e287d4b3d6a 100644 --- a/documentation/README.md +++ b/documentation/README.md @@ -17,7 +17,7 @@ When running a dotnet application, differences in diverse local and production e - [Running in Kubernetes](./kubernetes.md) - Enabling SSL - [API Endpoints](./api/README.md) - - [Swagger UI & OpenAPI document](./swagger-ui.md) + - [OpenAPI document](./openapi.md) - [`/processes`](./api/processes.md) - [`/dump`](./api/dump.md) - [`/gcdump`](./api/gcdump.md) diff --git a/documentation/api/definitions.md b/documentation/api/definitions.md index a2365f758d1..822812def9f 100644 --- a/documentation/api/definitions.md +++ b/documentation/api/definitions.md @@ -97,6 +97,7 @@ Object describing the list of methods to capture parameters for. |---|---|---| | `methods` | [MethodDescription](#methoddescription)[] | Array of methods to capture parameters for. | | `useDebuggerDisplayAttribute` | bool | Determines if parameters should be formatted using their [`DebuggerDisplayAttribute`](https://learn.microsoft.com/dotnet/api/system.diagnostics.debuggerdisplayattribute) if available and supported. Expressions in attributes may consist of properties, fields, methods without parameters, or any combination of these. | +| `captureLimit` | int | The number of times to capture parameters before stopping. If the specified duration elapses the operation will stop even if the capture limit is not yet reached. Note that parameters may continue to be captured for a short amount of time after this limit is reached. 
| ## DotnetMonitorInfo diff --git a/documentation/api/parameters.md b/documentation/api/parameters.md index ed580d79f82..938eda5a08b 100644 --- a/documentation/api/parameters.md +++ b/documentation/api/parameters.md @@ -115,7 +115,8 @@ Authorization: Bearer fffffffffffffffffffffffffffffffffffffffffff= "typeName": "System.String", "methodName": "Concat" } - ] + ], + "captureLimit": 2 } ``` diff --git a/documentation/authentication.md b/documentation/authentication.md index 5ffc5886a06..aa58a757870 100644 --- a/documentation/authentication.md +++ b/documentation/authentication.md @@ -53,16 +53,6 @@ curl -H "Authorization: Bearer " https://localhost:52323/p (Invoke-WebRequest -Uri https://localhost:52323/processes -Headers @{ 'Authorization' = 'Bearer ' }).Content | ConvertFrom-Json ``` -### Interactively authenticating using the Swagger UI - -If you want Azure AD users to be able to interactively authenticate with your `dotnet monitor` instance using the in-box Swagger UI you will need to: -- [Add a new scope](https://learn.microsoft.com/azure/active-directory/develop/quickstart-configure-app-expose-web-apis#add-a-scope). This scope will only be used to enable interactive authentication and users will still be required to be part of the configured app role. Custom Application ID URIs are supported. -- [Add a redirect URI to the App Registration](https://learn.microsoft.com/azure/active-directory/develop/quickstart-register-app#add-a-redirect-uri). - 1. Select `Single-page application` as the platform. - 1. For the redirect URI, enter `{dotnet monitor address}/swagger/oauth2-redirect.html`, where `{dotnet monitor address}` is the address of your `dotnet monitor` instance. -> [!NOTE] -> If using `localhost` for the address, you do **not** need to specify the port number. 
Example: `https://localhost/swagger/oauth2-redirect.html` - ## Windows Authentication We only recommend using Windows Authentication if you're running `dotnet monitor` as a local development tool on Windows; for all other environments using an [API Key](#api-key-authentication) is recommended. diff --git a/documentation/configuration/azure-ad-authentication-configuration.md b/documentation/configuration/azure-ad-authentication-configuration.md index 2ceda3b3b34..6cae163ab41 100644 --- a/documentation/configuration/azure-ad-authentication-configuration.md +++ b/documentation/configuration/azure-ad-authentication-configuration.md @@ -13,7 +13,6 @@ Azure Active Directory authentication must be configured before `dotnet monitor` |---|---|---|---| | ClientId | string | true | The unique application (client) id assigned to the app registration in Azure Active Directory. | | RequiredRole | string | true | The role required to be able to authenticate. | -| SwaggerScope | string | false | The API scope required by users to be able to interactively authenticate using the in-box Swagger UI. If not specified, users will not be able to interactively authenticate. | | AppIdUri | uri | false | The App ID URI of the app registration. Defaults to `api://{ClientId}` if not specified. | | Instance | uri | false | Specifies the Azure cloud instance users are signing in from. Can be either the Azure public cloud or one of the national clouds. Defaults to the Azure public cloud (`https://login.microsoftonline.com`). | | TenantId | string | false | The tenant id of the Azure Active Directory tenant, or its tenant domain. Defaults to `organizations`. 
| diff --git a/documentation/learningPath/aks.md b/documentation/learningPath/aks.md index 0bb619da2e2..918b2678207 100644 --- a/documentation/learningPath/aks.md +++ b/documentation/learningPath/aks.md @@ -9,7 +9,7 @@ In addition to its availability as a .NET CLI tool, the `dotnet monitor` tool is This workflow takes your local development copy of `dotnet-monitor`, patches it with a local development copy of the [.NET Core Diagnostics Repo](https://github.com/dotnet/diagnostics#net-core-diagnostics-repo), and makes it available as an image for you to consume in an ACR (Azure Container Registry). Note that there are many other ways to do this - this is meant to serve as a basic template that can be adapted to match your needs. -1. Open `pwsh` and run the [generate-dev-sln script](https://github.com/dotnet/dotnet-monitor/blob/main/generate-dev-sln.ps1), providing a path to your local copy of the diagnostics repo. +1. Open `pwsh` and run the [generate-dev-sln script](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/generate-dev-sln.ps1), providing a path to your local copy of the diagnostics repo. > [!NOTE] > If your changes do not involve the [.NET Core Diagnostics Repo](https://github.com/dotnet/diagnostics#net-core-diagnostics-repo), you don't need to complete this step. diff --git a/documentation/learningPath/api.md b/documentation/learningPath/api.md index de42e4e36d4..c16ff1b863d 100644 --- a/documentation/learningPath/api.md +++ b/documentation/learningPath/api.md @@ -7,15 +7,15 @@ dotnet-monitor exposes functionality through both [collection rules](./collectio ## Adding New APIs -The web API surface is defined by a series of controllers [here](../../src/Microsoft.Diagnostics.Monitoring.WebApi/Controllers/). It's common for an API to expose functionality also available via [Actions](./collectionrules.md#actions) and so methods in these controllers are often wrappers around a shared implementation. 
Each controller may have one or more attributes that configure how and where it is exposed, you can learn more about the notable controller attributes [here](#notable-controller-attributes). +The web API surface is defined by a series of controllers [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Microsoft.Diagnostics.Monitoring.WebApi/Controllers/). It's common for an API to expose functionality also available via [Actions](./collectionrules.md#actions) and so methods in these controllers are often wrappers around a shared implementation. Each controller may have one or more attributes that configure how and where it is exposed, you can learn more about the notable controller attributes [here](#notable-controller-attributes). -If the new API needs to either accept or return structured data, a dedicated model should be used. Models are defined [here](../../src/Microsoft.Diagnostics.Monitoring.WebApi/Models/). +If the new API needs to either accept or return structured data, a dedicated model should be used. Models are defined [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Microsoft.Diagnostics.Monitoring.WebApi/Models/). When adding a new API, it's important to also update the [`openapi.json`](../openapi.json) spec which describes the API surface. There are CI tests that will ensure this file has been updated to reflect any API changes. Learn more about updating `openapi.json` [here](./testing.md#openapi-generation). ### Adding Tests -Web APIs in dotnet-monitor are typically tested using functional tests that leverage the [ApiClient](../../src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/HttpApi/ApiClient.cs) to call a specific API. Learn more about how the functional tests are defined and operate [here](./testing.md#functional-tests). 
+Web APIs in dotnet-monitor are typically tested using functional tests that leverage the [ApiClient](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/HttpApi/ApiClient.cs) to call a specific API. Learn more about how the functional tests are defined and operate [here](./testing.md#functional-tests). ## Notable Controller Attributes @@ -35,6 +35,6 @@ dotnet-monitor supports multiple different [authentication modes](../authenticat ### Determining Authentication Mode -When dotnet-monitor starts, the command line arguments are first inspected to see if a specific authentication mode was set (such as `--no-auth`), referred to as the `StartupAuthenticationMode`, this is calculated [here](../../src/Tools/dotnet-monitor/Commands/CollectCommandHandler.cs#L27). If no modes were explicitly set via a command line argument, dotnet-monitor will select `Deferred` as the `StartupAuthenticationMode`. This indicates that the user configuration should be looked at to determine the authentication mode later on in the startup process. +When dotnet-monitor starts, the command line arguments are first inspected to see if a specific authentication mode was set (such as `--no-auth`), referred to as the `StartupAuthenticationMode`, this is calculated [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/Commands/CollectCommandHandler.cs#L29). If no modes were explicitly set via a command line argument, dotnet-monitor will select `Deferred` as the `StartupAuthenticationMode`. This indicates that the user configuration should be looked at to determine the authentication mode later on in the startup process. 
-After determining the `StartupAuthenticationMode` mode, the relevant [IAuthenticationConfigurator](../../src/Tools/dotnet-monitor/Auth/IAuthenticationConfigurator.cs) is created by the [AuthConfiguratorFactory](../../src/Tools/dotnet-monitor/Auth/AuthConfiguratorFactory.cs). This factory also handles deciding what authentication mode to use when `StartupAuthenticationMode` is `Deferred`. The selected configurator is used to configure various parts of dotnet-monitor that are specific to authentication, such as protecting the web APIs, add authentication-mode specific logging, and configuring the built-in Swagger UI. +After determining the `StartupAuthenticationMode` mode, the relevant [IAuthenticationConfigurator](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/Auth/IAuthenticationConfigurator.cs) is created by the [AuthConfiguratorFactory](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/Auth/AuthConfiguratorFactory.cs). This factory also handles deciding what authentication mode to use when `StartupAuthenticationMode` is `Deferred`. The selected configurator is used to configure various parts of dotnet-monitor that are specific to authentication, such as protecting the web APIs and adding authentication-mode specific logging. diff --git a/documentation/learningPath/collectionrules.md b/documentation/learningPath/collectionrules.md index 6ea6c9ce47e..219b2b3c9c9 100644 --- a/documentation/learningPath/collectionrules.md +++ b/documentation/learningPath/collectionrules.md @@ -32,49 +32,49 @@ graph LR ### Key Areas Of The Code -* Collection rules are registered [here](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Tools/dotnet-monitor/ServiceCollectionExtensions.cs#L100). When adding a new trigger or action, these types need to be added here to take effect. 
This section is also responsible for making sure options get configured and validated. -* Options for collection rules can be found [here](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/Options/CollectionRuleOptions.cs). -* Rules are applied, removed, and restarted in response to configuration changes [here](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/CollectionRuleService.cs). This is also responsible for generating a description of each collection rule's state for the `/collectionrules` API Endpoint. -* The pipeline responsible for the lifetime of a single executing collection rule can be found [here](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/CollectionRulePipeline.cs#L55). -* To run collection rules, `dotnet monitor` must be in `Listen` mode - this is set via [DiagnosticPortOptions](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Microsoft.Diagnostics.Monitoring.Options/DiagnosticPortOptions.cs). +* Collection rules are registered [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/ServiceCollectionExtensions.cs#L140). When adding a new trigger or action, these types need to be added here to take effect. This section is also responsible for making sure options get configured and validated. +* Options for collection rules can be found [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/Options/CollectionRuleOptions.cs). +* Rules are applied, removed, and restarted in response to configuration changes [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/CollectionRuleService.cs). 
This is also responsible for generating a description of each collection rule's state for the `/collectionrules` API Endpoint. +* The pipeline responsible for the lifetime of a single executing collection rule can be found [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/CollectionRulePipeline.cs#L54). +* To run collection rules, `dotnet monitor` must be in `Listen` mode - this is set via [DiagnosticPortOptions](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Microsoft.Diagnostics.Monitoring.Options/DiagnosticPortOptions.cs). * For each type of trigger, the [dotnet diagnostics repo](https://github.com/dotnet/diagnostics/blob/v6.0.351802/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/ITraceEventTrigger.cs#L29) is responsible for determining whether the triggering conditions have been satisfied. ### Triggers -A trigger will monitor for a specific condition in the target application and raise a notification when that condition has been observed. Options for triggers can be found [here](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/Options/CollectionRuleTriggerOptions.cs); the type of `Settings` is determined by which trigger is being used (possible trigger types can be found [here](https://github.com/dotnet/dotnet-monitor/tree/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/Options/Triggers)). The interface for all triggers can be found [here](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/Triggers/ICollectionRuleTrigger.cs) - this allows `dotnet monitor` to start and stop triggers, regardless of the trigger's properties. 
The collection rule pipeline creates instances of triggers [here](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/CollectionRulePipeline.cs#L100) before waiting for the trigger to [satisfy its conditions](https://github.com/dotnet/diagnostics/blob/v6.0.351802/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/Pipelines/TraceEventTriggerPipeline.cs#L107) - each trigger has its own set of criteria that determines when a trigger has been satisfied. +A trigger will monitor for a specific condition in the target application and raise a notification when that condition has been observed. Options for triggers can be found [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/Options/CollectionRuleTriggerOptions.cs); the type of `Settings` is determined by which trigger is being used (possible trigger types can be found [here](https://github.com/dotnet/dotnet-monitor/tree/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/Options/Triggers)). The interface for all triggers can be found [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/Triggers/ICollectionRuleTrigger.cs) - this allows `dotnet monitor` to start and stop triggers, regardless of the trigger's properties. The collection rule pipeline creates instances of triggers [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/CollectionRulePipeline.cs#L99) before waiting for the trigger to [satisfy its conditions](https://github.com/dotnet/diagnostics/blob/v6.0.351802/src/Microsoft.Diagnostics.Monitoring.EventPipe/Triggers/Pipelines/TraceEventTriggerPipeline.cs#L107) - each trigger has its own set of criteria that determines when a trigger has been satisfied. 
### Actions -Actions allow executing an operation or an external executable in response to a trigger condition being satisfied. Options for actions can be found [here](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/Options/CollectionRuleActionOptions.cs); the type of `Settings` is determined by which action is being used (possible action types can be found [here](https://github.com/dotnet/dotnet-monitor/tree/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/Options/Actions)). The interface for all actions can be found [here](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/Actions/ICollectionRuleAction.cs) - this allows `dotnet monitor` to start an action, wait for it to complete, and get its output values regardless of the action's properties. The action list is [executed](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/CollectionRulePipeline.cs#L150) once the triggering condition has been met (assuming the action list isn't throttled), with each action by default starting without waiting for prior actions to complete. +Actions allow executing an operation or an external executable in response to a trigger condition being satisfied. Options for actions can be found [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/Options/CollectionRuleActionOptions.cs); the type of `Settings` is determined by which action is being used (possible action types can be found [here](https://github.com/dotnet/dotnet-monitor/tree/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/Options/Actions)). 
The interface for all actions can be found [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/Actions/ICollectionRuleAction.cs) - this allows `dotnet monitor` to start an action, wait for it to complete, and get its output values regardless of the action's properties. The action list is [executed](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/CollectionRulePipeline.cs#L149) once the triggering condition has been met (assuming the action list isn't throttled), with each action by default starting without waiting for prior actions to complete. ### Filters -Filters can optionally be applied to a collection rule to choose which processes can trigger the rule. This uses the same set of [options](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Microsoft.Diagnostics.Monitoring.Options/ProcessFilterOptions.cs#L47) as setting the default process for `dotnet-monitor`. When starting a collection rule, [these filters are used to check if the current process should have the collection rule applied to it](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/CollectionRuleContainer.cs#L189); if so, the collection rule starts. +Filters can optionally be applied to a collection rule to choose which processes can trigger the rule. This uses the same set of [options](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Microsoft.Diagnostics.Monitoring.Options/ProcessFilterOptions.cs#L47) as setting the default process for `dotnet-monitor`. 
When starting a collection rule, [these filters are used to check if the current process should have the collection rule applied to it](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/CollectionRuleContainer.cs#L187); if so, the collection rule starts. ### Limits -Limits can optionally be applied to a collection rule to constrain the lifetime of the rule and how often its actions can be run before being throttled. Options for limits can be found [here](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/Options/CollectionRuleLimitsOptions.cs). When provided (or when using default values), limits are evaluated in the collection rule pipeline while running. `RuleDuration` is used to [create a token](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/CollectionRulePipeline.cs#L80) that shuts down the pipeline. `ActionCountSlidingWindowDuration` does not rely on setting cancellation tokens; rather, the number of executions within the sliding window are checked on-demand [here](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Microsoft.Diagnostics.Monitoring.WebApi/CollectionRulePipelineState.cs#L212), and `ActionCount` is referenced to determine whether the rule needs to [terminate](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Microsoft.Diagnostics.Monitoring.WebApi/CollectionRulePipelineState.cs#L195) or [throttle](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Microsoft.Diagnostics.Monitoring.WebApi/CollectionRulePipelineState.cs#L235). +Limits can optionally be applied to a collection rule to constrain the lifetime of the rule and how often its actions can be run before being throttled. 
Options for limits can be found [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/Options/CollectionRuleLimitsOptions.cs). When provided (or when using default values), limits are evaluated in the collection rule pipeline while running. `RuleDuration` is used to [create a token](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/CollectionRulePipeline.cs#L79) that shuts down the pipeline. `ActionCountSlidingWindowDuration` does not rely on setting cancellation tokens; rather, the number of executions within the sliding window are checked on-demand [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Microsoft.Diagnostics.Monitoring.WebApi/CollectionRulePipelineState.cs#L211), and `ActionCount` is referenced to determine whether the rule needs to [terminate](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Microsoft.Diagnostics.Monitoring.WebApi/CollectionRulePipelineState.cs#L194) or [throttle](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Microsoft.Diagnostics.Monitoring.WebApi/CollectionRulePipelineState.cs#L234). ## Miscellaneous ### Trigger Shortcuts -Trigger Shortcuts provide improved defaults, range validation, and a simpler syntax for [several commonly used `EventCounter` triggers](https://github.com/dotnet/dotnet-monitor/tree/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/Options/Triggers/EventCounterShortcuts). 
These shortcuts provide the same functionality as using the standard `EventCounter` syntax, but have fewer available options (since there is no need to specify the `ProviderName` or the `CounterName`) - as a result, shortcuts do not inherit from `EventCounterOptions`, but rather [IEventCounterShortcuts](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/Options/Triggers/EventCounterShortcuts/IEventCounterShortcuts.cs). Each type of shortcut is registered independently [here](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Tools/dotnet-monitor/ServiceCollectionExtensions.cs#L117). After binding with configuration and undergoing validation, shortcuts are then converted to be treated as `EventCounter` triggers [here](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/Triggers/EventCounterTriggerFactory.cs), using their respective defaults instead of the generic ones. +Trigger Shortcuts provide improved defaults, range validation, and a simpler syntax for [several commonly used `EventCounter` triggers](https://github.com/dotnet/dotnet-monitor/tree/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/Options/Triggers/EventCounterShortcuts). These shortcuts provide the same functionality as using the standard `EventCounter` syntax, but have fewer available options (since there is no need to specify the `ProviderName` or the `CounterName`) - as a result, shortcuts do not inherit from `EventCounterOptions`, but rather [IEventCounterShortcuts](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/Options/Triggers/EventCounterShortcuts/IEventCounterShortcuts.cs). Each type of shortcut is registered independently [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/ServiceCollectionExtensions.cs#L158). 
After binding with configuration and undergoing validation, shortcuts are then converted to be treated as `EventCounter` triggers [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/Triggers/EventCounterTriggerFactory.cs), using their respective defaults instead of the generic ones. ### Templates -Templates allow users to design reusable collection rule components by associating a name with a piece of configuration. Options for templates can be found [here](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/Options/TemplateOptions.cs). Before collection rules undergo validation, `dotnet monitor` checks to see if any of the rule's components in configuration [list the name of a template](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/Configuration/CollectionRulePostConfigureNamedOptions.cs) - if so, the collection rule's options are populated from the correspondingly named template. Note that templates undergo the same binding process for triggers/actions as collection rules; however, since templates are treated as separate parts of configuration, this binding instead happens [here](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/Configuration/TemplatesConfigureNamedOptions.cs). +Templates allow users to design reusable collection rule components by associating a name with a piece of configuration. Options for templates can be found [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/Options/TemplateOptions.cs). 
Before collection rules undergo validation, `dotnet monitor` checks to see if any of the rule's components in configuration [list the name of a template](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/Configuration/CollectionRulePostConfigureNamedOptions.cs) - if so, the collection rule's options are populated from the correspondingly named template. Note that templates undergo the same binding process for triggers/actions as collection rules; however, since templates are treated as separate parts of configuration, this binding instead happens [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/Configuration/TemplatesConfigureNamedOptions.cs). ### Collection Rule Defaults -Defaults can be used to limit the verbosity of configuration, allowing frequently used values for collection rules to be assigned as defaults. Options for collection rule defaults can be found [here](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/Options/CollectionRuleDefaultsOptions.cs). These defaults are merged with the user's provided configuration [here](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Tools/dotnet-monitor/CollectionRules/Options/DefaultCollectionRulePostConfigureOptions.cs) - any properties that the user hasn't set (that have corresponding default values) will be updated at this point to use the default values. This step occurs prior to `dotnet monitor` attempting to use its built-in defaults, which allows user defaults to take precedence. +Defaults can be used to limit the verbosity of configuration, allowing frequently used values for collection rules to be assigned as defaults. 
Options for collection rule defaults can be found [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/Options/CollectionRuleDefaultsOptions.cs). These defaults are merged with the user's provided configuration [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/CollectionRules/Options/DefaultCollectionRulePostConfigureOptions.cs) - any properties that the user hasn't set (that have corresponding default values) will be updated at this point to use the default values. This step occurs prior to `dotnet monitor` attempting to use its built-in defaults, which allows user defaults to take precedence. ### Collection Rule API Endpoint -The Collection Rule API Endpoint allows users to get information about the state of their collection rules, providing general information [here](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Microsoft.Diagnostics.Monitoring.WebApi/Controllers/DiagController.cs#L532) and more specific information about a particular rule [here](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Microsoft.Diagnostics.Monitoring.WebApi/Controllers/DiagController.cs#L557). **This API is solely for viewing the current state of rules, not altering state**. +The Collection Rule API Endpoint allows users to get information about the state of their collection rules, providing general information [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Microsoft.Diagnostics.Monitoring.WebApi/Controllers/DiagController.cs#L525) and more specific information about a particular rule [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Microsoft.Diagnostics.Monitoring.WebApi/Controllers/DiagController.cs#L550). **This API is solely for viewing the current state of rules, not altering state**. 
-Each collection rule pipeline has a [state holder](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/src/Microsoft.Diagnostics.Monitoring.WebApi/CollectionRulePipelineState.cs) that keeps track of the rule's execution. By keeping track of the pipeline's state in real-time, this state doesn't need to be calculated in response to a user hitting the `/collectionrules` endpoint. However, other user-facing information, such as countdowns, are calculated on-demand - these values are solely for display purposes and not used by `dotnet-monitor` when determining when to change state (see [Limits](#limits) for more information). +Each collection rule pipeline has a [state holder](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Microsoft.Diagnostics.Monitoring.WebApi/CollectionRulePipelineState.cs) that keeps track of the rule's execution. By keeping track of the pipeline's state in real-time, this state doesn't need to be calculated in response to a user hitting the `/collectionrules` endpoint. However, other user-facing information, such as countdowns, are calculated on-demand - these values are solely for display purposes and not used by `dotnet-monitor` when determining when to change state (see [Limits](#limits) for more information). ## Keeping Documentation Up-To-Date -When making changes to collection rules that require updates to configuration, these changes should be added [here](https://github.com/dotnet/dotnet-monitor/blob/v7.0.1/documentation/configuration.md#collection-rule-configuration). Additional information on collection rules and examples can be provided [here](https://github.com/dotnet/dotnet-monitor/tree/v7.0.1/documentation/collectionrules). +When making changes to collection rules that require updates to configuration, these changes should be added [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/documentation/configuration/collection-rule-configuration.md). 
Additional information on collection rules and examples can be provided [here](https://github.com/dotnet/dotnet-monitor/tree/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/documentation/collectionrules). diff --git a/documentation/learningPath/configuration.md b/documentation/learningPath/configuration.md index 53242df9928..1c5cacf3fc9 100644 --- a/documentation/learningPath/configuration.md +++ b/documentation/learningPath/configuration.md @@ -6,22 +6,22 @@ ## How Configuration Works -`dotnet-monitor` accepts configuration from several different sources, and must [combine these sources for the host builder](https://github.com/dotnet/dotnet-monitor/blob/ba8c36235943562581b666e74ef07954313eda56/src/Tools/dotnet-monitor/HostBuilder/HostBuilderHelper.cs#L46). Configuration sources are added in the order of lowest to highest precedence - meaning that if there is a conflict between a property in two configuration sources, the property found in the latter configuration source will be used. +`dotnet-monitor` accepts configuration from several different sources, and must [combine these sources for the host builder](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/HostBuilder/HostBuilderHelper.cs#L47). Configuration sources are added in the order of lowest to highest precedence - meaning that if there is a conflict between a property in two configuration sources, the property found in the latter configuration source will be used. -To see the merged configuration, the user can run the `config show` command (see [here](https://github.com/dotnet/dotnet-monitor/blob/ba8c36235943562581b666e74ef07954313eda56/src/Tools/dotnet-monitor/Program.cs#L69) and [here](https://github.com/dotnet/dotnet-monitor/blob/ba8c36235943562581b666e74ef07954313eda56/src/Tools/dotnet-monitor/Commands/ConfigShowCommandHandler.cs)); the `--show-sources` flag can be used to reveal which configuration source is responsible for each property. 
The `config show` command's output is [written out as JSON](https://github.com/dotnet/dotnet-monitor/blob/ba8c36235943562581b666e74ef07954313eda56/src/Tools/dotnet-monitor/ConfigurationJsonWriter.cs); this section must be manually updated whenever new options are added (or existing options are changed). +To see the merged configuration, the user can run the `config show` command (see [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/Program.cs#L68) and [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/Commands/ConfigShowCommandHandler.cs)); the `--show-sources` flag can be used to reveal which configuration source is responsible for each property. The `config show` command's output is [written out as JSON](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/ConfigurationJsonWriter.cs); this section must be manually updated whenever new options are added (or existing options are changed). -Once configuration has been merged, any singletons that have been added to the `IServiceCollection` (see [here](https://github.com/dotnet/dotnet-monitor/blob/ba8c36235943562581b666e74ef07954313eda56/src/Tools/dotnet-monitor/ServiceCollectionExtensions.cs) and [here](https://github.com/dotnet/dotnet-monitor/blob/ba8c36235943562581b666e74ef07954313eda56/src/Tools/dotnet-monitor/Commands/CollectCommandHandler.cs#L80)), such as `IConfigureOptions`, `IPostConfigureOptions`, and `IValidateOptions`, are called when an object of that type is first used, **not on startup**. This step is often used to incorporate defaults for properties that were not explicitly set by configuration, or to validate that options were set correctly. 
+Once configuration has been merged, any singletons that have been added to the `IServiceCollection` (see [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/ServiceCollectionExtensions.cs) and [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/Commands/CollectCommandHandler.cs#L85)), such as `IConfigureOptions`, `IPostConfigureOptions`, and `IValidateOptions`, are called when an object of that type is first used, **not on startup**. This step is often used to incorporate defaults for properties that were not explicitly set by configuration, or to validate that options were set correctly. -Any changes to the configuration need to be propagated to the [schema](https://github.com/dotnet/dotnet-monitor/blob/ba8c36235943562581b666e74ef07954313eda56/documentation/schema.json). **The updated schema should be generated automatically; you should never need to manually edit the JSON.** To update the schema in Visual Studio: -* Set [Microsoft.Diagnostics.Monitoring.ConfigurationSchema](https://github.com/dotnet/dotnet-monitor/tree/ba8c36235943562581b666e74ef07954313eda56/src/Tests/Microsoft.Diagnostics.Monitoring.ConfigurationSchema) as the startup project +Any changes to the configuration need to be propagated to the [schema](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/documentation/schema.json). 
**The updated schema should be generated automatically; you should never need to manually edit the JSON.** To update the schema in Visual Studio: +* Set [Microsoft.Diagnostics.Monitoring.ConfigurationSchema](https://github.com/dotnet/dotnet-monitor/tree/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.ConfigurationSchema) as the startup project * Build the project, with a single command-line argument for the schema's absolute path -* Validate that the schema was correctly updated using the tests in [Microsoft.Diagnostics.Monitoring.ConfigurationSchema.UnitTests](https://github.com/dotnet/dotnet-monitor/tree/ba8c36235943562581b666e74ef07954313eda56/src/Tests/Microsoft.Diagnostics.Monitoring.ConfigurationSchema.UnitTests) +* Validate that the schema was correctly updated using the tests in [Microsoft.Diagnostics.Monitoring.ConfigurationSchema.UnitTests](https://github.com/dotnet/dotnet-monitor/tree/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.ConfigurationSchema.UnitTests) ## Keeping Documentation Up-To-Date -Our configuration is primarily documented [here](https://github.com/dotnet/dotnet-monitor/tree/main/documentation/configuration). Sections are typically comprised of: +Our configuration is primarily documented [here](https://github.com/dotnet/dotnet-monitor/tree/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/documentation/configuration). Sections are typically comprised of: * A brief overview of the feature that is being configured * Configuration samples in all supported formats * A list of properties with descriptions, types, and whether a property is required -Types are defined in [definitions.md](https://github.com/dotnet/dotnet-monitor/blob/main/documentation/api/definitions.md), and additional information about configuring collection rules can be found in the [collection rules](https://github.com/dotnet/dotnet-monitor/blob/main/documentation/collectionrules) directory. 
Where appropriate, indicate if configuration only pertains to a specific version of `dotnet-monitor` (e.g. `7.0+`). +Types are defined in [definitions.md](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/documentation/api/definitions.md), and additional information about configuring collection rules can be found in the [collection rules](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/documentation/collectionrules) directory. Where appropriate, indicate if configuration only pertains to a specific version of `dotnet-monitor` (e.g. `7.0+`). diff --git a/documentation/learningPath/egress.md b/documentation/learningPath/egress.md index db4d712c490..36b9c0894c0 100644 --- a/documentation/learningPath/egress.md +++ b/documentation/learningPath/egress.md @@ -26,11 +26,11 @@ graph LR class ide2 altColor ``` -1. [User initiates collection of artifact with a designated egress provider](https://github.com/dotnet/dotnet-monitor/blob/289105261537f3977f7d1886f936d19bb3639d46/src/Microsoft.Diagnostics.Monitoring.WebApi/Operation/EgressOperation.cs#L49) -1. [Locate extension's executable and manifest](https://github.com/dotnet/dotnet-monitor/blob/289105261537f3977f7d1886f936d19bb3639d46/src/Tools/dotnet-monitor/Extensibility/ExtensionDiscoverer.cs#L28) -1. [Start extension and pass configuration/artifact via StdIn to the other process](https://github.com/dotnet/dotnet-monitor/blob/289105261537f3977f7d1886f936d19bb3639d46/src/Tools/dotnet-monitor/Egress/Extension/EgressExtension.cs#L102) -1. [Connect to egress provider using configuration and send artifact](https://github.com/dotnet/dotnet-monitor/blob/289105261537f3977f7d1886f936d19bb3639d46/src/Extensions/AzureBlobStorage/AzureBlobEgressProvider.cs#L35) -1. 
[Provide success/failure information via StdOut to dotnet-monitor](https://github.com/dotnet/dotnet-monitor/blob/289105261537f3977f7d1886f936d19bb3639d46/src/Microsoft.Diagnostics.Monitoring.Extension.Common/EgressHelper.cs#L77) +1. [User initiates collection of artifact with a designated egress provider](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Microsoft.Diagnostics.Monitoring.WebApi/Operation/EgressOperation.cs#L45) +1. [Locate extension's executable and manifest](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/Extensibility/ExtensionDiscoverer.cs#L28) +1. [Start extension and pass configuration/artifact via StdIn to the other process](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/Egress/Extension/EgressExtension.cs#L102) +1. [Connect to egress provider using configuration and send artifact](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Extensions/AzureBlobStorage/AzureBlobEgressProvider.cs#L36) +1. 
[Provide success/failure information via StdOut to dotnet-monitor](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Microsoft.Diagnostics.Monitoring.Extension.Common/EgressHelper.cs#L77) ## Distribution and Acquisition Model @@ -41,7 +41,7 @@ There are two versions of the `dotnet-monitor` image being offered: `monitor` an ### Well Known Egress Provider Locations -There are 3 [locations](https://github.com/dotnet/dotnet-monitor/blob/289105261537f3977f7d1886f936d19bb3639d46/src/Tools/dotnet-monitor/ServiceCollectionExtensions.cs#L260) that `dotnet-monitor` scans when looking for the extensions directory (the highest priority location is listed first): +There are 3 [locations](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/ServiceCollectionExtensions.cs#L279) that `dotnet-monitor` scans when looking for the extensions directory (the highest priority location is listed first): - Next to the executing `dotnet-monitor` assembly - SharedConfigDirectory - On Windows, `%ProgramData%\dotnet-monitor` @@ -59,23 +59,23 @@ The distribution/acquisition model for third-party egress providers is determine ### Extension Manifest -All extensions must include a manifest titled [`extension.json`](https://github.com/dotnet/dotnet-monitor/blob/289105261537f3977f7d1886f936d19bb3639d46/src/Extensions/AzureBlobStorage/extension.json) that provides `dotnet-monitor` with some basic information about the extension. +All extensions must include a manifest titled [`extension.json`](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Extensions/AzureBlobStorage/extension.json) that provides `dotnet-monitor` with some basic information about the extension. | Name | Required | Type | Description | |---|---|---|---| | `Name` | true | string | The name of the extension (e.g. 
AzureBlobStorage) that users will use when writing configuration for the egress provider. | | `ExecutableFileName` | false | string | If specified, the executable file (without extension) to be launched when executing the extension; either `AssemblyFileName` or `ExecutableFileName` must be specified. | | `AssemblyFileName` | false | string | If specified, executes the extension using the shared .NET host (e.g. dotnet.exe) with the specified entry point assembly (without extension); either `AssemblyFileName` or `ExecutableFileName` must be specified. | -| `Modes` | false | [[ExtensionMode](../api/definitions.md#extensionmode)] | Additional modes the extension can be configured to run in (see an example of Validation [here](https://github.com/dotnet/dotnet-monitor/blob/289105261537f3977f7d1886f936d19bb3639d46/src/Microsoft.Diagnostics.Monitoring.Extension.Common/EgressHelper.cs#L80)). | +| `Modes` | false | [[ExtensionMode](../api/definitions.md#extensionmode)] | Additional modes the extension can be configured to run in (see an example of Validation [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Microsoft.Diagnostics.Monitoring.Extension.Common/EgressHelper.cs#L80)). | ### Configuration Extensions are designed to receive all user configuration through `dotnet monitor` - the extension itself should not rely on any additional configuration sources. -In addition to the configuration provided specifically for your egress provider, `dotnet-monitor` also includes the values stored in [`Properties`](https://github.com/dotnet/dotnet-monitor/blob/289105261537f3977f7d1886f936d19bb3639d46/src/Microsoft.Diagnostics.Monitoring.Options/EgressOptions.cs#L21). Note that `Properties` may include information that is not relevant to the current egress provider, since it is a shared bucket between all configured egress providers. 
+In addition to the configuration provided specifically for your egress provider, `dotnet-monitor` also includes the values stored in [`Properties`](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Microsoft.Diagnostics.Monitoring.Options/EgressOptions.cs#L21). Note that `Properties` may include information that is not relevant to the current egress provider, since it is a shared bucket between all configured egress providers. ### Communicating With Dotnet-Monitor -[`dotnet monitor` will pass serialized configuration via `StdIn` to the extension](https://github.com/dotnet/dotnet-monitor/blob/289105261537f3977f7d1886f936d19bb3639d46/src/Tools/dotnet-monitor/Egress/Extension/EgressExtension.cs#L182); an example of how the `AzureBlobStorage` egress provider interprets the egress payload can be found [here](https://github.com/dotnet/dotnet-monitor/blob/289105261537f3977f7d1886f936d19bb3639d46/src/Microsoft.Diagnostics.Monitoring.Extension.Common/EgressHelper.cs#L139). **It's important to validate the version number at the beginning of the stream; if an extension does not have the same version as `dotnet-monitor`, it should not attempt to continue reading from the stream, and users may need to update to a newer version of the extension.** +[`dotnet monitor` will pass serialized configuration via `StdIn` to the extension](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/Egress/Extension/EgressExtension.cs#L182); an example of how the `AzureBlobStorage` egress provider interprets the egress payload can be found [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Microsoft.Diagnostics.Monitoring.Extension.Common/EgressHelper.cs#L141). 
**It's important to validate the version number at the beginning of the stream; if an extension does not have the same version as `dotnet-monitor`, it should not attempt to continue reading from the stream, and users may need to update to a newer version of the extension.** -All output from the extension will be passed back to `dotnet-monitor`; this is logged [here](https://github.com/dotnet/dotnet-monitor/blob/289105261537f3977f7d1886f936d19bb3639d46/src/Tools/dotnet-monitor/Egress/Extension/EgressExtension.OutputParser.cs#L62). The contents of the `StandardOutput` and `StandardError` streams are handled and logged as seen [here](https://github.com/dotnet/dotnet-monitor/blob/289105261537f3977f7d1886f936d19bb3639d46/src/Tools/dotnet-monitor/Egress/Extension/EgressExtension.OutputParser.cs#L32), with the `StandardOutput` stream being logged at the `Info` level and the `StandardError` stream being logged at the `Warning` level. `Dotnet-Monitor` will continue reading output until it receives a [result](https://github.com/dotnet/dotnet-monitor/blob/289105261537f3977f7d1886f936d19bb3639d46/src/Tools/dotnet-monitor/Egress/Extension/EgressArtifactResult.cs) from the extension via the `StandardOutput` stream, at which point the extension's process will be terminated and `dotnet-monitor` will display the appropriate log message depending on the success/failure of the operation. Exceptions thrown during the egress operation are caught [here](https://github.com/dotnet/dotnet-monitor/blob/289105261537f3977f7d1886f936d19bb3639d46/src/Microsoft.Diagnostics.Monitoring.Extension.Common/EgressHelper.cs#L53); this allows the extension to report a failure message back to `dotnet-monitor` that will be displayed to the user. +All output from the extension will be passed back to `dotnet-monitor`; this is logged [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/Egress/Extension/EgressExtension.OutputParser.cs#L62). 
The contents of the `StandardOutput` and `StandardError` streams are handled and logged as seen [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/Egress/Extension/EgressExtension.OutputParser.cs#L32), with the `StandardOutput` stream being logged at the `Info` level and the `StandardError` stream being logged at the `Warning` level. `Dotnet-Monitor` will continue reading output until it receives a [result](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tools/dotnet-monitor/Egress/Extension/EgressArtifactResult.cs) from the extension via the `StandardOutput` stream, at which point the extension's process will be terminated and `dotnet-monitor` will display the appropriate log message depending on the success/failure of the operation. Exceptions thrown during the egress operation are caught [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Microsoft.Diagnostics.Monitoring.Extension.Common/EgressHelper.cs#L71); this allows the extension to report a failure message back to `dotnet-monitor` that will be displayed to the user. diff --git a/documentation/learningPath/testing.md b/documentation/learningPath/testing.md index 38a36633348..9af61a15dd3 100644 --- a/documentation/learningPath/testing.md +++ b/documentation/learningPath/testing.md @@ -7,81 +7,81 @@ Tests can be executed with the command line (via [build.cmd](../../Build.cmd) -test), as part of the PR build, or in Visual Studio. Note that because of limited resources in the build pool, tests run from the command line or in the build pool are serialized. This avoids test failures associated with parallel testing. Visual Studio does not have such restrictions and is best used for individual tests and test investigations. When running from the command line, the `-testgroup` parameter can be used to limit the number of tests executed.
For example `build.cmd -test -testgroup PR` will run the same tests as the PR build. -The framework of the test assemblies is controlled by [TestTargetFrameworks](../../eng/Versions.props). The test itself is attributed with a particular framework based on the [TargetFrameworkMonikerTraitAttribute](../../src/Tests/Microsoft.Diagnostics.Monitoring.TestCommon/TargetFrameworkMonikerTraitAttribute.cs). +The framework of the test assemblies is controlled by [TestTargetFrameworks](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/eng/Versions.props). The test itself is attributed with a particular framework based on the [TargetFrameworkMonikerTraitAttribute](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.TestCommon/TargetFrameworkMonikerTraitAttribute.cs). ## Unit Tests -- [Microsoft.Diagnostics.Monitoring.Tool.UnitTests](../../src/Tests/Microsoft.Diagnostics.Monitoring.Tool.UnitTests) -- [Microsoft.Diagnostics.Monitoring.WebApi.UnitTests](../../src/Tests/Microsoft.Diagnostics.Monitoring.WebApi.UnitTests/) -- [CollectionRuleActions.UnitTests](../../src/Tests/CollectionRuleActions.UnitTests/) +- [Microsoft.Diagnostics.Monitoring.Tool.UnitTests](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.UnitTests) +- [Microsoft.Diagnostics.Monitoring.WebApi.UnitTests](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.WebApi.UnitTests/) +- [CollectionRuleActions.UnitTests](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/CollectionRuleActions.UnitTests/) Unit test assemblies directly reference types from various dotnet-monitor assemblies. 
However, since most of dotnet-monitor heavily relies on code injection, there are utility classes to simplify unit test creation. -- [TestHostHelper](../../src/Tests/Microsoft.Diagnostics.Monitoring.Tool.UnitTestCommon/TestHostHelper.cs) can be used to setup a basic unit test scenario using dependency injection. -- [CollectionRuleOptionsExtensions](../../src/Tests/Microsoft.Diagnostics.Monitoring.Tool.UnitTestCommon/Options/CollectionRuleOptionsExtensions.cs) can be used to easily create collection rules from configuration. +- [TestHostHelper](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.UnitTestCommon/TestHostHelper.cs) can be used to set up a basic unit test scenario using dependency injection. +- [CollectionRuleOptionsExtensions](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.UnitTestCommon/Options/CollectionRuleOptionsExtensions.cs) can be used to easily create collection rules from configuration. ## Functional Tests -- [Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests](../../src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests) -- [Microsoft.Diagnostics.Monitoring.UnitTestApp](../../src/Tests/Microsoft.Diagnostics.Monitoring.UnitTestApp/) +- [Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests) +- [Microsoft.Diagnostics.Monitoring.UnitTestApp](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.UnitTestApp/) Functional tests are composed of 3 main parts: 1. The test itself, which sets up and validates the results. 1. An instance of dotnet-monitor 1.
An instance of an application that is being monitored (from the UnitTestApp assembly) -* [ScenarioRunner](../../src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/Runners/ScenarioRunner.cs) is typically used to orchestrate test runs. The class will spawn both an instance of dotnet-monitor and an instance of test application. The app and the test communicate via stdio. The test communicates with dotnet-monitor via its Api surface. -* The dotnet-monitor Api surface can be accessed through the [ApiClient](../../src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/HttpApi/ApiClient.cs). -* New scenarios can be added [here](../../src/Tests/Microsoft.Diagnostics.Monitoring.UnitTestApp/Scenarios/). -* The [AsyncWaitScenario](../../src/Tests/Microsoft.Diagnostics.Monitoring.UnitTestApp/Scenarios/AsyncWaitScenario.cs) is sufficient for most tests. -* Coordination of the scenario and the test is done via message passing (json over stdio) between the test and the app. To send messages to the app from the test, [AppRunner](../../src/Tests/Microsoft.Diagnostics.Monitoring.TestCommon/Runners/AppRunner.cs)'s `SendCommandAsync` is used. In the scenario definition, [ScenarioHelpers](../../src/Tests/Microsoft.Diagnostics.Monitoring.UnitTestApp/ScenarioHelpers.cs)'s `WaitForCommandAsync` is used. This can be used to synchronize various points of the test application with the execution of the dotnet-monitor Api from the test itself. +* [ScenarioRunner](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/Runners/ScenarioRunner.cs) is typically used to orchestrate test runs. The class will spawn both an instance of dotnet-monitor and an instance of test application. The app and the test communicate via stdio. The test communicates with dotnet-monitor via its Api surface. 
+* The dotnet-monitor Api surface can be accessed through the [ApiClient](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/HttpApi/ApiClient.cs). +* New scenarios can be added [here](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.UnitTestApp/Scenarios/). +* The [AsyncWaitScenario](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.UnitTestApp/Scenarios/AsyncWaitScenario.cs) is sufficient for most tests. +* Coordination of the scenario and the test is done via message passing (json over stdio) between the test and the app. To send messages to the app from the test, [AppRunner](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.TestCommon/Runners/AppRunner.cs)'s `SendCommandAsync` is used. In the scenario definition, [ScenarioHelpers](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.UnitTestApp/ScenarioHelpers.cs)'s `WaitForCommandAsync` is used. This can be used to synchronize various points of the test application with the execution of the dotnet-monitor Api from the test itself. 
## Native/Profiler Tests -- [Microsoft.Diagnostics.Monitoring.Profiler.UnitTests](../../src/Tests/Microsoft.Diagnostics.Monitoring.Profiler.UnitTests/) -- [Microsoft.Diagnostics.Monitoring.Profiler.UnitTestApp](../../src/Tests/Microsoft.Diagnostics.Monitoring.Profiler.UnitTestApp/) +- [Microsoft.Diagnostics.Monitoring.Profiler.UnitTests](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.Profiler.UnitTests/) +- [Microsoft.Diagnostics.Monitoring.Profiler.UnitTestApp](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.Profiler.UnitTestApp/) This test assembly provides a test to make sure the dotnet-monitor profiler can load into a target app. ## Schema Generation -- [Microsoft.Diagnostics.Monitoring.ConfigurationSchema.UnitTests](../../src/Tests/Microsoft.Diagnostics.Monitoring.ConfigurationSchema.UnitTests/) -- [Microsoft.Diagnostics.Monitoring.ConfigurationSchema](../../src/Tests/Microsoft.Diagnostics.Monitoring.ConfigurationSchema/) -- [Microsoft.Diagnostics.Monitoring.Options](../../src/Microsoft.Diagnostics.Monitoring.Options) +- [Microsoft.Diagnostics.Monitoring.ConfigurationSchema.UnitTests](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.ConfigurationSchema.UnitTests/) +- [Microsoft.Diagnostics.Monitoring.ConfigurationSchema](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.ConfigurationSchema/) +- [Microsoft.Diagnostics.Monitoring.Options](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Microsoft.Diagnostics.Monitoring.Options) -Dotnet-monitor generates [schema.json](../../documentation/schema.json) using unit tests. If dotnet-monitor's configuration changes, the schema.json file needs to be updated. 
-Note that it is possible to compile option classes directly into the `ConfigurationSchema` project. This may be necessary in order to attribute properties appropriately for schema generation. See [Microsoft.Diagnostics.Monitoring.ConfigurationSchema.csproj](../../src/Tests/Microsoft.Diagnostics.Monitoring.ConfigurationSchema/Microsoft.Diagnostics.Monitoring.ConfigurationSchema.csproj). See the [Configuration](./configuration.md#how-configuration-works) learning path for more details. +Dotnet-monitor generates [schema.json](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/documentation/schema.json) using unit tests. If dotnet-monitor's configuration changes, the schema.json file needs to be updated. +Note that it is possible to compile option classes directly into the `ConfigurationSchema` project. This may be necessary in order to attribute properties appropriately for schema generation. See [Microsoft.Diagnostics.Monitoring.ConfigurationSchema.csproj](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.ConfigurationSchema/Microsoft.Diagnostics.Monitoring.ConfigurationSchema.csproj). See the [Configuration](./configuration.md#how-configuration-works) learning path for more details. 
## OpenAPI generation -- [Microsoft.Diagnostics.Monitoring.OpenApiGen.UnitTests](../../src/Tests/Microsoft.Diagnostics.Monitoring.OpenApiGen.UnitTests/) -- [Microsoft.Diagnostics.Monitoring.OpenApiGen](../../src/Tests/Microsoft.Diagnostics.Monitoring.OpenApiGen/) +- [Microsoft.Diagnostics.Monitoring.OpenApiGen.UnitTests](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.OpenApiGen.UnitTests/) +- [Microsoft.Diagnostics.Monitoring.OpenApiGen](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.OpenApiGen/) -These assemblies and tests are used to generate the [OpenAPI spec](../../documentation/openapi.json) for the dotnet-monitor API. Changes to the dotnet-monitor api surface require updating `openapi.json`. +These assemblies and tests are used to generate the [OpenAPI spec](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/documentation/openapi.json) for the dotnet-monitor API. Changes to the dotnet-monitor api surface require updating `openapi.json`. If using VSCode or Codespaces, you can also use the `Regenerate openapi.json` task. ## Startup hooks / hosting startup -- [Microsoft.Diagnostics.Monitoring.Tool.TestStartupHook](../../src/Tests/Microsoft.Diagnostics.Monitoring.Tool.TestStartupHook/) +- [Microsoft.Diagnostics.Monitoring.Tool.TestStartupHook](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.TestStartupHook/) This assembly is injected into a dotnet-monitor runner (using `DOTNET_STARTUP_HOOKS`) to facilitate Assembly resolution during test runs. 
-- [Microsoft.Diagnostics.Monitoring.Tool.TestHostingStartup](../../src/Tests/Microsoft.Diagnostics.Monitoring.Tool.TestHostingStartup/) +- [Microsoft.Diagnostics.Monitoring.Tool.TestHostingStartup](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.TestHostingStartup/) Uses `ASPNETCORE_HOSTINGSTARTUPASSEMBLIES` to inject a service into dotnet-monitor during test time. This allows tests to locate files that are not normally part of the test deployment, such as the native profiler. -- [Microsoft.Diagnostics.Monitoring.StartupHook.UnitTests](../../src/Tests/Microsoft.Diagnostics.Monitoring.StartupHook.UnitTests/) +- [Microsoft.Diagnostics.Monitoring.StartupHook.UnitTests](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.StartupHook.UnitTests/) Unit tests around features that are injected via `DOTNET_STARTUP_HOOKS` into the target application. This currently includes the Exceptions History feature. ## Misc test assemblies -- [Microsoft.Diagnostics.Monitoring.TestCommon](../../src/Tests/Microsoft.Diagnostics.Monitoring.TestCommon/) +- [Microsoft.Diagnostics.Monitoring.TestCommon](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.TestCommon/) Utility classes that are shared between Unit Tests and Functional Tests. -- [Microsoft.Diagnostics.Monitoring.Tool.UnitTestCommon](../../src/Tests/Microsoft.Diagnostics.Monitoring.Tool.UnitTestCommon/) +- [Microsoft.Diagnostics.Monitoring.Tool.UnitTestCommon](https://github.com/dotnet/dotnet-monitor/blob/963f5824e2e4cef8fb6a969a03abb7d26e7eec56/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.UnitTestCommon/) Utility classes shared between unit test assemblies. 
diff --git a/documentation/openapi.json b/documentation/openapi.json index a58ee4ca9ff..ec77348cc84 100644 --- a/documentation/openapi.json +++ b/documentation/openapi.json @@ -1617,6 +1617,13 @@ }, "useDebuggerDisplayAttribute": { "type": "boolean" + }, + "captureLimit": { + "maximum": 2147483647, + "minimum": 1, + "type": "integer", + "format": "int32", + "nullable": true } }, "additionalProperties": false diff --git a/documentation/openapi.md b/documentation/openapi.md new file mode 100644 index 00000000000..9cdccf9c05f --- /dev/null +++ b/documentation/openapi.md @@ -0,0 +1,7 @@ +### Was this documentation helpful? [Share feedback](https://www.research.net/r/DGDQWXH?src=documentation%2Fopenapi) + +## OpenAPI documentation + +The OpenAPI definition for the dotnet-monitor API can be found at [openapi.json](openapi.json). This can be used to generate a client stub for calling the API from your own tools. + +Additionally, the document can be accessed from the root route ("/" path) when dotnet-monitor is running. This version of the document includes security scheme information if authentication has been configured. \ No newline at end of file diff --git a/documentation/releaseNotes/releaseNotes.v6.3.5.md b/documentation/releaseNotes/releaseNotes.v6.3.5.md new file mode 100644 index 00000000000..af631589440 --- /dev/null +++ b/documentation/releaseNotes/releaseNotes.v6.3.5.md @@ -0,0 +1,4 @@ +Today we are releasing the 6.3.5 build of the `dotnet monitor` tool. This release includes: +- Updated dependencies + +If you would like to provide additional feedback to the team [please fill out this survey](https://aka.ms/dotnet-monitor-survey?src=rn). diff --git a/documentation/releaseNotes/releaseNotes.v7.3.3.md b/documentation/releaseNotes/releaseNotes.v7.3.3.md new file mode 100644 index 00000000000..057256caf53 --- /dev/null +++ b/documentation/releaseNotes/releaseNotes.v7.3.3.md @@ -0,0 +1,4 @@ +Today we are releasing the 7.3.3 build of the `dotnet monitor` tool. 
This release includes: +- Updated dependencies + +If you would like to provide additional feedback to the team [please fill out this survey](https://aka.ms/dotnet-monitor-survey?src=rn). diff --git a/documentation/releaseNotes/releaseNotes.v8.0.1.md b/documentation/releaseNotes/releaseNotes.v8.0.1.md new file mode 100644 index 00000000000..a103051bef4 --- /dev/null +++ b/documentation/releaseNotes/releaseNotes.v8.0.1.md @@ -0,0 +1,7 @@ +Today we are releasing the 8.0.1 build of the `dotnet monitor` tool. This release includes: +- Updated dependencies +- ⚠️ Simplify Types in Plain Text Representation ([#5882](https://github.com/dotnet/dotnet-monitor/pull/5882)) + +\*⚠️ **_indicates a breaking change_** \ + +If you would like to provide additional feedback to the team [please fill out this survey](https://aka.ms/dotnet-monitor-survey?src=rn). diff --git a/documentation/releaseNotes/releaseNotes.v9.0.0-preview.1.md b/documentation/releaseNotes/releaseNotes.v9.0.0-preview.1.md new file mode 100644 index 00000000000..015928bcea2 --- /dev/null +++ b/documentation/releaseNotes/releaseNotes.v9.0.0-preview.1.md @@ -0,0 +1,8 @@ +Today we are releasing the next official preview version of the `dotnet monitor` tool. This release includes: + +- 🔬 Support capturing the following parameter types: generics, tuples, and nullable value types ([#5812](https://github.com/dotnet/dotnet-monitor/pull/5812)) +- Add Support for Meter Tags and Instrument Tags for System Diagnostics Metrics ([#5802](https://github.com/dotnet/dotnet-monitor/pull/5802)) + +\*🔬 **_indicates an experimental feature_** + +If you would like to provide additional feedback to the team [please fill out this survey](https://aka.ms/dotnet-monitor-survey?src=rn). 
diff --git a/documentation/releases.md b/documentation/releases.md index 676c9b52b9b..22614725b48 100644 --- a/documentation/releases.md +++ b/documentation/releases.md @@ -7,9 +7,9 @@ | Version | Original Release Date | Latest Patch Version | Patch Release Date | End of Support | Runtime Frameworks | | --- | --- | --- | --- | --- | --- | -| 8.0 | November 14, 2023 | [8.0.0](https://github.com/dotnet/dotnet-monitor/releases/tag/v8.0.0) | November 14, 2023 | | net8.0 | -| 7.3 | August 8, 2023 | [7.3.2](https://github.com/dotnet/dotnet-monitor/releases/tag/v7.3.2) | November 14, 2023 | | net6.0
net7.0 | -| 6.3 | October 11, 2022 | [6.3.4](https://github.com/dotnet/dotnet-monitor/releases/tag/v6.3.4) | November 14, 2023 | | net6.0
netcoreapp3.1 | +| 8.0 | November 14, 2023 | [8.0.1](https://github.com/dotnet/dotnet-monitor/releases/tag/v8.0.1) | February 13, 2024 | | net8.0 | +| 7.3 | August 8, 2023 | [7.3.3](https://github.com/dotnet/dotnet-monitor/releases/tag/v7.3.3) | February 13, 2024 | | net6.0
net7.0 | +| 6.3 | October 11, 2022 | [6.3.5](https://github.com/dotnet/dotnet-monitor/releases/tag/v6.3.5) | February 13, 2024 | | net6.0
netcoreapp3.1 | ## Out of support versions @@ -21,3 +21,10 @@ | 7.0 | November 11, 2022 | [7.0.2](https://github.com/dotnet/dotnet-monitor/releases/tag/v7.0.2) | February 14, 2023 | June 14, 2023 | net6.0
net7.0 | +## Preview versions + +| Version | Release Date | Latest Version | Runtime Frameworks | +| --- | --- | --- | --- | +| 9.0 | February 13, 2024 | [9.0.0 preview 1](https://github.com/dotnet/dotnet-monitor/releases/tag/v9.0.0-preview.1.24106.4) | net9.0 | + + diff --git a/documentation/schema.json b/documentation/schema.json index f618436f286..f35ff2401e7 100644 --- a/documentation/schema.json +++ b/documentation/schema.json @@ -494,13 +494,6 @@ "type": "string", "description": "The role required to be able to authenticate.", "minLength": 1 - }, - "SwaggerScope": { - "type": [ - "null", - "string" - ], - "description": "The API scope required by users to be able to interactively authenticate using the in-box Swagger UI. If not specified, users will not be able to interactively authenticate." } } }, @@ -1768,6 +1761,7 @@ ] }, "Filters": { + "description": "The filters that determine which exceptions should be included/excluded when collecting exceptions.", "oneOf": [ { "type": "null" diff --git a/documentation/swagger-ui.md b/documentation/swagger-ui.md deleted file mode 100644 index 8e1d8f5b14c..00000000000 --- a/documentation/swagger-ui.md +++ /dev/null @@ -1,15 +0,0 @@ -### Was this documentation helpful? [Share feedback](https://www.research.net/r/DGDQWXH?src=documentation%2Fswagger-ui) - -# Swagger API Explorer - -dotnet-monitor includes the Swagger UI for exploring the API surface of dotnet-monitor. It can be accessed from the /swagger path (and at the time of writing, will also be redirected to from the "/" path ). The API explorer enables you to see the API endpoints and try them directly from the browser. - -If dotnet-monitor is configured to use API Key authentication, then JWT token required to access the service can be supplied by clicking on the Authorize button at top right of the page, and pasting the token text into the popup dialog. 
- -## Known Limitations - -The swagger API explorer is not ideal for large downloads, which can result from collecting dumps. If collecting large dumps, its recommended to use `curl` to make those requests directly. The swagger UI will provide the curl command to make it easy to copy/paste into a terminal window. - -## OpenAPI documentation - -The OpenAPI definition for the dotnet-monitor API can be found at [openapi.json](openapi.json). This can be used to generate a client stub for calling the API from your own tools. \ No newline at end of file diff --git a/eng/Signing.props b/eng/Signing.props index 699b16dfc98..3bbc8889cff 100644 --- a/eng/Signing.props +++ b/eng/Signing.props @@ -6,7 +6,6 @@ - - - - - - https://github.com/dotnet/dotnet-monitor 9.0.0 @@ -53,26 +48,26 @@ --> - 9.0.0-beta.24105.3 - 9.0.0-beta.24105.3 - 9.0.0-beta.24105.3 + 9.0.0-beta.24151.5 + 9.0.0-beta.24151.5 + 9.0.0-beta.24151.5 - 9.0.0-preview.2.24128.4 - 9.0.0-preview.2.24128.4 + 9.0.0-preview.3.24151.1 + 9.0.0-preview.3.24151.1 - 2.0.0-beta4.24102.1 + 2.0.0-beta4.24126.1 - 8.0.510501 - 8.0.510501 + 8.0.0-preview.24151.1 + 8.0.0-preview.24151.1 - 9.0.100-preview.2.24154.2 + 9.0.100-preview.3.24153.2 - 9.0.0-preview.24075.1 + 9.0.0-preview.24151.1 - 9.0.0-preview.2.24128.5 - 9.0.0-preview.2.24128.5 + 9.0.0-preview.3.24129.2 + 9.0.0-preview.3.24129.2 - 1.0.507901 + 1.0.511901 $(MicrosoftNETCoreApp31Version) diff --git a/eng/common/native/init-distro-rid.sh b/eng/common/native/init-distro-rid.sh index de1687b2ccb..5dcbfd700f0 100644 --- a/eng/common/native/init-distro-rid.sh +++ b/eng/common/native/init-distro-rid.sh @@ -1,4 +1,4 @@ -#!/usr/bin/env bash +#!/bin/sh # getNonPortableDistroRid # @@ -11,21 +11,20 @@ # non-portable rid getNonPortableDistroRid() { - local targetOs="$1" - local targetArch="$2" - local rootfsDir="$3" - local nonPortableRid="" + targetOs="$1" + targetArch="$2" + rootfsDir="$3" + nonPortableRid="" if [ "$targetOs" = "linux" ]; then + # shellcheck disable=SC1091 if [ -e 
"${rootfsDir}/etc/os-release" ]; then - source "${rootfsDir}/etc/os-release" - - if [[ "${ID}" == "rhel" || "${ID}" == "rocky" || "${ID}" == "alpine" ]]; then - # remove the last version digit - VERSION_ID="${VERSION_ID%.*}" + . "${rootfsDir}/etc/os-release" + if [ "${ID}" = "rhel" ] || [ "${ID}" = "rocky" ] || [ "${ID}" = "alpine" ]; then + VERSION_ID="${VERSION_ID%.*}" # Remove the last version digit for these distros fi - if [[ "${VERSION_ID:-}" =~ ^([[:digit:]]|\.)+$ ]]; then + if echo "${VERSION_ID:-}" | grep -qE '^([[:digit:]]|\.)+$'; then nonPortableRid="${ID}.${VERSION_ID}-${targetArch}" else # Rolling release distros either do not set VERSION_ID, set it as blank or @@ -33,45 +32,45 @@ getNonPortableDistroRid() # so omit it here to be consistent with everything else. nonPortableRid="${ID}-${targetArch}" fi - elif [ -e "${rootfsDir}/android_platform" ]; then - source "$rootfsDir"/android_platform + # shellcheck disable=SC1091 + . "${rootfsDir}/android_platform" nonPortableRid="$RID" fi fi if [ "$targetOs" = "freebsd" ]; then - # $rootfsDir can be empty. freebsd-version is shell script and it should always work. - __freebsd_major_version=$($rootfsDir/bin/freebsd-version | { read v; echo "${v%%.*}"; }) + # $rootfsDir can be empty. freebsd-version is a shell script and should always work. + __freebsd_major_version=$("$rootfsDir"/bin/freebsd-version | cut -d'.' 
-f1) nonPortableRid="freebsd.$__freebsd_major_version-${targetArch}" - elif command -v getprop && getprop ro.product.system.model 2>&1 | grep -qi android; then + elif command -v getprop >/dev/null && getprop ro.product.system.model | grep -qi android; then __android_sdk_version=$(getprop ro.build.version.sdk) nonPortableRid="android.$__android_sdk_version-${targetArch}" elif [ "$targetOs" = "illumos" ]; then __uname_version=$(uname -v) case "$__uname_version" in omnios-*) - __omnios_major_version=$(echo "${__uname_version:8:2}") - nonPortableRid=omnios."$__omnios_major_version"-"$targetArch" - ;; + __omnios_major_version=$(echo "$__uname_version" | cut -c9-10) + nonPortableRid="omnios.$__omnios_major_version-${targetArch}" + ;; joyent_*) - __smartos_major_version=$(echo "${__uname_version:7:4}") - nonPortableRid=smartos."$__smartos_major_version"-"$targetArch" - ;; - illumos_*) - nonPortableRid=openindiana-"$targetArch" - ;; + __smartos_major_version=$(echo "$__uname_version" | cut -c9-10) + nonPortableRid="smartos.$__smartos_major_version-${targetArch}" + ;; + *) + nonPortableRid="illumos-${targetArch}" + ;; esac elif [ "$targetOs" = "solaris" ]; then __uname_version=$(uname -v) - __solaris_major_version=$(echo "${__uname_version%.*}") - nonPortableRid=solaris."$__solaris_major_version"-"$targetArch" + __solaris_major_version=$(echo "$__uname_version" | cut -d'.' 
-f1) + nonPortableRid="solaris.$__solaris_major_version-${targetArch}" elif [ "$targetOs" = "haiku" ]; then - __uname_release=$(uname -r) + __uname_release="$(uname -r)" nonPortableRid=haiku.r"$__uname_release"-"$targetArch" fi - echo "$(echo $nonPortableRid | tr '[:upper:]' '[:lower:]')" + echo "$nonPortableRid" | tr '[:upper:]' '[:lower:]' } # initDistroRidGlobal @@ -85,26 +84,23 @@ getNonPortableDistroRid() # None # # Notes: -# -# It is important to note that the function does not return anything, but it -# exports the following variables on success: -# -# __DistroRid : Non-portable rid of the target platform. -# __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform. -# +# It is important to note that the function does not return anything, but it +# exports the following variables on success: +# __DistroRid : Non-portable rid of the target platform. +# __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform. initDistroRidGlobal() { - local targetOs="$1" - local targetArch="$2" - local rootfsDir="" - if [ "$#" -ge 3 ]; then + targetOs="$1" + targetArch="$2" + rootfsDir="" + if [ $# -ge 3 ]; then rootfsDir="$3" fi if [ -n "${rootfsDir}" ]; then # We may have a cross build. Check for the existence of the rootfsDir if [ ! -e "${rootfsDir}" ]; then - echo "Error rootfsDir has been passed, but the location is not valid." + echo "Error: rootfsDir has been passed, but the location is not valid." exit 1 fi fi @@ -119,7 +115,7 @@ initDistroRidGlobal() STRINGS="$(command -v llvm-strings || true)" fi - # Check for musl-based distros (e.g Alpine Linux, Void Linux). + # Check for musl-based distros (e.g. Alpine Linux, Void Linux). 
if "${rootfsDir}/usr/bin/ldd" --version 2>&1 | grep -q musl || ( [ -n "$STRINGS" ] && "$STRINGS" "${rootfsDir}/usr/bin/ldd" 2>&1 | grep -q musl ); then __PortableTargetOS="linux-musl" diff --git a/eng/common/native/init-os-and-arch.sh b/eng/common/native/init-os-and-arch.sh index caa448ff030..38921d4338f 100644 --- a/eng/common/native/init-os-and-arch.sh +++ b/eng/common/native/init-os-and-arch.sh @@ -1,4 +1,4 @@ -#!/usr/bin/env bash +#!/bin/sh # Use uname to determine what the OS is. OSName=$(uname -s | tr '[:upper:]' '[:lower:]') @@ -54,6 +54,7 @@ case "$CPUName" in ;; armv7l|armv8l) + # shellcheck disable=SC1091 if (NAME=""; . /etc/os-release; test "$NAME" = "Tizen"); then arch=armel else diff --git a/eng/common/post-build/publish-using-darc.ps1 b/eng/common/post-build/publish-using-darc.ps1 index 1e779fec4dd..5a3a32ea8d7 100644 --- a/eng/common/post-build/publish-using-darc.ps1 +++ b/eng/common/post-build/publish-using-darc.ps1 @@ -12,7 +12,7 @@ param( try { . $PSScriptRoot\post-build-utils.ps1 - $darc = Get-Darc + $darc = Get-Darc $optionalParams = [System.Collections.ArrayList]::new() @@ -46,7 +46,7 @@ try { } Write-Host 'done.' -} +} catch { Write-Host $_ Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to publish build '$BuildId' to default channels." diff --git a/eng/common/templates-official/job/job.yml b/eng/common/templates-official/job/job.yml new file mode 100644 index 00000000000..352607308fd --- /dev/null +++ b/eng/common/templates-official/job/job.yml @@ -0,0 +1,255 @@ +# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, +# and some (Microbuild) should only be applied to non-PR cases for internal builds. 
+ +parameters: +# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job + cancelTimeoutInMinutes: '' + condition: '' + container: '' + continueOnError: false + dependsOn: '' + displayName: '' + pool: '' + steps: [] + strategy: '' + timeoutInMinutes: '' + variables: [] + workspace: '' + +# Job base template specific parameters + # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md + artifacts: '' + enableMicrobuild: false + enablePublishBuildArtifacts: false + enablePublishBuildAssets: false + enablePublishTestResults: false + enablePublishUsingPipelines: false + enableBuildRetry: false + disableComponentGovernance: '' + componentGovernanceIgnoreDirectories: '' + mergeTestResults: false + testRunTitle: '' + testResultsFormat: '' + name: '' + preSteps: [] + runAsPublic: false +# Sbom related params + enableSbom: true + PackageVersion: 7.0.0 + BuildDropPath: '$(Build.SourcesDirectory)/artifacts' + +jobs: +- job: ${{ parameters.name }} + + ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}: + cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }} + + ${{ if ne(parameters.condition, '') }}: + condition: ${{ parameters.condition }} + + ${{ if ne(parameters.container, '') }}: + container: ${{ parameters.container }} + + ${{ if ne(parameters.continueOnError, '') }}: + continueOnError: ${{ parameters.continueOnError }} + + ${{ if ne(parameters.dependsOn, '') }}: + dependsOn: ${{ parameters.dependsOn }} + + ${{ if ne(parameters.displayName, '') }}: + displayName: ${{ parameters.displayName }} + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + + ${{ if ne(parameters.strategy, '') }}: + strategy: ${{ parameters.strategy }} + + ${{ if ne(parameters.timeoutInMinutes, '') }}: + timeoutInMinutes: ${{ parameters.timeoutInMinutes }} + + variables: + - ${{ if ne(parameters.enableTelemetry, 'false') }}: + - name: 
DOTNET_CLI_TELEMETRY_PROFILE + value: '$(Build.Repository.Uri)' + - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}: + - name: EnableRichCodeNavigation + value: 'true' + # Retry signature validation up to three times, waiting 2 seconds between attempts. + # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures + - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY + value: 3,2000 + - ${{ each variable in parameters.variables }}: + # handle name-value variable syntax + # example: + # - name: [key] + # value: [value] + - ${{ if ne(variable.name, '') }}: + - name: ${{ variable.name }} + value: ${{ variable.value }} + + # handle variable groups + - ${{ if ne(variable.group, '') }}: + - group: ${{ variable.group }} + + # handle template variable syntax + # example: + # - template: path/to/template.yml + # parameters: + # [key]: [value] + - ${{ if ne(variable.template, '') }}: + - template: ${{ variable.template }} + ${{ if ne(variable.parameters, '') }}: + parameters: ${{ variable.parameters }} + + # handle key-value variable syntax. 
+ # example: + # - [key]: [value] + - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}: + - ${{ each pair in variable }}: + - name: ${{ pair.key }} + value: ${{ pair.value }} + + # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds + - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - group: DotNet-HelixApi-Access + + ${{ if ne(parameters.workspace, '') }}: + workspace: ${{ parameters.workspace }} + + steps: + - ${{ if ne(parameters.preSteps, '') }}: + - ${{ each preStep in parameters.preSteps }}: + - ${{ preStep }} + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - ${{ if eq(parameters.enableMicrobuild, 'true') }}: + - task: MicroBuildSigningPlugin@3 + displayName: Install MicroBuild plugin + inputs: + signType: $(_SignType) + zipSources: false + feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json + env: + TeamName: $(_TeamName) + continueOnError: ${{ parameters.continueOnError }} + condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) + + - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}: + - task: NuGetAuthenticate@1 + + - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}: + - task: DownloadPipelineArtifact@2 + inputs: + buildType: current + artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }} + targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }} + itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }} + + - ${{ each step in parameters.steps }}: + - ${{ step }} 
+ + - ${{ if eq(parameters.enableRichCodeNavigation, true) }}: + - task: RichCodeNavIndexer@0 + displayName: RichCodeNav Upload + inputs: + languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }} + environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'internal') }} + richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin + uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }} + continueOnError: true + + - template: /eng/common/templates-official/steps/component-governance.yml + parameters: + ${{ if eq(parameters.disableComponentGovernance, '') }}: + ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}: + disableComponentGovernance: false + ${{ else }}: + disableComponentGovernance: true + ${{ else }}: + disableComponentGovernance: ${{ parameters.disableComponentGovernance }} + componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} + + - ${{ if eq(parameters.enableMicrobuild, 'true') }}: + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - task: MicroBuildCleanup@1 + displayName: Execute Microbuild cleanup tasks + condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) + continueOnError: ${{ parameters.continueOnError }} + env: + TeamName: $(_TeamName) + + - ${{ if ne(parameters.artifacts.publish, '') }}: + - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}: + - task: 
CopyFiles@2 + displayName: Gather binaries for publish to artifacts + inputs: + SourceFolder: 'artifacts/bin' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin' + - task: CopyFiles@2 + displayName: Gather packages for publish to artifacts + inputs: + SourceFolder: 'artifacts/packages' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages' + - task: 1ES.PublishBuildArtifacts@1 + displayName: Publish pipeline artifacts + inputs: + PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts' + PublishLocation: Container + ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }} + continueOnError: true + condition: always() + - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: + - publish: artifacts/log + artifact: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }} + displayName: Publish logs + continueOnError: true + condition: always() + + - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}: + - task: 1ES.PublishBuildArtifacts@1 + displayName: Publish Logs + inputs: + PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)' + PublishLocation: Container + ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }} + continueOnError: true + condition: always() + + - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}: + - task: PublishTestResults@2 + displayName: Publish XUnit Test Results + inputs: + testResultsFormat: 'xUnit' + testResultsFiles: '*.xml' + searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' + testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit + mergeTestResults: ${{ 
parameters.mergeTestResults }} + continueOnError: true + condition: always() + - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}: + - task: PublishTestResults@2 + displayName: Publish TRX Test Results + inputs: + testResultsFormat: 'VSTest' + testResultsFiles: '*.trx' + searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' + testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx + mergeTestResults: ${{ parameters.mergeTestResults }} + continueOnError: true + condition: always() + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}: + - template: /eng/common/templates-official/steps/generate-sbom.yml + parameters: + PackageVersion: ${{ parameters.packageVersion}} + BuildDropPath: ${{ parameters.buildDropPath }} + IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} + + - ${{ if eq(parameters.enableBuildRetry, 'true') }}: + - publish: $(Build.SourcesDirectory)\eng\common\BuildConfiguration + artifact: BuildConfiguration + displayName: Publish build retry configuration + continueOnError: true diff --git a/eng/common/templates-official/job/onelocbuild.yml b/eng/common/templates-official/job/onelocbuild.yml new file mode 100644 index 00000000000..ba9ba493032 --- /dev/null +++ b/eng/common/templates-official/job/onelocbuild.yml @@ -0,0 +1,112 @@ +parameters: + # Optional: dependencies of the job + dependsOn: '' + + # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool + pool: '' + + CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex + GithubPat: $(BotAccount-dotnet-bot-repo-PAT) + + SourcesDirectory: $(Build.SourcesDirectory) + 
CreatePr: true + AutoCompletePr: false + ReusePr: true + UseLfLineEndings: true + UseCheckedInLocProjectJson: false + SkipLocProjectJsonGeneration: false + LanguageSet: VS_Main_Languages + LclSource: lclFilesInRepo + LclPackageId: '' + RepoType: gitHub + GitHubOrg: dotnet + MirrorRepo: '' + MirrorBranch: main + condition: '' + JobNameSuffix: '' + +jobs: +- job: OneLocBuild${{ parameters.JobNameSuffix }} + + dependsOn: ${{ parameters.dependsOn }} + + displayName: OneLocBuild${{ parameters.JobNameSuffix }} + + variables: + - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat + - name: _GenerateLocProjectArguments + value: -SourcesDirectory ${{ parameters.SourcesDirectory }} + -LanguageSet "${{ parameters.LanguageSet }}" + -CreateNeutralXlfs + - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}: + - name: _GenerateLocProjectArguments + value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson + - template: /eng/common/templates-official/variables/pool-providers.yml + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + ${{ if eq(parameters.pool, '') }}: + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022-pt + os: windows + + steps: + - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}: + - task: Powershell@2 + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1 + arguments: $(_GenerateLocProjectArguments) + displayName: Generate LocProject.json + condition: ${{ parameters.condition }} + + - task: OneLocBuild@2 + displayName: OneLocBuild + env: + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + inputs: + locProj: eng/Localize/LocProject.json + outDir: $(Build.ArtifactStagingDirectory) + lclSource: ${{ parameters.LclSource }} + lclPackageId: ${{ parameters.LclPackageId }} + isCreatePrSelected: ${{ parameters.CreatePr }} + isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }} + ${{ if eq(parameters.CreatePr, true) }}: + isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }} + ${{ if eq(parameters.RepoType, 'gitHub') }}: + isShouldReusePrSelected: ${{ parameters.ReusePr }} + packageSourceAuth: patAuth + patVariable: ${{ parameters.CeapexPat }} + ${{ if eq(parameters.RepoType, 'gitHub') }}: + repoType: ${{ parameters.RepoType }} + gitHubPatVariable: "${{ parameters.GithubPat }}" + ${{ if ne(parameters.MirrorRepo, '') }}: + isMirrorRepoSelected: true + gitHubOrganization: ${{ parameters.GitHubOrg }} + mirrorRepo: ${{ parameters.MirrorRepo }} + mirrorBranch: ${{ parameters.MirrorBranch }} + condition: ${{ parameters.condition }} + + - task: 1ES.PublishBuildArtifacts@1 + displayName: Publish Localization Files + inputs: + PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc' + PublishLocation: Container + ArtifactName: Loc + condition: ${{ parameters.condition }} + + - task: 1ES.PublishBuildArtifacts@1 + displayName: 
Publish LocProject.json + inputs: + PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/' + PublishLocation: Container + ArtifactName: Loc + condition: ${{ parameters.condition }} \ No newline at end of file diff --git a/eng/common/templates-official/job/publish-build-assets.yml b/eng/common/templates-official/job/publish-build-assets.yml new file mode 100644 index 00000000000..5f54135569b --- /dev/null +++ b/eng/common/templates-official/job/publish-build-assets.yml @@ -0,0 +1,157 @@ +parameters: + configuration: 'Debug' + + # Optional: condition for the job to run + condition: '' + + # Optional: 'true' if future jobs should run even if this job fails + continueOnError: false + + # Optional: dependencies of the job + dependsOn: '' + + # Optional: Include PublishBuildArtifacts task + enablePublishBuildArtifacts: false + + # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool + pool: {} + + # Optional: should run as a public build even in the internal project + # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. 
+ runAsPublic: false + + # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing + publishUsingPipelines: false + + # Optional: whether to publish the build's assets immediately after the publish-to-BAR step, rather than in a separate publishing stage + publishAssetsImmediately: false + + artifactsPublishingAdditionalParameters: '' + + signingValidationAdditionalParameters: '' + +jobs: +- job: Asset_Registry_Publish + + dependsOn: ${{ parameters.dependsOn }} + timeoutInMinutes: 150 + + ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: + displayName: Publish Assets + ${{ else }}: + displayName: Publish to Build Asset Registry + + variables: + - template: /eng/common/templates-official/variables/pool-providers.yml + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - group: Publish-Build-Assets + - group: AzureDevOps-Artifact-Feeds-Pats + - name: runCodesignValidationInjection + value: false + # unconditional - needed for logs publishing (redactor tool version) + - template: /eng/common/templates-official/post-build/common-variables.yml + + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs.
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022-pt + os: windows + steps: + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - checkout: self + fetchDepth: 3 + clean: true + + - task: DownloadBuildArtifacts@0 + displayName: Download artifact + inputs: + artifactName: AssetManifests + downloadPath: '$(Build.StagingDirectory)/Download' + checkDownloadedFiles: true + condition: ${{ parameters.condition }} + continueOnError: ${{ parameters.continueOnError }} + + - task: NuGetAuthenticate@1 + + - task: PowerShell@2 + displayName: Publish Build Assets + inputs: + filePath: eng\common\sdk-task.ps1 + arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet + /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests' + /p:BuildAssetRegistryToken=$(MaestroAccessToken) + /p:MaestroApiEndpoint=https://maestro.dot.net + /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }} + /p:OfficialBuildId=$(Build.BuildNumber) + condition: ${{ parameters.condition }} + continueOnError: ${{ parameters.continueOnError }} + + - task: powershell@2 + displayName: Create ReleaseConfigs Artifact + inputs: + targetType: inline + script: | + Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(BARBuildId) + Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value "$(DefaultChannels)" + Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(IsStableBuild) + + - task: 1ES.PublishBuildArtifacts@1 + displayName: Publish ReleaseConfigs Artifact + inputs: + PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs.txt' + PublishLocation: Container + 
ArtifactName: ReleaseConfigs + + - task: powershell@2 + displayName: Check if SymbolPublishingExclusionsFile.txt exists + inputs: + targetType: inline + script: | + $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt" + if(Test-Path -Path $symbolExclusionfile) + { + Write-Host "SymbolExclusionFile exists" + Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true" + } + else{ + Write-Host "Symbols Exclusion file does not exists" + Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false" + } + + - task: 1ES.PublishBuildArtifacts@1 + displayName: Publish SymbolPublishingExclusionsFile Artifact + condition: eq(variables['SymbolExclusionFile'], 'true') + inputs: + PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt' + PublishLocation: Container + ArtifactName: ReleaseConfigs + + - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: + - template: /eng/common/templates-official/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - task: PowerShell@2 + displayName: Publish Using Darc + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 + arguments: -BuildId $(BARBuildId) + -PublishingInfraVersion 3 + -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)' + -MaestroToken '$(MaestroApiAccessToken)' + -WaitPublishingFinish true + -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' + -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' + + - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: + - template: /eng/common/templates-official/steps/publish-logs.yml + parameters: + JobLabel: 'Publish_Artifacts_Logs' diff --git a/eng/common/templates-official/job/source-build.yml b/eng/common/templates-official/job/source-build.yml new file 
mode 100644 index 00000000000..50f04e642a3 --- /dev/null +++ b/eng/common/templates-official/job/source-build.yml @@ -0,0 +1,67 @@ +parameters: + # This template adds arcade-powered source-build to CI. The template produces a server job with a + # default ID 'Source_Build_Complete' to put in a dependency list if necessary. + + # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed. + jobNamePrefix: 'Source_Build' + + # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for + # managed-only repositories. This is an object with these properties: + # + # name: '' + # The name of the job. This is included in the job ID. + # targetRID: '' + # The name of the target RID to use, instead of the one auto-detected by Arcade. + # nonPortable: false + # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than + # linux-x64), and compiling against distro-provided packages rather than portable ones. + # skipPublishValidation: false + # Disables publishing validation. By default, a check is performed to ensure no packages are + # published by source-build. + # container: '' + # A container to use. Runs in docker. + # pool: {} + # A pool to use. Runs directly on an agent. + # buildScript: '' + # Specifies the build script to invoke to perform the build in the repo. The default + # './build.sh' should work for typical Arcade repositories, but this is customizable for + # difficult situations. + # jobProperties: {} + # A list of job properties to inject at the top level, for potential extensibility beyond + # container and pool. 
+ platform: {} + +jobs: +- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }} + displayName: Source-Build (${{ parameters.platform.name }}) + + ${{ each property in parameters.platform.jobProperties }}: + ${{ property.key }}: ${{ property.value }} + + ${{ if ne(parameters.platform.container, '') }}: + container: ${{ parameters.platform.container }} + + ${{ if eq(parameters.platform.pool, '') }}: + # The default VM host AzDO pool. This should be capable of running Docker containers: almost all + # source-build builds run in Docker, including the default managed platform. + # /eng/common/templates-official/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic + pool: + ${{ if eq(variables['System.TeamProject'], 'public') }}: + name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')] + demands: ImageOverride -equals build.ubuntu.1804.amd64 + + ${{ if eq(variables['System.TeamProject'], 'internal') }}: + name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')] + image: 1es-mariner-2-pt + os: linux + + ${{ if ne(parameters.platform.pool, '') }}: + pool: ${{ parameters.platform.pool }} + + workspace: + clean: all + + steps: + - template: /eng/common/templates-official/steps/source-build.yml + parameters: + platform: ${{ parameters.platform }} diff --git a/eng/common/templates-official/job/source-index-stage1.yml b/eng/common/templates-official/job/source-index-stage1.yml new file mode 100644 index 00000000000..757af7c7c4f --- /dev/null +++ b/eng/common/templates-official/job/source-index-stage1.yml @@ -0,0 +1,67 @@ +parameters: + runAsPublic: false + 
sourceIndexPackageVersion: 1.0.1-20231213.4 + sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json + sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci" + preSteps: [] + binlogPath: artifacts/log/Debug/Build.binlog + condition: '' + dependsOn: '' + pool: '' + +jobs: +- job: SourceIndexStage1 + dependsOn: ${{ parameters.dependsOn }} + condition: ${{ parameters.condition }} + variables: + - name: SourceIndexPackageVersion + value: ${{ parameters.sourceIndexPackageVersion }} + - name: SourceIndexPackageSource + value: ${{ parameters.sourceIndexPackageSource }} + - name: BinlogPath + value: ${{ parameters.binlogPath }} + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - group: source-dot-net stage1 variables + - template: /eng/common/templates-official/variables/pool-providers.yml + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + ${{ if eq(parameters.pool, '') }}: + pool: + ${{ if eq(variables['System.TeamProject'], 'public') }}: + name: $(DncEngPublicBuildPool) + image: windows.vs2022.amd64.open + ${{ if eq(variables['System.TeamProject'], 'internal') }}: + name: $(DncEngInternalBuildPool) + image: windows.vs2022.amd64 + + steps: + - ${{ each preStep in parameters.preSteps }}: + - ${{ preStep }} + + - task: UseDotNet@2 + displayName: Use .NET 8 SDK + inputs: + packageType: sdk + version: 8.0.x + installationPath: $(Agent.TempDirectory)/dotnet + workingDirectory: $(Agent.TempDirectory) + + - script: | + $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools + $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version 
$(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools + displayName: Download Tools + # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk. + workingDirectory: $(Agent.TempDirectory) + + - script: ${{ parameters.sourceIndexBuildCommand }} + displayName: Build Repository + + - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output + displayName: Process Binlog into indexable sln + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) + displayName: Upload stage1 artifacts to source index + env: + BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url) diff --git a/eng/common/templates-official/jobs/codeql-build.yml b/eng/common/templates-official/jobs/codeql-build.yml new file mode 100644 index 00000000000..b68d3c2f319 --- /dev/null +++ b/eng/common/templates-official/jobs/codeql-build.yml @@ -0,0 +1,31 @@ +parameters: + # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md + continueOnError: false + # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job + jobs: [] + # Optional: if specified, restore and use this version of Guardian instead of the default. 
+ overrideGuardianVersion: '' + +jobs: +- template: /eng/common/templates-official/jobs/jobs.yml + parameters: + enableMicrobuild: false + enablePublishBuildArtifacts: false + enablePublishTestResults: false + enablePublishBuildAssets: false + enablePublishUsingPipelines: false + enableTelemetry: true + + variables: + - group: Publish-Build-Assets + # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in + # sync with the packages.config file. + - name: DefaultGuardianVersion + value: 0.109.0 + - name: GuardianPackagesConfigFile + value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config + - name: GuardianVersion + value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }} + + jobs: ${{ parameters.jobs }} + diff --git a/eng/common/templates-official/jobs/jobs.yml b/eng/common/templates-official/jobs/jobs.yml new file mode 100644 index 00000000000..857a0f8ba43 --- /dev/null +++ b/eng/common/templates-official/jobs/jobs.yml @@ -0,0 +1,97 @@ +parameters: + # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md + continueOnError: false + + # Optional: Include PublishBuildArtifacts task + enablePublishBuildArtifacts: false + + # Optional: Enable publishing using release pipelines + enablePublishUsingPipelines: false + + # Optional: Enable running the source-build jobs to build repo from source + enableSourceBuild: false + + # Optional: Parameters for source-build template. 
+ # See /eng/common/templates-official/jobs/source-build.yml for options + sourceBuildParameters: [] + + graphFileGeneration: + # Optional: Enable generating the graph files at the end of the build + enabled: false + # Optional: Include toolset dependencies in the generated graph files + includeToolset: false + + # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job + jobs: [] + + # Optional: Override automatically derived dependsOn value for "publish build assets" job + publishBuildAssetsDependsOn: '' + + # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather than doing so in a separate stage. + publishAssetsImmediately: false + + # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml) + artifactsPublishingAdditionalParameters: '' + signingValidationAdditionalParameters: '' + + # Optional: should run as a public build even in the internal project + # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. + runAsPublic: false + + enableSourceIndex: false + sourceIndexParams: {} + +# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, +# and some (Microbuild) should only be applied to non-PR cases for internal builds.
+ +jobs: +- ${{ each job in parameters.jobs }}: + - template: ../job/job.yml + parameters: + # pass along parameters + ${{ each parameter in parameters }}: + ${{ if ne(parameter.key, 'jobs') }}: + ${{ parameter.key }}: ${{ parameter.value }} + + # pass along job properties + ${{ each property in job }}: + ${{ if ne(property.key, 'job') }}: + ${{ property.key }}: ${{ property.value }} + + name: ${{ job.job }} + +- ${{ if eq(parameters.enableSourceBuild, true) }}: + - template: /eng/common/templates-official/jobs/source-build.yml + parameters: + allCompletedJobId: Source_Build_Complete + ${{ each parameter in parameters.sourceBuildParameters }}: + ${{ parameter.key }}: ${{ parameter.value }} + +- ${{ if eq(parameters.enableSourceIndex, 'true') }}: + - template: ../job/source-index-stage1.yml + parameters: + runAsPublic: ${{ parameters.runAsPublic }} + ${{ each parameter in parameters.sourceIndexParams }}: + ${{ parameter.key }}: ${{ parameter.value }} + +- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}: + - template: ../job/publish-build-assets.yml + parameters: + continueOnError: ${{ parameters.continueOnError }} + dependsOn: + - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}: + - ${{ each job in parameters.publishBuildAssetsDependsOn }}: + - ${{ job.job }} + - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}: + - ${{ each job in parameters.jobs }}: + - ${{ job.job }} + - ${{ if eq(parameters.enableSourceBuild, true) }}: + - Source_Build_Complete + + runAsPublic: ${{ parameters.runAsPublic }} + publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }} + publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }} + enablePublishBuildArtifacts: ${{ 
parameters.enablePublishBuildArtifacts }} + artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} + signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }} diff --git a/eng/common/templates-official/jobs/source-build.yml b/eng/common/templates-official/jobs/source-build.yml new file mode 100644 index 00000000000..08e5db9bb11 --- /dev/null +++ b/eng/common/templates-official/jobs/source-build.yml @@ -0,0 +1,46 @@ +parameters: + # This template adds arcade-powered source-build to CI. A job is created for each platform, as + # well as an optional server job that completes when all platform jobs complete. + + # The name of the "join" job for all source-build platforms. If set to empty string, the job is + # not included. Existing repo pipelines can use this job depend on all source-build jobs + # completing without maintaining a separate list of every single job ID: just depend on this one + # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'. + allCompletedJobId: '' + + # See /eng/common/templates-official/job/source-build.yml + jobNamePrefix: 'Source_Build' + + # This is the default platform provided by Arcade, intended for use by a managed-only repo. + defaultManagedPlatform: + name: 'Managed' + container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8' + + # Defines the platforms on which to run build jobs. One job is created for each platform, and the + # object in this array is sent to the job template as 'platform'. If no platforms are specified, + # one job runs on 'defaultManagedPlatform'. 
+ platforms: [] + +jobs: + +- ${{ if ne(parameters.allCompletedJobId, '') }}: + - job: ${{ parameters.allCompletedJobId }} + displayName: Source-Build Complete + pool: server + dependsOn: + - ${{ each platform in parameters.platforms }}: + - ${{ parameters.jobNamePrefix }}_${{ platform.name }} + - ${{ if eq(length(parameters.platforms), 0) }}: + - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }} + +- ${{ each platform in parameters.platforms }}: + - template: /eng/common/templates-official/job/source-build.yml + parameters: + jobNamePrefix: ${{ parameters.jobNamePrefix }} + platform: ${{ platform }} + +- ${{ if eq(length(parameters.platforms), 0) }}: + - template: /eng/common/templates-official/job/source-build.yml + parameters: + jobNamePrefix: ${{ parameters.jobNamePrefix }} + platform: ${{ parameters.defaultManagedPlatform }} diff --git a/eng/common/templates-official/post-build/common-variables.yml b/eng/common/templates-official/post-build/common-variables.yml new file mode 100644 index 00000000000..b9ede10bf09 --- /dev/null +++ b/eng/common/templates-official/post-build/common-variables.yml @@ -0,0 +1,24 @@ +variables: + - group: Publish-Build-Assets + + # Whether the build is internal or not + - name: IsInternalBuild + value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }} + + # Default Maestro++ API Endpoint and API Version + - name: MaestroApiEndPoint + value: "https://maestro.dot.net" + - name: MaestroApiAccessToken + value: $(MaestroAccessToken) + - name: MaestroApiVersion + value: "2020-02-20" + + - name: SourceLinkCLIVersion + value: 3.0.0 + - name: SymbolToolVersion + value: 1.0.1 + - name: BinlogToolVersion + value: 1.0.11 + + - name: runCodesignValidationInjection + value: false diff --git a/eng/common/templates-official/post-build/post-build.yml b/eng/common/templates-official/post-build/post-build.yml new file mode 100644 index 00000000000..5c98fe1c0f3 
--- /dev/null +++ b/eng/common/templates-official/post-build/post-build.yml @@ -0,0 +1,285 @@ +parameters: + # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST. + # Publishing V1 is no longer supported + # Publishing V2 is no longer supported + # Publishing V3 is the default + - name: publishingInfraVersion + displayName: Which version of publishing should be used to promote the build definition? + type: number + default: 3 + values: + - 3 + + - name: BARBuildId + displayName: BAR Build Id + type: number + default: 0 + + - name: PromoteToChannelIds + displayName: Channel to promote BARBuildId to + type: string + default: '' + + - name: enableSourceLinkValidation + displayName: Enable SourceLink validation + type: boolean + default: false + + - name: enableSigningValidation + displayName: Enable signing validation + type: boolean + default: true + + - name: enableSymbolValidation + displayName: Enable symbol validation + type: boolean + default: false + + - name: enableNugetValidation + displayName: Enable NuGet validation + type: boolean + default: true + + - name: publishInstallersAndChecksums + displayName: Publish installers and checksums + type: boolean + default: true + + - name: SDLValidationParameters + type: object + default: + enable: false + publishGdn: false + continueOnError: false + params: '' + artifactNames: '' + downloadArtifacts: true + + # These parameters let the user customize the call to sdk-task.ps1 for publishing + # symbols & general artifacts as well as for signing validation + - name: symbolPublishingAdditionalParameters + displayName: Symbol publishing additional parameters + type: string + default: '' + + - name: artifactsPublishingAdditionalParameters + displayName: Artifact publishing additional parameters + type: string + default: '' + + - name: signingValidationAdditionalParameters + displayName: Signing validation additional parameters + type: string + default: '' + + # Which stages should 
finish execution before post-build stages start + - name: validateDependsOn + type: object + default: + - build + + - name: publishDependsOn + type: object + default: + - Validate + + # Optional: Call asset publishing rather than running in a separate stage + - name: publishAssetsImmediately + type: boolean + default: false + +stages: +- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: + - stage: Validate + dependsOn: ${{ parameters.validateDependsOn }} + displayName: Validate Build Assets + variables: + - template: common-variables.yml + - template: /eng/common/templates-official/variables/pool-providers.yml + jobs: + - job: + displayName: NuGet Validation + condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true')) + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022-pt + os: windows + + steps: + - template: setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Package Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: PackageArtifacts + checkDownloadedFiles: true + + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1 + arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ + -ToolDestinationPath $(Agent.BuildDirectory)/Extract/ + + - job: + displayName: Signing Validation + condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true')) + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022-pt + os: windows + steps: + - template: setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Package Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: PackageArtifacts + checkDownloadedFiles: true + itemPattern: | + ** + !**/Microsoft.SourceBuild.Intermediate.*.nupkg + + # This is necessary whenever we want to publish/restore to an AzDO private feed + # Since sdk-task.ps1 tries to restore packages we need to do this authentication here + # otherwise it'll complain about accessing a private feed. + - task: NuGetAuthenticate@1 + displayName: 'Authenticate to AzDO Feeds' + + # Signing validation will optionally work with the buildmanifest file which is downloaded from + # Azure DevOps above. + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: eng\common\sdk-task.ps1 + arguments: -task SigningValidation -restore -msbuildEngine vs + /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts' + /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt' + ${{ parameters.signingValidationAdditionalParameters }} + + - template: ../steps/publish-logs.yml + parameters: + StageLabel: 'Validation' + JobLabel: 'Signing' + BinlogToolVersion: $(BinlogToolVersion) + + - job: + displayName: SourceLink Validation + condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true') + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022-pt + os: windows + steps: + - template: setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Blob Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: BlobArtifacts + checkDownloadedFiles: true + + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1 + arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ + -ExtractPath $(Agent.BuildDirectory)/Extract/ + -GHRepoName $(Build.Repository.Name) + -GHCommit $(Build.SourceVersion) + -SourcelinkCliVersion $(SourceLinkCLIVersion) + continueOnError: true + +- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}: + - stage: publish_using_darc + ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: + dependsOn: ${{ parameters.publishDependsOn }} + ${{ else }}: + dependsOn: ${{ parameters.validateDependsOn }} + displayName: Publish using Darc + variables: + - template: common-variables.yml + - template: /eng/common/templates-official/variables/pool-providers.yml + jobs: + - job: + displayName: Publish Using Darc + timeoutInMinutes: 120 + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022-pt + os: windows + steps: + - template: setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + + - task: NuGetAuthenticate@1 + + - task: PowerShell@2 + displayName: Publish Using Darc + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 + arguments: -BuildId $(BARBuildId) + -PublishingInfraVersion ${{ parameters.publishingInfraVersion }} + -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)' + -MaestroToken '$(MaestroApiAccessToken)' + -WaitPublishingFinish true + -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' + -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' diff --git a/eng/common/templates-official/post-build/setup-maestro-vars.yml b/eng/common/templates-official/post-build/setup-maestro-vars.yml new file mode 100644 index 00000000000..0c87f149a4a --- /dev/null +++ b/eng/common/templates-official/post-build/setup-maestro-vars.yml @@ -0,0 +1,70 @@ +parameters: + BARBuildId: '' + PromoteToChannelIds: '' + +steps: + - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}: + - task: DownloadBuildArtifacts@0 + displayName: Download Release Configs + inputs: + buildType: current + artifactName: ReleaseConfigs + checkDownloadedFiles: true + + - task: PowerShell@2 + name: setReleaseVars + displayName: Set Release Configs Vars + inputs: + targetType: inline + pwsh: true + script: | + try { + if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') { + $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt + + $BarId = 
$Content | Select -Index 0 + $Channels = $Content | Select -Index 1 + $IsStableBuild = $Content | Select -Index 2 + + $AzureDevOpsProject = $Env:System_TeamProject + $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId + $AzureDevOpsBuildId = $Env:Build_BuildId + } + else { + $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}" + + $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]' + $apiHeaders.Add('Accept', 'application/json') + $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}") + + $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" } + + $BarId = $Env:BARBuildId + $Channels = $Env:PromoteToMaestroChannels -split "," + $Channels = $Channels -join "][" + $Channels = "[$Channels]" + + $IsStableBuild = $buildInfo.stable + $AzureDevOpsProject = $buildInfo.azureDevOpsProject + $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId + $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId + } + + Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId" + Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels" + Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild" + + Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject" + Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId" + Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId" + } + catch { + Write-Host $_ + Write-Host $_.Exception + Write-Host $_.ScriptStackTrace + exit 1 + } + env: + MAESTRO_API_TOKEN: $(MaestroApiAccessToken) + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }} diff --git a/eng/common/templates-official/post-build/trigger-subscription.yml 
b/eng/common/templates-official/post-build/trigger-subscription.yml new file mode 100644 index 00000000000..da669030daf --- /dev/null +++ b/eng/common/templates-official/post-build/trigger-subscription.yml @@ -0,0 +1,13 @@ +parameters: + ChannelId: 0 + +steps: +- task: PowerShell@2 + displayName: Triggering subscriptions + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1 + arguments: -SourceRepo $(Build.Repository.Uri) + -ChannelId ${{ parameters.ChannelId }} + -MaestroApiAccessToken $(MaestroAccessToken) + -MaestroApiEndPoint $(MaestroApiEndPoint) + -MaestroApiVersion $(MaestroApiVersion) diff --git a/eng/common/templates-official/steps/add-build-to-channel.yml b/eng/common/templates-official/steps/add-build-to-channel.yml new file mode 100644 index 00000000000..f67a210d62f --- /dev/null +++ b/eng/common/templates-official/steps/add-build-to-channel.yml @@ -0,0 +1,13 @@ +parameters: + ChannelId: 0 + +steps: +- task: PowerShell@2 + displayName: Add Build to Channel + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1 + arguments: -BuildId $(BARBuildId) + -ChannelId ${{ parameters.ChannelId }} + -MaestroApiAccessToken $(MaestroApiAccessToken) + -MaestroApiEndPoint $(MaestroApiEndPoint) + -MaestroApiVersion $(MaestroApiVersion) diff --git a/eng/common/templates-official/steps/component-governance.yml b/eng/common/templates-official/steps/component-governance.yml new file mode 100644 index 00000000000..0ecec47b0c9 --- /dev/null +++ b/eng/common/templates-official/steps/component-governance.yml @@ -0,0 +1,13 @@ +parameters: + disableComponentGovernance: false + componentGovernanceIgnoreDirectories: '' + +steps: +- ${{ if eq(parameters.disableComponentGovernance, 'true') }}: + - script: "echo ##vso[task.setvariable variable=skipComponentGovernanceDetection]true" + displayName: Set skipComponentGovernanceDetection variable +- ${{ if ne(parameters.disableComponentGovernance, 'true') }}: 
+ - task: ComponentGovernanceComponentDetection@0 + continueOnError: true + inputs: + ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} \ No newline at end of file diff --git a/eng/common/templates-official/steps/generate-sbom.yml b/eng/common/templates-official/steps/generate-sbom.yml new file mode 100644 index 00000000000..488b560e8ba --- /dev/null +++ b/eng/common/templates-official/steps/generate-sbom.yml @@ -0,0 +1,48 @@ +# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated. +# PackageName - The name of the package this SBOM represents. +# PackageVersion - The version of the package this SBOM represents. +# ManifestDirPath - The path of the directory where the generated manifest files will be placed +# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector. + +parameters: + PackageVersion: 7.0.0 + BuildDropPath: '$(Build.SourcesDirectory)/artifacts' + PackageName: '.NET' + ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom + IgnoreDirectories: '' + sbomContinueOnError: true + +steps: +- task: PowerShell@2 + displayName: Prep for SBOM generation in (Non-linux) + condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin')) + inputs: + filePath: ./eng/common/generate-sbom-prep.ps1 + arguments: ${{parameters.manifestDirPath}} + +# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461 +- script: | + chmod +x ./eng/common/generate-sbom-prep.sh + ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}} + displayName: Prep for SBOM generation in (Linux) + condition: eq(variables['Agent.Os'], 'Linux') + continueOnError: ${{ parameters.sbomContinueOnError }} + +- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0 + displayName: 'Generate SBOM manifest' + continueOnError: ${{ parameters.sbomContinueOnError }} + inputs: + 
PackageName: ${{ parameters.packageName }} + BuildDropPath: ${{ parameters.buildDropPath }} + PackageVersion: ${{ parameters.packageVersion }} + ManifestDirPath: ${{ parameters.manifestDirPath }} + ${{ if ne(parameters.IgnoreDirectories, '') }}: + AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}' + +- task: 1ES.PublishPipelineArtifact@1 + displayName: Publish SBOM manifest + continueOnError: ${{parameters.sbomContinueOnError}} + inputs: + targetPath: '${{parameters.manifestDirPath}}' + artifactName: $(ARTIFACT_NAME) + diff --git a/eng/common/templates-official/steps/publish-logs.yml b/eng/common/templates-official/steps/publish-logs.yml new file mode 100644 index 00000000000..84b2f559c56 --- /dev/null +++ b/eng/common/templates-official/steps/publish-logs.yml @@ -0,0 +1,49 @@ +parameters: + StageLabel: '' + JobLabel: '' + CustomSensitiveDataList: '' + # A default - in case value from eng/common/templates-official/post-build/common-variables.yml is not passed + BinlogToolVersion: '1.0.11' + +steps: +- task: Powershell@2 + displayName: Prepare Binlogs to Upload + inputs: + targetType: inline + script: | + New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ + Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ + continueOnError: true + condition: always() + +- task: PowerShell@2 + displayName: Redact Logs + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1 + # For now this needs to have explicit list of all sensitive data. 
Taken from eng/publishing/v3/publish.yml + # Sensitive data can as well be added to $(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt' + # If the file exists - sensitive data for redaction will be sourced from it + # (single entry per line, lines starting with '# ' are considered comments and skipped) + arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs' + -BinlogToolVersion ${{parameters.BinlogToolVersion}} + -TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt' + '$(publishing-dnceng-devdiv-code-r-build-re)' + '$(MaestroAccessToken)' + '$(dn-bot-all-orgs-artifact-feeds-rw)' + '$(akams-client-id)' + '$(akams-client-secret)' + '$(microsoft-symbol-server-pat)' + '$(symweb-symbol-server-pat)' + '$(dn-bot-all-orgs-build-rw-code-rw)' + ${{parameters.CustomSensitiveDataList}} + continueOnError: true + condition: always() + +- task: 1ES.PublishBuildArtifacts@1 + displayName: Publish Logs + inputs: + PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs' + PublishLocation: Container + ArtifactName: PostBuildLogs + continueOnError: true + condition: always() diff --git a/eng/common/templates-official/steps/retain-build.yml b/eng/common/templates-official/steps/retain-build.yml new file mode 100644 index 00000000000..83d97a26a01 --- /dev/null +++ b/eng/common/templates-official/steps/retain-build.yml @@ -0,0 +1,28 @@ +parameters: + # Optional azure devops PAT with build execute permissions for the build's organization, + # only needed if the build that should be retained ran on a different organization than + # the pipeline where this template is executing from + Token: '' + # Optional BuildId to retain, defaults to the current running build + BuildId: '' + # Azure devops Organization URI for the build in the https://dev.azure.com/ format. + # Defaults to the organization the current pipeline is running on + AzdoOrgUri: '$(System.CollectionUri)' + # Azure devops project for the build. 
Defaults to the project the current pipeline is running on + AzdoProject: '$(System.TeamProject)' + +steps: + - task: powershell@2 + inputs: + targetType: 'filePath' + filePath: eng/common/retain-build.ps1 + pwsh: true + arguments: > + -AzdoOrgUri: ${{parameters.AzdoOrgUri}} + -AzdoProject ${{parameters.AzdoProject}} + -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }} + -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}} + displayName: Enable permanent build retention + env: + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + BUILD_ID: $(Build.BuildId) \ No newline at end of file diff --git a/eng/common/templates-official/steps/send-to-helix.yml b/eng/common/templates-official/steps/send-to-helix.yml new file mode 100644 index 00000000000..3eb7e2d5f84 --- /dev/null +++ b/eng/common/templates-official/steps/send-to-helix.yml @@ -0,0 +1,91 @@ +# Please remember to update the documentation if you make changes to these parameters! +parameters: + HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/ + HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/' + HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number + HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues + HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group + HelixConfiguration: '' # optional -- additional property attached to a job + HelixPreCommands: '' # optional -- commands to run before Helix work item execution + HelixPostCommands: '' # optional -- commands to run after Helix work item execution + WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires 
WorkItemCommand; incompatible with XUnitProjects + WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects + WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects + CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload + XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true + XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects + XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects + XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner + XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects + IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion + DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json + DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json + WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget." 
+ IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set + HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net ) + Creator: '' # optional -- if the build is external, use this to specify who is sending the job + DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO + condition: succeeded() # optional -- condition for step to execute; defaults to succeeded() + continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false + +steps: + - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"' + displayName: ${{ parameters.DisplayNamePrefix }} (Windows) + env: + BuildConfig: $(_BuildConfig) + HelixSource: ${{ parameters.HelixSource }} + HelixType: ${{ parameters.HelixType }} + HelixBuild: ${{ parameters.HelixBuild }} + HelixConfiguration: ${{ parameters.HelixConfiguration }} + HelixTargetQueues: ${{ parameters.HelixTargetQueues }} + HelixAccessToken: ${{ parameters.HelixAccessToken }} + HelixPreCommands: ${{ parameters.HelixPreCommands }} + HelixPostCommands: ${{ parameters.HelixPostCommands }} + WorkItemDirectory: ${{ parameters.WorkItemDirectory }} + WorkItemCommand: ${{ parameters.WorkItemCommand }} + WorkItemTimeout: ${{ parameters.WorkItemTimeout }} + CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} + XUnitProjects: ${{ parameters.XUnitProjects }} + XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} + XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }} + XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} + XUnitRunnerVersion: ${{ 
parameters.XUnitRunnerVersion }} + IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} + DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} + DotNetCliVersion: ${{ parameters.DotNetCliVersion }} + WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} + HelixBaseUri: ${{ parameters.HelixBaseUri }} + Creator: ${{ parameters.Creator }} + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT')) + continueOnError: ${{ parameters.continueOnError }} + - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog + displayName: ${{ parameters.DisplayNamePrefix }} (Unix) + env: + BuildConfig: $(_BuildConfig) + HelixSource: ${{ parameters.HelixSource }} + HelixType: ${{ parameters.HelixType }} + HelixBuild: ${{ parameters.HelixBuild }} + HelixConfiguration: ${{ parameters.HelixConfiguration }} + HelixTargetQueues: ${{ parameters.HelixTargetQueues }} + HelixAccessToken: ${{ parameters.HelixAccessToken }} + HelixPreCommands: ${{ parameters.HelixPreCommands }} + HelixPostCommands: ${{ parameters.HelixPostCommands }} + WorkItemDirectory: ${{ parameters.WorkItemDirectory }} + WorkItemCommand: ${{ parameters.WorkItemCommand }} + WorkItemTimeout: ${{ parameters.WorkItemTimeout }} + CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} + XUnitProjects: ${{ parameters.XUnitProjects }} + XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} + XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }} + XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} + XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} + IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} + DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} + DotNetCliVersion: ${{ 
parameters.DotNetCliVersion }} + WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} + HelixBaseUri: ${{ parameters.HelixBaseUri }} + Creator: ${{ parameters.Creator }} + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT')) + continueOnError: ${{ parameters.continueOnError }} diff --git a/eng/common/templates-official/steps/source-build.yml b/eng/common/templates-official/steps/source-build.yml new file mode 100644 index 00000000000..b1db70842f5 --- /dev/null +++ b/eng/common/templates-official/steps/source-build.yml @@ -0,0 +1,129 @@ +parameters: + # This template adds arcade-powered source-build to CI. + + # This is a 'steps' template, and is intended for advanced scenarios where the existing build + # infra has a careful build methodology that must be followed. For example, a repo + # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline + # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to + # GitHub. Using this steps template leaves room for that infra to be included. + + # Defines the platform on which to run the steps. See 'eng/common/templates-official/job/source-build.yml' + # for details. The entire object is described in the 'job' template for simplicity, even though + # the usage of the properties on this object is split between the 'job' and 'steps' templates. + platform: {} + +steps: +# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.) +- script: | + set -x + df -h + + # If building on the internal project, the artifact feeds variable may be available (usually only if needed) + # In that case, call the feed setup script to add internal feeds corresponding to public ones. + # In addition, add an msbuild argument to copy the WIP from the repo to the target build location. 
+ # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those + # changes. + internalRestoreArgs= + if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then + # Temporarily work around https://github.com/dotnet/arcade/issues/7709 + chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh + $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw) + internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true' + + # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo. + # This only works if there is a username/email configured, which won't be the case in most CI runs. + git config --get user.email + if [ $? -ne 0 ]; then + git config user.email dn-bot@microsoft.com + git config user.name dn-bot + fi + fi + + # If building on the internal project, the internal storage variable may be available (usually only if needed) + # In that case, add variables to allow the download of internal runtimes if the specified versions are not found + # in the default public locations. + internalRuntimeDownloadArgs= + if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then + internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)' + fi + + buildConfig=Release + # Check if AzDO substitutes in a build config from a variable, and use it if so. 
+ if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then + buildConfig='$(_BuildConfig)' + fi + + officialBuildArgs= + if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then + officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)' + fi + + targetRidArgs= + if [ '${{ parameters.platform.targetRID }}' != '' ]; then + targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}' + fi + + runtimeOsArgs= + if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then + runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}' + fi + + baseOsArgs= + if [ '${{ parameters.platform.baseOS }}' != '' ]; then + baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}' + fi + + publishArgs= + if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then + publishArgs='--publish' + fi + + assetManifestFileName=SourceBuild_RidSpecific.xml + if [ '${{ parameters.platform.name }}' != '' ]; then + assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml + fi + + ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \ + --configuration $buildConfig \ + --restore --build --pack $publishArgs -bl \ + $officialBuildArgs \ + $internalRuntimeDownloadArgs \ + $internalRestoreArgs \ + $targetRidArgs \ + $runtimeOsArgs \ + $baseOsArgs \ + /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \ + /p:ArcadeBuildFromSource=true \ + /p:AssetManifestFileName=$assetManifestFileName + displayName: Build + +# Upload build logs for diagnosis. 
+- task: CopyFiles@2 + displayName: Prepare BuildLogs staging directory + inputs: + SourceFolder: '$(Build.SourcesDirectory)' + Contents: | + **/*.log + **/*.binlog + artifacts/sb/prebuilt-report/** + TargetFolder: '$(Build.StagingDirectory)/BuildLogs' + CleanTargetFolder: true + continueOnError: true + condition: succeededOrFailed() + +- task: 1ES.PublishPipelineArtifact@1 + displayName: Publish BuildLogs + inputs: + targetPath: '$(Build.StagingDirectory)/BuildLogs' + artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt) + continueOnError: true + condition: succeededOrFailed() + +# Manually inject component detection so that we can ignore the source build upstream cache, which contains +# a nupkg cache of input packages (a local feed). +# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir' +# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets +- task: ComponentGovernanceComponentDetection@0 + displayName: Component Detection (Exclude upstream cache) + inputs: + ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/sb/src/artifacts/obj/source-built-upstream-cache' diff --git a/eng/common/templates-official/variables/pool-providers.yml b/eng/common/templates-official/variables/pool-providers.yml new file mode 100644 index 00000000000..beab7d1bfba --- /dev/null +++ b/eng/common/templates-official/variables/pool-providers.yml @@ -0,0 +1,45 @@ +# Select a pool provider based off branch name. Anything with branch name containing 'release' must go into an -Svc pool, +# otherwise it should go into the "normal" pools. This separates out the queueing and billing of released branches. + +# Motivation: +# Once a given branch of a repository's output has been officially "shipped" once, it is then considered to be COGS +# (Cost of goods sold) and should be moved to a servicing pool provider. 
This allows both separation of queueing +# (allowing release builds and main PR builds to not intefere with each other) and billing (required for COGS. +# Additionally, the pool provider name itself may be subject to change when the .NET Core Engineering Services +# team needs to move resources around and create new and potentially differently-named pools. Using this template +# file from an Arcade-ified repo helps guard against both having to update one's release/* branches and renaming. + +# How to use: +# This yaml assumes your shipped product branches use the naming convention "release/..." (which many do). +# If we find alternate naming conventions in broad usage it can be added to the condition below. +# +# First, import the template in an arcade-ified repo to pick up the variables, e.g.: +# +# variables: +# - template: /eng/common/templates-official/variables/pool-providers.yml +# +# ... then anywhere specifying the pool provider use the runtime variables, +# $(DncEngInternalBuildPool) +# +# pool: +# name: $(DncEngInternalBuildPool) +# image: 1es-windows-2022-pt + +variables: + # Coalesce the target and source branches so we know when a PR targets a release branch + # If these variables are somehow missing, fall back to main (tends to have more capacity) + + # Any new -Svc alternative pools should have variables added here to allow for splitting work + + - name: DncEngInternalBuildPool + value: $[ + replace( + replace( + eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), + True, + 'NetCore1ESPool-Svc-Internal' + ), + False, + 'NetCore1ESPool-Internal' + ) + ] \ No newline at end of file diff --git a/eng/common/templates-official/variables/sdl-variables.yml b/eng/common/templates-official/variables/sdl-variables.yml new file mode 100644 index 00000000000..dbdd66d4a4b --- /dev/null +++ b/eng/common/templates-official/variables/sdl-variables.yml @@ -0,0 +1,7 @@ +variables: 
+# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in +# sync with the packages.config file. +- name: DefaultGuardianVersion + value: 0.109.0 +- name: GuardianPackagesConfigFile + value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config \ No newline at end of file diff --git a/eng/common/templates/job/publish-build-assets.yml b/eng/common/templates/job/publish-build-assets.yml index 3115990d511..bb42240f865 100644 --- a/eng/common/templates/job/publish-build-assets.yml +++ b/eng/common/templates/job/publish-build-assets.yml @@ -58,7 +58,7 @@ jobs: demands: Cmd # If it's not devdiv, it's dnceng ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: - name: $(DncEngInternalBuildPool) + name: NetCore1ESPool-Publishing-Internal demands: ImageOverride -equals windows.vs2019.amd64 steps: @@ -66,7 +66,7 @@ jobs: - checkout: self fetchDepth: 3 clean: true - + - task: DownloadBuildArtifacts@0 displayName: Download artifact inputs: @@ -75,7 +75,7 @@ jobs: checkDownloadedFiles: true condition: ${{ parameters.condition }} continueOnError: ${{ parameters.continueOnError }} - + - task: NuGetAuthenticate@1 - task: PowerShell@2 @@ -90,7 +90,7 @@ jobs: /p:OfficialBuildId=$(Build.BuildNumber) condition: ${{ parameters.condition }} continueOnError: ${{ parameters.continueOnError }} - + - task: powershell@2 displayName: Create ReleaseConfigs Artifact inputs: @@ -99,7 +99,7 @@ jobs: Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(BARBuildId) Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value "$(DefaultChannels)" Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(IsStableBuild) - + - task: PublishBuildArtifacts@1 displayName: Publish ReleaseConfigs Artifact inputs: @@ -125,7 +125,7 @@ jobs: - task: PublishBuildArtifacts@1 displayName: Publish SymbolPublishingExclusionsFile Artifact - condition: eq(variables['SymbolExclusionFile'], 'true') + condition: 
eq(variables['SymbolExclusionFile'], 'true') inputs: PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt' PublishLocation: Container @@ -141,7 +141,7 @@ jobs: displayName: Publish Using Darc inputs: filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 - arguments: -BuildId $(BARBuildId) + arguments: -BuildId $(BARBuildId) -PublishingInfraVersion 3 -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)' -MaestroToken '$(MaestroApiAccessToken)' @@ -152,4 +152,4 @@ jobs: - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: - template: /eng/common/templates/steps/publish-logs.yml parameters: - JobLabel: 'Publish_Artifacts_Logs' + JobLabel: 'Publish_Artifacts_Logs' diff --git a/eng/common/templates/post-build/post-build.yml b/eng/common/templates/post-build/post-build.yml index bbc010fe732..ee70e2b399c 100644 --- a/eng/common/templates/post-build/post-build.yml +++ b/eng/common/templates/post-build/post-build.yml @@ -39,7 +39,7 @@ parameters: displayName: Enable NuGet validation type: boolean default: true - + - name: publishInstallersAndChecksums displayName: Publish installers and checksums type: boolean @@ -131,8 +131,8 @@ stages: displayName: Validate inputs: filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1 - arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ - -ToolDestinationPath $(Agent.BuildDirectory)/Extract/ + arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ + -ToolDestinationPath $(Agent.BuildDirectory)/Extract/ - job: displayName: Signing Validation @@ -222,9 +222,9 @@ stages: displayName: Validate inputs: filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1 - arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ - -ExtractPath $(Agent.BuildDirectory)/Extract/ - -GHRepoName $(Build.Repository.Name) + arguments: -InputPath 
$(Build.ArtifactStagingDirectory)/BlobArtifacts/ + -ExtractPath $(Agent.BuildDirectory)/Extract/ + -GHRepoName $(Build.Repository.Name) -GHCommit $(Build.SourceVersion) -SourcelinkCliVersion $(SourceLinkCLIVersion) continueOnError: true @@ -259,7 +259,7 @@ stages: demands: Cmd # If it's not devdiv, it's dnceng ${{ else }}: - name: $(DncEngInternalBuildPool) + name: NetCore1ESPool-Publishing-Internal demands: ImageOverride -equals windows.vs2019.amd64 steps: - template: setup-maestro-vars.yml @@ -273,7 +273,7 @@ stages: displayName: Publish Using Darc inputs: filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 - arguments: -BuildId $(BARBuildId) + arguments: -BuildId $(BARBuildId) -PublishingInfraVersion ${{ parameters.publishingInfraVersion }} -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)' -MaestroToken '$(MaestroApiAccessToken)' diff --git a/eng/common/templates/steps/send-to-helix.yml b/eng/common/templates/steps/send-to-helix.yml index 3eb7e2d5f84..68fa739c4ab 100644 --- a/eng/common/templates/steps/send-to-helix.yml +++ b/eng/common/templates/steps/send-to-helix.yml @@ -5,6 +5,8 @@ parameters: HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group + HelixProjectPath: 'eng/common/helixpublish.proj' # optional -- path to the project file to build relative to BUILD_SOURCESDIRECTORY + HelixProjectArguments: '' # optional -- arguments passed to the build command HelixConfiguration: '' # optional -- additional property attached to a job HelixPreCommands: '' # optional -- commands to run before Helix work item execution HelixPostCommands: '' # optional -- commands to run after Helix 
work item execution @@ -29,7 +31,7 @@ parameters: continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false steps: - - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"' + - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"' displayName: ${{ parameters.DisplayNamePrefix }} (Windows) env: BuildConfig: $(_BuildConfig) @@ -59,7 +61,7 @@ steps: SYSTEM_ACCESSTOKEN: $(System.AccessToken) condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT')) continueOnError: ${{ parameters.continueOnError }} - - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog + - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog displayName: ${{ parameters.DisplayNamePrefix }} (Unix) env: BuildConfig: $(_BuildConfig) diff --git a/eng/common/templates/variables/pool-providers.yml b/eng/common/templates/variables/pool-providers.yml index 9cc5c550d3b..d236f9fdbb1 100644 --- a/eng/common/templates/variables/pool-providers.yml +++ b/eng/common/templates/variables/pool-providers.yml @@ -1,15 +1,15 @@ -# Select a pool provider based off branch name. 
Anything with branch name containing 'release' must go into an -Svc pool, +# Select a pool provider based off branch name. Anything with branch name containing 'release' must go into an -Svc pool, # otherwise it should go into the "normal" pools. This separates out the queueing and billing of released branches. -# Motivation: +# Motivation: # Once a given branch of a repository's output has been officially "shipped" once, it is then considered to be COGS # (Cost of goods sold) and should be moved to a servicing pool provider. This allows both separation of queueing # (allowing release builds and main PR builds to not intefere with each other) and billing (required for COGS. -# Additionally, the pool provider name itself may be subject to change when the .NET Core Engineering Services -# team needs to move resources around and create new and potentially differently-named pools. Using this template +# Additionally, the pool provider name itself may be subject to change when the .NET Core Engineering Services +# team needs to move resources around and create new and potentially differently-named pools. Using this template # file from an Arcade-ified repo helps guard against both having to update one's release/* branches and renaming. -# How to use: +# How to use: # This yaml assumes your shipped product branches use the naming convention "release/..." (which many do). # If we find alternate naming conventions in broad usage it can be added to the condition below. 
# @@ -54,4 +54,4 @@ variables: False, 'NetCore1ESPool-Internal' ) - ] \ No newline at end of file + ] diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1 index 7d8dc89b919..9bf873e3c25 100644 --- a/eng/common/tools.ps1 +++ b/eng/common/tools.ps1 @@ -65,6 +65,11 @@ $ErrorActionPreference = 'Stop' # Base-64 encoded SAS token that has permission to storage container described by $runtimeSourceFeed [string]$runtimeSourceFeedKey = if (Test-Path variable:runtimeSourceFeedKey) { $runtimeSourceFeedKey } else { $null } +# True if the build is a product build +[bool]$productBuild = if (Test-Path variable:productBuild) { $productBuild } else { $false } + +[String[]]$properties = if (Test-Path variable:properties) { $properties } else { @() } + function Create-Directory ([string[]] $path) { New-Item -Path $path -Force -ItemType 'Directory' | Out-Null } @@ -850,7 +855,8 @@ function MSBuild-Core() { } # When running on Azure Pipelines, override the returned exit code to avoid double logging. - if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null) { + # Skip this when the build is a child of the VMR orchestrator build. + if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null -and !$productBuild -and $properties -notlike "*DotNetBuildRepo=true*") { Write-PipelineSetResult -Result "Failed" -Message "msbuild execution failed." # Exiting with an exit code causes the azure pipelines task to log yet another "noise" error # The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error diff --git a/eng/common/tools.sh b/eng/common/tools.sh index ece4b730795..db64e298ff6 100644 --- a/eng/common/tools.sh +++ b/eng/common/tools.sh @@ -68,6 +68,9 @@ fi runtime_source_feed=${runtime_source_feed:-''} runtime_source_feed_key=${runtime_source_feed_key:-''} +# True if the build is a product build +product_build=${product_build:-false} + # Resolve any symlinks in the given path. 
function ResolvePath { local path=$1 @@ -141,7 +144,7 @@ function InitializeDotNetCli { if [[ $global_json_has_runtimes == false && -n "${DOTNET_INSTALL_DIR:-}" && -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then dotnet_root="$DOTNET_INSTALL_DIR" else - dotnet_root="$repo_root/.dotnet" + dotnet_root="${repo_root}.dotnet" export DOTNET_INSTALL_DIR="$dotnet_root" @@ -503,7 +506,8 @@ function MSBuild-Core { echo "Build failed with exit code $exit_code. Check errors above." # When running on Azure Pipelines, override the returned exit code to avoid double logging. - if [[ "$ci" == "true" && -n ${SYSTEM_TEAMPROJECT:-} ]]; then + # Skip this when the build is a child of the VMR orchestrator build. + if [[ "$ci" == true && -n ${SYSTEM_TEAMPROJECT:-} && "$product_build" != true && "$properties" != *"DotNetBuildRepo=true"* ]]; then Write-PipelineSetResult -result "Failed" -message "msbuild execution failed." # Exiting with an exit code causes the azure pipelines task to log yet another "noise" error # The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error diff --git a/eng/dependabot/Directory.Build.props b/eng/dependabot/Directory.Build.props index b1987371bd5..30218d4856e 100644 --- a/eng/dependabot/Directory.Build.props +++ b/eng/dependabot/Directory.Build.props @@ -1,4 +1,12 @@ - - + + + false + false + diff --git a/eng/dependabot/NuGet.config b/eng/dependabot/NuGet.config deleted file mode 120000 index f0aeba16780..00000000000 --- a/eng/dependabot/NuGet.config +++ /dev/null @@ -1 +0,0 @@ -../../NuGet.config \ No newline at end of file diff --git a/eng/dependabot/Versions.props b/eng/dependabot/Versions.props index befaf63d821..4ce9f25830e 100644 --- a/eng/dependabot/Versions.props +++ b/eng/dependabot/Versions.props @@ -1,27 +1,8 @@ - - - - 1.10.4 - 12.19.1 - 12.17.1 - 2.16.1 - 1.6.11 - 4.3.2 - 5.0.0 - - - 13.0.3 - 10.9.0 - 6.5.0 - 3.5.8.1 - - - 4.18.4 - DynamicProxyGenAssembly2 - 
0024000004800000940000000602000000240000525341310004000001000100c547cac37abd99c8db225ef2f6c8a3602f3b3606cc9891605d02baa56104f4cfc0734aa39b93bf7852f7d9266654753cc297e7d2edfe0bac1cdcf9f717241550e0a7b191195b7667bb4f64bcb8e2121380fd1d9d46ad2d92d2d15605093924cceaf74c4861eff62abf69b9291ed0a340e113be11e6a7d3113e92484cf7045cc7 - + + + + + + diff --git a/eng/dependabot/dependabot.csproj b/eng/dependabot/dependabot.csproj deleted file mode 100644 index d6029d18934..00000000000 --- a/eng/dependabot/dependabot.csproj +++ /dev/null @@ -1,2 +0,0 @@ - - diff --git a/eng/dependabot/independent/Directory.Build.props b/eng/dependabot/independent/Directory.Build.props new file mode 100644 index 00000000000..fe86741bd6c --- /dev/null +++ b/eng/dependabot/independent/Directory.Build.props @@ -0,0 +1,5 @@ + + + + + diff --git a/eng/dependabot/independent/NuGet.config b/eng/dependabot/independent/NuGet.config new file mode 120000 index 00000000000..941555881f8 --- /dev/null +++ b/eng/dependabot/independent/NuGet.config @@ -0,0 +1 @@ +../../../NuGet.config \ No newline at end of file diff --git a/eng/dependabot/Packages.props b/eng/dependabot/independent/Packages.props similarity index 89% rename from eng/dependabot/Packages.props rename to eng/dependabot/independent/Packages.props index 6c90f3e412a..e9ae84fb19b 100644 --- a/eng/dependabot/Packages.props +++ b/eng/dependabot/independent/Packages.props @@ -1,6 +1,6 @@ @@ -8,7 +8,6 @@ - @@ -19,5 +18,6 @@ + diff --git a/eng/dependabot/independent/Versions.props b/eng/dependabot/independent/Versions.props new file mode 100644 index 00000000000..39b033bb021 --- /dev/null +++ b/eng/dependabot/independent/Versions.props @@ -0,0 +1,27 @@ + + + + + 1.10.4 + 12.19.1 + 12.17.1 + 2.17.1 + 1.6.13 + 4.3.2 + 5.0.0 + + + 13.0.3 + 11.0.0 + 6.5.0 + 3.7.305.7 + + + 4.18.4 + DynamicProxyGenAssembly2 + 
0024000004800000940000000602000000240000525341310004000001000100c547cac37abd99c8db225ef2f6c8a3602f3b3606cc9891605d02baa56104f4cfc0734aa39b93bf7852f7d9266654753cc297e7d2edfe0bac1cdcf9f717241550e0a7b191195b7667bb4f64bcb8e2121380fd1d9d46ad2d92d2d15605093924cceaf74c4861eff62abf69b9291ed0a340e113be11e6a7d3113e92484cf7045cc7 + + diff --git a/eng/dependabot/independent/dependabot.csproj b/eng/dependabot/independent/dependabot.csproj new file mode 100644 index 00000000000..4f10237eb1b --- /dev/null +++ b/eng/dependabot/independent/dependabot.csproj @@ -0,0 +1,6 @@ + + + + net6.0 + + diff --git a/eng/dependabot/net6.0/Directory.Build.props b/eng/dependabot/net6.0/Directory.Build.props index b1987371bd5..fe86741bd6c 100644 --- a/eng/dependabot/net6.0/Directory.Build.props +++ b/eng/dependabot/net6.0/Directory.Build.props @@ -1,4 +1,5 @@ - + + diff --git a/eng/dependabot/net6.0/NuGet.config b/eng/dependabot/net6.0/NuGet.config deleted file mode 100644 index 941555881f8..00000000000 --- a/eng/dependabot/net6.0/NuGet.config +++ /dev/null @@ -1 +0,0 @@ -../../../NuGet.config \ No newline at end of file diff --git a/eng/dependabot/net6.0/NuGet.config b/eng/dependabot/net6.0/NuGet.config new file mode 120000 index 00000000000..941555881f8 --- /dev/null +++ b/eng/dependabot/net6.0/NuGet.config @@ -0,0 +1 @@ +../../../NuGet.config \ No newline at end of file diff --git a/eng/dependabot/net6.0/Packages.props b/eng/dependabot/net6.0/Packages.props index 10247d15f81..765df3ba371 100644 --- a/eng/dependabot/net6.0/Packages.props +++ b/eng/dependabot/net6.0/Packages.props @@ -7,6 +7,11 @@ - + + diff --git a/eng/dependabot/net6.0/Versions.props b/eng/dependabot/net6.0/Versions.props index d3ee0d85e2d..6f61d70caba 100644 --- a/eng/dependabot/net6.0/Versions.props +++ b/eng/dependabot/net6.0/Versions.props @@ -10,6 +10,6 @@ 6.0.0 - 6.0.26 + 6.0.27 diff --git a/eng/dependabot/net6.0/dependabot.csproj b/eng/dependabot/net6.0/dependabot.csproj index d6029d18934..4f10237eb1b 100644 --- 
a/eng/dependabot/net6.0/dependabot.csproj +++ b/eng/dependabot/net6.0/dependabot.csproj @@ -1,2 +1,6 @@ - + + + net6.0 + + diff --git a/eng/dependabot/net7.0/Directory.Build.props b/eng/dependabot/net7.0/Directory.Build.props index b1987371bd5..fe86741bd6c 100644 --- a/eng/dependabot/net7.0/Directory.Build.props +++ b/eng/dependabot/net7.0/Directory.Build.props @@ -1,4 +1,5 @@ - + + diff --git a/eng/dependabot/net7.0/NuGet.config b/eng/dependabot/net7.0/NuGet.config deleted file mode 100644 index 941555881f8..00000000000 --- a/eng/dependabot/net7.0/NuGet.config +++ /dev/null @@ -1 +0,0 @@ -../../../NuGet.config \ No newline at end of file diff --git a/eng/dependabot/net7.0/NuGet.config b/eng/dependabot/net7.0/NuGet.config new file mode 120000 index 00000000000..941555881f8 --- /dev/null +++ b/eng/dependabot/net7.0/NuGet.config @@ -0,0 +1 @@ +../../../NuGet.config \ No newline at end of file diff --git a/eng/dependabot/net7.0/Packages.props b/eng/dependabot/net7.0/Packages.props index a63f247f84a..58a73e936bb 100644 --- a/eng/dependabot/net7.0/Packages.props +++ b/eng/dependabot/net7.0/Packages.props @@ -7,6 +7,11 @@ - + + diff --git a/eng/dependabot/net7.0/Versions.props b/eng/dependabot/net7.0/Versions.props index 9081b609ba5..4943500e648 100644 --- a/eng/dependabot/net7.0/Versions.props +++ b/eng/dependabot/net7.0/Versions.props @@ -10,6 +10,6 @@ 7.0.0 - 7.0.15 + 7.0.16 diff --git a/eng/dependabot/net7.0/dependabot.csproj b/eng/dependabot/net7.0/dependabot.csproj index d6029d18934..a039286ea55 100644 --- a/eng/dependabot/net7.0/dependabot.csproj +++ b/eng/dependabot/net7.0/dependabot.csproj @@ -1,2 +1,6 @@ - + + + net7.0 + + diff --git a/eng/dependabot/net8.0/Directory.Build.props b/eng/dependabot/net8.0/Directory.Build.props index b1987371bd5..fe86741bd6c 100644 --- a/eng/dependabot/net8.0/Directory.Build.props +++ b/eng/dependabot/net8.0/Directory.Build.props @@ -1,4 +1,5 @@ - + + diff --git a/eng/dependabot/net8.0/NuGet.config 
b/eng/dependabot/net8.0/NuGet.config deleted file mode 100644 index 941555881f8..00000000000 --- a/eng/dependabot/net8.0/NuGet.config +++ /dev/null @@ -1 +0,0 @@ -../../../NuGet.config \ No newline at end of file diff --git a/eng/dependabot/net8.0/NuGet.config b/eng/dependabot/net8.0/NuGet.config new file mode 120000 index 00000000000..941555881f8 --- /dev/null +++ b/eng/dependabot/net8.0/NuGet.config @@ -0,0 +1 @@ +../../../NuGet.config \ No newline at end of file diff --git a/eng/dependabot/net8.0/Packages.props b/eng/dependabot/net8.0/Packages.props index 8102a0c8441..c0b69ea13ce 100644 --- a/eng/dependabot/net8.0/Packages.props +++ b/eng/dependabot/net8.0/Packages.props @@ -7,6 +7,11 @@ - + + diff --git a/eng/dependabot/net8.0/Versions.props b/eng/dependabot/net8.0/Versions.props index bd2a060ebc6..64a1ff17ad4 100644 --- a/eng/dependabot/net8.0/Versions.props +++ b/eng/dependabot/net8.0/Versions.props @@ -10,6 +10,6 @@ 8.0.0 - 8.0.1 + 8.0.2 diff --git a/eng/dependabot/net8.0/dependabot.csproj b/eng/dependabot/net8.0/dependabot.csproj index d6029d18934..f4ca0368fe9 100644 --- a/eng/dependabot/net8.0/dependabot.csproj +++ b/eng/dependabot/net8.0/dependabot.csproj @@ -1,2 +1,6 @@ - + + + net8.0 + + diff --git a/eng/dependabot/netcoreapp3.1/Directory.Build.props b/eng/dependabot/netcoreapp3.1/Directory.Build.props index b1987371bd5..fe86741bd6c 100644 --- a/eng/dependabot/netcoreapp3.1/Directory.Build.props +++ b/eng/dependabot/netcoreapp3.1/Directory.Build.props @@ -1,4 +1,5 @@ - + + diff --git a/eng/dependabot/netcoreapp3.1/NuGet.config b/eng/dependabot/netcoreapp3.1/NuGet.config deleted file mode 100644 index 941555881f8..00000000000 --- a/eng/dependabot/netcoreapp3.1/NuGet.config +++ /dev/null @@ -1 +0,0 @@ -../../../NuGet.config \ No newline at end of file diff --git a/eng/dependabot/netcoreapp3.1/NuGet.config b/eng/dependabot/netcoreapp3.1/NuGet.config new file mode 120000 index 00000000000..941555881f8 --- /dev/null +++ 
b/eng/dependabot/netcoreapp3.1/NuGet.config @@ -0,0 +1 @@ +../../../NuGet.config \ No newline at end of file diff --git a/eng/dependabot/netcoreapp3.1/Packages.props b/eng/dependabot/netcoreapp3.1/Packages.props index 03e49cc458c..ba18bea97f4 100644 --- a/eng/dependabot/netcoreapp3.1/Packages.props +++ b/eng/dependabot/netcoreapp3.1/Packages.props @@ -3,6 +3,11 @@ Packages in this file have versions updated periodically by Dependabot specifically for .NET Core 3.1. --> - + + diff --git a/eng/dependabot/netcoreapp3.1/dependabot.csproj b/eng/dependabot/netcoreapp3.1/dependabot.csproj index d6029d18934..f7ab42ca685 100644 --- a/eng/dependabot/netcoreapp3.1/dependabot.csproj +++ b/eng/dependabot/netcoreapp3.1/dependabot.csproj @@ -1,2 +1,6 @@ - + + + netcoreapp3.1 + + diff --git a/eng/dependabot/nuget.org/Directory.Build.props b/eng/dependabot/nuget.org/Directory.Build.props index 80198295e85..fe86741bd6c 100644 --- a/eng/dependabot/nuget.org/Directory.Build.props +++ b/eng/dependabot/nuget.org/Directory.Build.props @@ -1,4 +1,5 @@ - + + diff --git a/eng/dependabot/nuget.org/Packages.props b/eng/dependabot/nuget.org/Packages.props index d10218b507f..6ec71151d26 100644 --- a/eng/dependabot/nuget.org/Packages.props +++ b/eng/dependabot/nuget.org/Packages.props @@ -5,17 +5,17 @@ - + diff --git a/eng/dependabot/nuget.org/Versions.props b/eng/dependabot/nuget.org/Versions.props index f6d66dd72ba..285eba92209 100644 --- a/eng/dependabot/nuget.org/Versions.props +++ b/eng/dependabot/nuget.org/Versions.props @@ -2,6 +2,6 @@ - 8.0.452401 + 8.0.510501 diff --git a/eng/dependabot/nuget.org/dependabot.csproj b/eng/dependabot/nuget.org/dependabot.csproj index d6029d18934..4f10237eb1b 100644 --- a/eng/dependabot/nuget.org/dependabot.csproj +++ b/eng/dependabot/nuget.org/dependabot.csproj @@ -1,2 +1,6 @@ - + + + net6.0 + + diff --git a/eng/pipelines/scripts/Copy-ApiScanEligible.ps1 b/eng/pipelines/scripts/Copy-ApiScanEligible.ps1 index 4e58cbfcf39..df92d4dfab0 100644 --- 
a/eng/pipelines/scripts/Copy-ApiScanEligible.ps1 +++ b/eng/pipelines/scripts/Copy-ApiScanEligible.ps1 @@ -29,45 +29,6 @@ function Copy-File() { $script:copyCount++ } -# Check if the file is a ARM64 PE file -function Test-PeArm64() { - param( - [System.IO.FileInfo] $FileInfo - ) - $stream = [System.IO.File]::OpenRead($FileInfo.FullName) - $reader = [System.IO.BinaryReader]($stream) - # https://learn.microsoft.com/en-us/windows/win32/debug/pe-format - # MS DOS Magic Number - if (0x5A4D -ne $reader.ReadUInt16()) { - $reader.Dispose() - return $false - } - $stream.Position = 0x3C # NT Header offset, 4 bytes - $ntHeaderOffset = $reader.ReadUInt32() - $stream.Position = $ntHeaderOffset - # PE Magic Number, 4 bytes - if (0x00004550 -ne $reader.ReadUInt32()) { - $reader.Dispose() - return $false; - } - # Machine type, 2 bytes - # 0xAA64 is ARM64 - $isArm64 = 0xAA64 -eq $reader.ReadUInt16(); - $reader.Dispose() - return $isArm64 -} - -function Copy-Executable() { - param( - [System.IO.FileInfo] $FileInfo - ) - if (Test-PeArm64 -FileInfo $fileInfo) { - Skip-File -FileInfo $fileInfo -Reason "ARM64 PE" # ARM64 is not supported by ApiScan - } else { - Copy-File -FileInfo $fileInfo - } -} - function Skip-File() { param( [System.IO.FileInfo] $FileInfo, @@ -83,9 +44,9 @@ foreach ($fileInfo in (Get-ChildItem $SourcePath -Recurse -Attributes !Directory if ($fileInfo.Directory.FullName.Contains('symbols')) { # Skip symbols packages Skip-File -FileInfo $fileInfo -Reason "Symbols" } elseif ($fileInfo.Extension -eq ".dll") { # Library - Copy-Executable -FileInfo $fileInfo + Copy-File -FileInfo $fileInfo } elseif ($fileInfo.Extension -eq ".exe") { # Executable - Copy-Executable -FileInfo $fileInfo + Copy-File -FileInfo $fileInfo } elseif ($fileInfo.Extension -eq ".pdb") { # Program database Copy-File -FileInfo $fileInfo } else { diff --git a/global.json b/global.json index a658ae62e0a..f4ec64626fe 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "tools": { - "dotnet": 
"9.0.100-preview.1.24076.14", + "dotnet": "9.0.100-preview.1.24101.2", "runtimes": { "aspnetcore": [ "$(MicrosoftAspNetCoreApp60Version)", @@ -31,7 +31,7 @@ }, "msbuild-sdks": { "Microsoft.Build.NoTargets": "3.7.0", - "Microsoft.DotNet.Arcade.Sdk": "9.0.0-beta.24105.3", - "Microsoft.DotNet.Helix.Sdk": "9.0.0-beta.24105.3" + "Microsoft.DotNet.Arcade.Sdk": "9.0.0-beta.24151.5", + "Microsoft.DotNet.Helix.Sdk": "9.0.0-beta.24151.5" } } diff --git a/src/Directory.Build.props b/src/Directory.Build.props index d283687da5a..8779465660c 100644 --- a/src/Directory.Build.props +++ b/src/Directory.Build.props @@ -15,7 +15,7 @@ osx-arm64;osx-x64 $(SignOnlyRuntimeIdentifiers);$(SignAndNotarizeRuntimeIdentifiers) false - 15 + 25 diff --git a/src/Extensions/S3Storage/S3Storage.cs b/src/Extensions/S3Storage/S3Storage.cs index 1442f984157..a743c87407b 100644 --- a/src/Extensions/S3Storage/S3Storage.cs +++ b/src/Extensions/S3Storage/S3Storage.cs @@ -7,6 +7,7 @@ using Amazon.S3; using Amazon.S3.Model; using Amazon.S3.Transfer; +using Amazon.S3.Util; using Microsoft.Diagnostics.Monitoring.Extension.Common; using System; using System.Collections.Generic; @@ -73,7 +74,7 @@ public static async Task CreateAsync(S3StorageEgressProviderOptions throw new AmazonClientException("Failed to find AWS Credentials for constructing AWS service client"); IAmazonS3 s3Client = new AmazonS3Client(awsCredentials, configuration); - bool exists = await s3Client.DoesS3BucketExistAsync(options.BucketName); + bool exists = await AmazonS3Util.DoesS3BucketExistV2Async(s3Client, options.BucketName); if (!exists) await s3Client.PutBucketAsync(options.BucketName, cancellationToken); return new S3Storage(s3Client, options.BucketName, settings.Name, settings.ContentType); diff --git a/src/Microsoft.Diagnostics.Monitoring.HostingStartup/ParameterCapturing/FunctionProbes/FunctionProbesStub.cs b/src/Microsoft.Diagnostics.Monitoring.HostingStartup/ParameterCapturing/FunctionProbes/FunctionProbesStub.cs index 
e11f953a567..6d2cb53b482 100644 --- a/src/Microsoft.Diagnostics.Monitoring.HostingStartup/ParameterCapturing/FunctionProbes/FunctionProbesStub.cs +++ b/src/Microsoft.Diagnostics.Monitoring.HostingStartup/ParameterCapturing/FunctionProbes/FunctionProbesStub.cs @@ -31,7 +31,7 @@ public static void EnterProbeStub(ulong uniquifier, object[] args) try { s_inProbe = true; - probes.EnterProbe(uniquifier, args); + _ = probes.EnterProbe(uniquifier, args); } finally { diff --git a/src/Microsoft.Diagnostics.Monitoring.HostingStartup/ParameterCapturing/FunctionProbes/IFunctionProbes.cs b/src/Microsoft.Diagnostics.Monitoring.HostingStartup/ParameterCapturing/FunctionProbes/IFunctionProbes.cs index 1ddd0a93b48..b33985f23e9 100644 --- a/src/Microsoft.Diagnostics.Monitoring.HostingStartup/ParameterCapturing/FunctionProbes/IFunctionProbes.cs +++ b/src/Microsoft.Diagnostics.Monitoring.HostingStartup/ParameterCapturing/FunctionProbes/IFunctionProbes.cs @@ -11,6 +11,11 @@ internal interface IFunctionProbes { public void CacheMethods(IList methods); - public void EnterProbe(ulong uniquifier, object[] args); + /// + /// + /// The uniquifier which identifies the method calling the probe. + /// The arguments passed into the method. + /// True if the the arguments were captured by the probe. 
+ public bool EnterProbe(ulong uniquifier, object[] args); } } diff --git a/src/Microsoft.Diagnostics.Monitoring.HostingStartup/ParameterCapturing/FunctionProbes/LogEmittingProbes.cs b/src/Microsoft.Diagnostics.Monitoring.HostingStartup/ParameterCapturing/FunctionProbes/LogEmittingProbes.cs index 52cb68626ba..8a3682925ce 100644 --- a/src/Microsoft.Diagnostics.Monitoring.HostingStartup/ParameterCapturing/FunctionProbes/LogEmittingProbes.cs +++ b/src/Microsoft.Diagnostics.Monitoring.HostingStartup/ParameterCapturing/FunctionProbes/LogEmittingProbes.cs @@ -27,7 +27,7 @@ public void CacheMethods(IList methods) } } - public void EnterProbe(ulong uniquifier, object[] args) + public bool EnterProbe(ulong uniquifier, object[] args) { // We allow the instrumentation of system types, but these types can also be part of an ILogger implementation. // In addition, certain loggers don't log directly, but into a background thread. @@ -42,7 +42,7 @@ public void EnterProbe(ulong uniquifier, object[] args) if (!_logger.ShouldLog()) { - return; + return false; } FunctionProbesState? state = FunctionProbesStub.State; @@ -51,12 +51,12 @@ public void EnterProbe(ulong uniquifier, object[] args) !state.InstrumentedMethods.TryGetValue(uniquifier, out InstrumentedMethod? 
instrumentedMethod) || args.Length != instrumentedMethod?.SupportedParameters.Length) { - return; + return false; } if (instrumentedMethod.CaptureMode == ParameterCaptureMode.Disallowed) { - return; + return false; } string[] argValues; @@ -87,6 +87,8 @@ public void EnterProbe(ulong uniquifier, object[] args) } _logger.Log(instrumentedMethod.CaptureMode, instrumentedMethod.MethodTemplateString, argValues); + + return true; } } } diff --git a/src/Microsoft.Diagnostics.Monitoring.HostingStartup/ParameterCapturing/Pipeline/CaptureLimitPolicyProbes.cs b/src/Microsoft.Diagnostics.Monitoring.HostingStartup/ParameterCapturing/Pipeline/CaptureLimitPolicyProbes.cs new file mode 100644 index 00000000000..ea9815e121f --- /dev/null +++ b/src/Microsoft.Diagnostics.Monitoring.HostingStartup/ParameterCapturing/Pipeline/CaptureLimitPolicyProbes.cs @@ -0,0 +1,49 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using Microsoft.Diagnostics.Monitoring.HostingStartup.ParameterCapturing.FunctionProbes; +using System.Collections.Generic; +using System.Reflection; +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.Diagnostics.Monitoring.HostingStartup.ParameterCapturing.Pipeline +{ + internal sealed class CaptureLimitPolicyProbes : IFunctionProbes + { + private readonly IFunctionProbes _probes; + private readonly int _captureLimit; + private readonly TaskCompletionSource _stopRequest; + + private int _captureCount; + private bool _stopped; + + public CaptureLimitPolicyProbes(IFunctionProbes probes, int captureLimit, TaskCompletionSource stopRequest) + { + _probes = probes; + _captureLimit = captureLimit; + _stopRequest = stopRequest; + } + + public void CacheMethods(IList methods) => _probes.CacheMethods(methods); + + public bool EnterProbe(ulong uniquifier, object[] args) + { + // In addition to the stop request, use a flag to more quickly react to the limit being 
reached, + // limiting the amount of extra data being captured which is important in the case of hot paths. + if (_stopped) + { + return false; + } + + bool didCapture = _probes.EnterProbe(uniquifier, args); + if (didCapture && Interlocked.Increment(ref _captureCount) == _captureLimit) + { + _stopped = true; + _ = _stopRequest.TrySetResult(); + } + + return didCapture; + } + } +} diff --git a/src/Microsoft.Diagnostics.Monitoring.HostingStartup/ParameterCapturing/Pipeline/ParameterCapturingPipeline.cs b/src/Microsoft.Diagnostics.Monitoring.HostingStartup/ParameterCapturing/Pipeline/ParameterCapturingPipeline.cs index 9f5e27dfdbe..d2df9465d73 100644 --- a/src/Microsoft.Diagnostics.Monitoring.HostingStartup/ParameterCapturing/Pipeline/ParameterCapturingPipeline.cs +++ b/src/Microsoft.Diagnostics.Monitoring.HostingStartup/ParameterCapturing/Pipeline/ParameterCapturingPipeline.cs @@ -22,8 +22,11 @@ private sealed class CapturingRequest public CapturingRequest(StartCapturingParametersPayload payload, IFunctionProbes probes) { Payload = payload; - Probes = probes; StopRequest = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); + + Probes = payload.Configuration.CaptureLimit.HasValue + ? 
new CaptureLimitPolicyProbes(probes, payload.Configuration.CaptureLimit.Value, StopRequest) + : probes; } public StartCapturingParametersPayload Payload { get; } @@ -169,6 +172,11 @@ public void SubmitRequest(StartCapturingParametersPayload payload, IFunctionProb throw new ArgumentException(nameof(payload.Configuration.Methods)); } + if (payload.Configuration.CaptureLimit.HasValue && payload.Configuration.CaptureLimit.Value <= 0) + { + throw new ArgumentException(nameof(payload.Configuration.CaptureLimit)); + } + List _deniedMethodDescriptions = new(); foreach (MethodDescription methodDescription in payload.Configuration.Methods) { diff --git a/src/Microsoft.Diagnostics.Monitoring.Options/AzureAdOptions.cs b/src/Microsoft.Diagnostics.Monitoring.Options/AzureAdOptions.cs index b788bcff825..a696a70bdd2 100644 --- a/src/Microsoft.Diagnostics.Monitoring.Options/AzureAdOptions.cs +++ b/src/Microsoft.Diagnostics.Monitoring.Options/AzureAdOptions.cs @@ -38,10 +38,5 @@ internal sealed partial class AzureAdOptions Description = nameof(OptionsDisplayStrings.DisplayAttributeDescription_AzureAdOptions_RequiredRole))] [Required] public string RequiredRole { get; set; } - - [Display( - ResourceType = typeof(OptionsDisplayStrings), - Description = nameof(OptionsDisplayStrings.DisplayAttributeDescription_AzureAdOptions_SwaggerScope))] - public string SwaggerScope { get; set; } } } diff --git a/src/Microsoft.Diagnostics.Monitoring.Options/OptionsDisplayStrings.Designer.cs b/src/Microsoft.Diagnostics.Monitoring.Options/OptionsDisplayStrings.Designer.cs index dcf8b614dcc..07c5a1057c0 100644 --- a/src/Microsoft.Diagnostics.Monitoring.Options/OptionsDisplayStrings.Designer.cs +++ b/src/Microsoft.Diagnostics.Monitoring.Options/OptionsDisplayStrings.Designer.cs @@ -240,15 +240,6 @@ public static string DisplayAttributeDescription_AzureAdOptions_RequiredRole { } } - /// - /// Looks up a localized string similar to The API scope required by users to be able to interactively authenticate 
using the in-box Swagger UI. If not specified, users will not be able to interactively authenticate.. - /// - public static string DisplayAttributeDescription_AzureAdOptions_SwaggerScope { - get { - return ResourceManager.GetString("DisplayAttributeDescription_AzureAdOptions_SwaggerScope", resourceCulture); - } - } - /// /// Looks up a localized string similar to The tenant id of the Azure Active Directory tenant, or its tenant domain.. /// diff --git a/src/Microsoft.Diagnostics.Monitoring.Options/OptionsDisplayStrings.resx b/src/Microsoft.Diagnostics.Monitoring.Options/OptionsDisplayStrings.resx index 814494a9d77..5e5300bb214 100644 --- a/src/Microsoft.Diagnostics.Monitoring.Options/OptionsDisplayStrings.resx +++ b/src/Microsoft.Diagnostics.Monitoring.Options/OptionsDisplayStrings.resx @@ -712,10 +712,6 @@ The role required to be able to authenticate. The description provided for the RequiredRole parameter on AzureAdOptions. - - The API scope required by users to be able to interactively authenticate using the in-box Swagger UI. If not specified, users will not be able to interactively authenticate. - The description provided for the SwaggerScope parameter on AzureAdOptions. - The tenant id of the Azure Active Directory tenant, or its tenant domain. The description provided for the TenantId parameter on AzureAdOptions. @@ -770,4 +766,7 @@ The filters that determine which exceptions should be included/excluded when collecting exceptions. + + The filters that determine which exceptions should be included/excluded when collecting exceptions. 
+ \ No newline at end of file diff --git a/src/Microsoft.Diagnostics.Monitoring.WebApi/Models/CaptureParametersConfiguration.cs b/src/Microsoft.Diagnostics.Monitoring.WebApi/Models/CaptureParametersConfiguration.cs index 4e4410cfe2b..e967a080b30 100644 --- a/src/Microsoft.Diagnostics.Monitoring.WebApi/Models/CaptureParametersConfiguration.cs +++ b/src/Microsoft.Diagnostics.Monitoring.WebApi/Models/CaptureParametersConfiguration.cs @@ -19,5 +19,9 @@ public class CaptureParametersConfiguration [JsonPropertyName("useDebuggerDisplayAttribute")] public bool UseDebuggerDisplayAttribute { get; set; } = true; + + [JsonPropertyName("captureLimit")] + [Range(1, int.MaxValue)] + public int? CaptureLimit { get; set; } } } diff --git a/src/Tests/Microsoft.Diagnostics.Monitoring.ConfigurationSchema/Microsoft.Diagnostics.Monitoring.ConfigurationSchema.csproj b/src/Tests/Microsoft.Diagnostics.Monitoring.ConfigurationSchema/Microsoft.Diagnostics.Monitoring.ConfigurationSchema.csproj index a232376963e..a7af91ecb20 100644 --- a/src/Tests/Microsoft.Diagnostics.Monitoring.ConfigurationSchema/Microsoft.Diagnostics.Monitoring.ConfigurationSchema.csproj +++ b/src/Tests/Microsoft.Diagnostics.Monitoring.ConfigurationSchema/Microsoft.Diagnostics.Monitoring.ConfigurationSchema.csproj @@ -78,6 +78,7 @@ + diff --git a/src/Tests/Microsoft.Diagnostics.Monitoring.ConfigurationSchema/SchemaGenerator.cs b/src/Tests/Microsoft.Diagnostics.Monitoring.ConfigurationSchema/SchemaGenerator.cs index c5a0bcca8e0..3770ce11241 100644 --- a/src/Tests/Microsoft.Diagnostics.Monitoring.ConfigurationSchema/SchemaGenerator.cs +++ b/src/Tests/Microsoft.Diagnostics.Monitoring.ConfigurationSchema/SchemaGenerator.cs @@ -14,6 +14,7 @@ using Newtonsoft.Json.Converters; using NJsonSchema; using NJsonSchema.Generation; +using NJsonSchema.NewtonsoftJson.Generation; using System; using System.Collections.Generic; using System.Linq; @@ -306,9 +307,14 @@ public GenerationContext(JsonSchema rootSchema) { Schema = rootSchema; - 
_settings = new JsonSchemaGeneratorSettings(); - _settings.SerializerSettings = new JsonSerializerSettings(); - _settings.SerializerSettings.Converters.Add(new StringEnumConverter()); + JsonSerializerSettings serializerSettings = new(); + serializerSettings.Converters.Add(new StringEnumConverter()); + + _settings = new NewtonsoftJsonSchemaGeneratorSettings + { + SerializerSettings = serializerSettings + }; + _settings.SchemaProcessors.Add(new ExperimentalSchemaProcessor()); _resolver = new JsonSchemaResolver(rootSchema, _settings); diff --git a/src/Tests/Microsoft.Diagnostics.Monitoring.HostingStartup.UnitTests/ParameterCapturing/Pipeline/CaptureLimitPolicyProbesTests.cs b/src/Tests/Microsoft.Diagnostics.Monitoring.HostingStartup.UnitTests/ParameterCapturing/Pipeline/CaptureLimitPolicyProbesTests.cs new file mode 100644 index 00000000000..1e196a890f3 --- /dev/null +++ b/src/Tests/Microsoft.Diagnostics.Monitoring.HostingStartup.UnitTests/ParameterCapturing/Pipeline/CaptureLimitPolicyProbesTests.cs @@ -0,0 +1,135 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. 
+ +using Microsoft.Diagnostics.Monitoring.HostingStartup.ParameterCapturing.Pipeline; +using Microsoft.Diagnostics.Monitoring.TestCommon; +using System; +using System.Collections.Generic; +using System.Reflection; +using System.Threading; +using System.Threading.Tasks; +using Xunit; + +namespace Microsoft.Diagnostics.Monitoring.HostingStartup.UnitTests.ParameterCapturing.Pipeline +{ + [TargetFrameworkMonikerTrait(TargetFrameworkMonikerExtensions.CurrentTargetFrameworkMoniker)] + public class CaptureLimitPolicyProbesTests + { + [Fact] + public void EnterProbe_RequestStop_OnLimitReached() + { + // Arrange + TaskCompletionSource requestStop = new(); + CaptureLimitPolicyProbes probes = new(new TestFunctionProbes(), captureLimit: 1, requestStop); + + // Act + probes.EnterProbe(1, []); + + // Assert + Assert.True(requestStop.Task.IsCompleted); + } + + [Fact] + public void EnterProbe_DoesNotRequestStop_WhenLimitNotReached() + { + // Arrange + TaskCompletionSource requestStop = new(); + CaptureLimitPolicyProbes probes = new(new TestFunctionProbes(), captureLimit: 2, requestStop); + + // Act + probes.EnterProbe(1, []); + + // Assert + Assert.False(requestStop.Task.IsCompleted); + } + + [Fact] + public void EnterProbe_DoesNotRequestStop_WhenProbeDoesNotCapture() + { + // Arrange + TestFunctionProbes testProbes = new(onEnterProbe: (_, _) => + { + return false; + }); + + TaskCompletionSource requestStop = new(); + CaptureLimitPolicyProbes probes = new(testProbes, captureLimit: 1, requestStop); + + // Act + probes.EnterProbe(1, []); + + // Assert + Assert.False(requestStop.Task.IsCompleted); + } + + [Fact] + public void EnterProbe_ShortCircuits_WhenLimitReached() + { + // Arrange + int invokeCount = 0; + TestFunctionProbes testProbes = new(onEnterProbe: (_, _) => + { + Interlocked.Increment(ref invokeCount); + return true; + }); + + TaskCompletionSource requestStop = new(); + CaptureLimitPolicyProbes probes = new(testProbes, captureLimit: 1, requestStop); + + // Act + 
probes.EnterProbe(1, []); + probes.EnterProbe(2, []); + + // Assert + Assert.Equal(1, invokeCount); + } + + [Fact] + public void CacheMethods_PassesThrough() + { + // Arrange + List expectedMethods = [(MethodInfo)MethodBase.GetCurrentMethod()]; + IList actualMethods = null; + + TaskCompletionSource requestStop = new(); + CaptureLimitPolicyProbes probes = new(new TestFunctionProbes( + onCacheMethods: (methods) => + { + actualMethods = methods; + }), captureLimit: 1, requestStop); + + // Act + probes.CacheMethods(expectedMethods); + + // Assert + Assert.Equal(expectedMethods, actualMethods); + } + + [Fact] + public void EnterProbe_PassesThrough() + { + // Arrange + const ulong expectedUniquifier = 15; + object[] expectedArgs = [new Uri("https://www.example.com"), 10]; + + ulong? actualUniquifier = null; + object[] actualArgs = null; + + TaskCompletionSource requestStop = new(); + CaptureLimitPolicyProbes probes = new(new TestFunctionProbes( + onEnterProbe: (uniquifier, args) => + { + actualUniquifier = uniquifier; + actualArgs = args; + return true; + }), captureLimit: 1, requestStop); + + // Act + probes.EnterProbe(expectedUniquifier, expectedArgs); + + // Assert + Assert.Equal(expectedUniquifier, actualUniquifier); + Assert.Equal(expectedArgs, actualArgs); + } + } +} diff --git a/src/Tests/Microsoft.Diagnostics.Monitoring.HostingStartup.UnitTests/ParameterCapturing/Pipeline/ParameterCapturingPipelineTests.cs b/src/Tests/Microsoft.Diagnostics.Monitoring.HostingStartup.UnitTests/ParameterCapturing/Pipeline/ParameterCapturingPipelineTests.cs index d06d57833fe..b703a9b8641 100644 --- a/src/Tests/Microsoft.Diagnostics.Monitoring.HostingStartup.UnitTests/ParameterCapturing/Pipeline/ParameterCapturingPipelineTests.cs +++ b/src/Tests/Microsoft.Diagnostics.Monitoring.HostingStartup.UnitTests/ParameterCapturing/Pipeline/ParameterCapturingPipelineTests.cs @@ -19,25 +19,14 @@ namespace Microsoft.Diagnostics.Monitoring.HostingStartup.UnitTests.ParameterCapturing.Pipeline { - 
internal sealed class TestFunctionProbes : IFunctionProbes - { - public void CacheMethods(IList methods) - { - } - - public void EnterProbe(ulong uniquifier, object[] args) - { - } - } - internal sealed class TestFunctionProbesManager : IFunctionProbesManager { - private readonly Action> _onStart; + private readonly Action, IFunctionProbes> _onStart; private readonly Action _onStop; public event EventHandler OnProbeFault; - public TestFunctionProbesManager(Action> onStart = null, Action onStop = null) + public TestFunctionProbesManager(Action, IFunctionProbes> onStart = null, Action onStop = null) { _onStart = onStart; _onStop = onStop; @@ -51,7 +40,7 @@ public void TriggerFault(MethodInfo method) public Task StartCapturingAsync(IList methods, IFunctionProbes probes, CancellationToken token) { - _onStart?.Invoke(methods); + _onStart?.Invoke(methods, probes); return Task.CompletedTask; } @@ -164,7 +153,7 @@ public async Task Request_DoesInstallAndNotify() TestFunctionProbes probes = new(); TestFunctionProbesManager probeManager = new( - onStart: (_) => + onStart: (_, _) => { probeManagerStartSource.TrySetResult(); }); @@ -207,6 +196,20 @@ public void Request_DoesRejectDenyListMatch() Assert.Throws(() => pipeline.SubmitRequest(payload, probes)); } + [Theory] + [InlineData(0)] + [InlineData(-1)] + public void Request_InvalidCaptureLimit_Throws(int captureLimit) + { + // Arrange + ParameterCapturingPipeline pipeline = new(new TestFunctionProbesManager(), new TestParameterCapturingCallbacks(), new TestMethodDescriptionValidator()); + TestFunctionProbes probes = new(); + StartCapturingParametersPayload payload = CreateStartCapturingPayload(Timeout.InfiniteTimeSpan, captureLimit: captureLimit); + + // Act & Assert + Assert.Throws(() => pipeline.SubmitRequest(payload, probes)); + } + [Fact] public async Task UnresolvableMethod_DoesNotify() { @@ -341,6 +344,57 @@ public async Task Request_StopsAfterDuration() Assert.Equal(payload.RequestId, stoppedRequest); } + [Theory] + 
[InlineData(1)] + [InlineData(2)] + [InlineData(10)] + public async Task Request_StopsAfterCaptureLimit(int captureLimit) + { + // Arrange + using CancellationTokenSource cts = new(); + cts.CancelAfter(CommonTestTimeouts.GeneralTimeout); + + TaskCompletionSource onStopCallbackSource = new(TaskCreationOptions.RunContinuationsAsynchronously); + TaskCompletionSource onStartCallbackSource = new(TaskCreationOptions.RunContinuationsAsynchronously); + + using IDisposable registration = cts.Token.Register(() => + { + _ = onStartCallbackSource.TrySetCanceled(cts.Token); + _ = onStopCallbackSource.TrySetCanceled(cts.Token); + }); + + TestFunctionProbes probes = new(); + TestFunctionProbesManager probeManager = new( + onStart: (_, probes) => + { + onStartCallbackSource.TrySetResult(probes); + }); + + TestParameterCapturingCallbacks callbacks = new( + onCapturingStop: (requestId) => + { + onStopCallbackSource.TrySetResult(requestId); + }); + + ParameterCapturingPipeline pipeline = new(probeManager, callbacks, new TestMethodDescriptionValidator()); + StartCapturingParametersPayload payload = CreateStartCapturingPayload(Timeout.InfiniteTimeSpan, captureLimit: captureLimit); + + Task pipelineTask = pipeline.RunAsync(cts.Token); + pipeline.SubmitRequest(payload, probes); + IFunctionProbes pipelineProbes = await onStartCallbackSource.Task; + + // Act + for (int i = 0; i < captureLimit; i++) + { + pipelineProbes.EnterProbe((ulong)i, []); + } + + + // Assert + Guid stoppedRequest = await onStopCallbackSource.Task; + Assert.Equal(payload.RequestId, stoppedRequest); + } + [Fact] public async Task ProbeFault_DoesNotify() { @@ -417,7 +471,7 @@ public async Task RunAsync_ThrowsOnCapturingStopFailure() Assert.Equal(ex, thrownException); } - private StartCapturingParametersPayload CreateStartCapturingPayload(TimeSpan duration) + private StartCapturingParametersPayload CreateStartCapturingPayload(TimeSpan duration, int? 
captureLimit = null) { string moduleName = typeof(ParameterCapturingPipelineTests).Module.Name; Assert.NotNull(moduleName); @@ -439,7 +493,8 @@ private StartCapturingParametersPayload CreateStartCapturingPayload(TimeSpan dur TypeName = typeName, MethodName = nameof(CreateStartCapturingPayload) } - } + }, + CaptureLimit = captureLimit } }; } diff --git a/src/Tests/Microsoft.Diagnostics.Monitoring.HostingStartup.UnitTests/ParameterCapturing/Pipeline/TestFunctionProbes.cs b/src/Tests/Microsoft.Diagnostics.Monitoring.HostingStartup.UnitTests/ParameterCapturing/Pipeline/TestFunctionProbes.cs new file mode 100644 index 00000000000..664e6a84473 --- /dev/null +++ b/src/Tests/Microsoft.Diagnostics.Monitoring.HostingStartup.UnitTests/ParameterCapturing/Pipeline/TestFunctionProbes.cs @@ -0,0 +1,35 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using Microsoft.Diagnostics.Monitoring.HostingStartup.ParameterCapturing.FunctionProbes; +using System; +using System.Collections.Generic; +using System.Reflection; + +namespace Microsoft.Diagnostics.Monitoring.HostingStartup.UnitTests.ParameterCapturing.Pipeline +{ + internal sealed class TestFunctionProbes : IFunctionProbes + { + private readonly Func _onEnterProbe; + private readonly Action> _onCacheMethods; + + public TestFunctionProbes(Func onEnterProbe = null, Action> onCacheMethods = null) + { + _onEnterProbe = onEnterProbe; + _onCacheMethods = onCacheMethods; + } + + + public void CacheMethods(IList methods) + { + _onCacheMethods?.Invoke(methods); + } + + public bool EnterProbe(ulong uniquifier, object[] args) + { + return _onEnterProbe != null + ? 
_onEnterProbe(uniquifier, args) + : true; + } + } +} diff --git a/src/Tests/Microsoft.Diagnostics.Monitoring.OpenApiGen/Program.cs b/src/Tests/Microsoft.Diagnostics.Monitoring.OpenApiGen/Program.cs index 6fe847537c6..41ee7d2f2cd 100644 --- a/src/Tests/Microsoft.Diagnostics.Monitoring.OpenApiGen/Program.cs +++ b/src/Tests/Microsoft.Diagnostics.Monitoring.OpenApiGen/Program.cs @@ -6,8 +6,6 @@ using Microsoft.Diagnostics.Tools.Monitor.Swagger; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; -using Microsoft.OpenApi.Models; -using Microsoft.OpenApi.Writers; using Swashbuckle.AspNetCore.Swagger; using System; using System.Globalization; @@ -47,14 +45,10 @@ public static void Main(string[] args) }) .Build(); - // Generate the OpenAPI document - OpenApiDocument document = host.Services - .GetRequiredService() - .GetSwagger("v1"); - - // Serialize the document to the file + // Serialize the OpenApi document using StringWriter outputWriter = new(CultureInfo.InvariantCulture); - document.SerializeAsV3(new OpenApiJsonWriter(outputWriter)); + ISwaggerProvider provider = host.Services.GetRequiredService(); + provider.WriteTo(outputWriter); outputWriter.Flush(); // Normalize line endings before writing diff --git a/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/Options/OptionsExtensions.cs b/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/Options/OptionsExtensions.cs index 995a37b34db..0a3cbf7cf05 100644 --- a/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/Options/OptionsExtensions.cs +++ b/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/Options/OptionsExtensions.cs @@ -205,7 +205,6 @@ public static RootOptions UseAzureAd(this RootOptions options) return options.UseAzureAd( tenantId: Guid.NewGuid().ToString("D"), clientId: Guid.NewGuid().ToString("D"), - swaggerScope: Guid.NewGuid().ToString("D"), requiredRole: Guid.NewGuid().ToString("D")); } @@ -214,18 +213,16 @@ public static 
RootOptions UseAzureAd(this RootOptions options, string requiredRo return options.UseAzureAd( tenantId: Guid.NewGuid().ToString("D"), clientId: Guid.NewGuid().ToString("D"), - swaggerScope: Guid.NewGuid().ToString("D"), requiredRole: requiredRole); } - public static RootOptions UseAzureAd(this RootOptions options, string tenantId, string clientId, string swaggerScope, string requiredRole) + public static RootOptions UseAzureAd(this RootOptions options, string tenantId, string clientId, string requiredRole) { return options.UseAzureAd(new AzureAdOptions { TenantId = tenantId, ClientId = clientId, - RequiredRole = requiredRole, - SwaggerScope = swaggerScope + RequiredRole = requiredRole }); } diff --git a/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/RootTests.cs b/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/RootTests.cs index 24fd01f4daa..841a6d80243 100644 --- a/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/RootTests.cs +++ b/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.FunctionalTests/RootTests.cs @@ -37,12 +37,12 @@ public async Task RootRoutesReturnTest() await using MonitorCollectRunner toolRunner = new(_outputHelper); await toolRunner.StartAsync(); - // Test default URL root returns HTTP 302 meaning that its an explicit redirect rather than implicitly handled by something unexpected + // Test that the root route returns HTTP 200 OK using HttpClient defaultHttpClient = await toolRunner.CreateHttpClientDefaultAddressAsync(_httpClientFactory, ServiceProviderFixture.HttpClientName_NoRedirect); ApiClient defaultApiClient = new(_outputHelper, defaultHttpClient); var rootResult = await defaultApiClient.GetRootAsync(); - Assert.Equal(HttpStatusCode.Redirect, rootResult.StatusCode); + Assert.Equal(HttpStatusCode.OK, rootResult.StatusCode); // Disabled as there doesn't seem to be anything different about the metrics root URL from the one above. 
diff --git a/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.UnitTests/CollectionRulePipelineTests.cs b/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.UnitTests/CollectionRulePipelineTests.cs index 88d4fee2c73..1145a893d91 100644 --- a/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.UnitTests/CollectionRulePipelineTests.cs +++ b/src/Tests/Microsoft.Diagnostics.Monitoring.Tool.UnitTests/CollectionRulePipelineTests.cs @@ -229,7 +229,7 @@ public Task CollectionRulePipeline_EventMeterTriggerTest_Gauge(TargetFrameworkMo /// /// Test that the pipeline works with the EventMeter trigger greater-than (histogram instrument). /// - [Theory] + [Theory(Skip = "https://github.com/dotnet/dotnet-monitor/issues/6154")] [MemberData(nameof(GetCurrentTfm))] public Task CollectionRulePipeline_EventMeterTriggerTest_Histogram_GreaterThan(TargetFrameworkMoniker appTfm) { diff --git a/src/Tests/Microsoft.Diagnostics.Monitoring.UnitTestApp/Scenarios/FunctionProbes/PerFunctionProbeProxy.cs b/src/Tests/Microsoft.Diagnostics.Monitoring.UnitTestApp/Scenarios/FunctionProbes/PerFunctionProbeProxy.cs index cbf1c108457..64fd7b69f5e 100644 --- a/src/Tests/Microsoft.Diagnostics.Monitoring.UnitTestApp/Scenarios/FunctionProbes/PerFunctionProbeProxy.cs +++ b/src/Tests/Microsoft.Diagnostics.Monitoring.UnitTestApp/Scenarios/FunctionProbes/PerFunctionProbeProxy.cs @@ -83,14 +83,16 @@ public bool TryGetProbeAssertException(MethodInfo method, out XunitException exc return exception != null; } - public void EnterProbe(ulong uniquifier, object[] args) + public bool EnterProbe(ulong uniquifier, object[] args) { if (!_perFunctionProbes.TryGetValue(uniquifier, out PerFunctionProbeWrapper probe)) { - return; + return false; } probe.Invoke(args); + + return true; } public void CacheMethods(IList methods) diff --git a/src/Tools/dotnet-monitor/Auth/ApiKey/MonitorApiKeyAuthConfigurator.cs b/src/Tools/dotnet-monitor/Auth/ApiKey/MonitorApiKeyAuthConfigurator.cs index 9eba262300b..dc31e6b27df 100644 --- 
a/src/Tools/dotnet-monitor/Auth/ApiKey/MonitorApiKeyAuthConfigurator.cs +++ b/src/Tools/dotnet-monitor/Auth/ApiKey/MonitorApiKeyAuthConfigurator.cs @@ -10,7 +10,6 @@ using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; using Swashbuckle.AspNetCore.SwaggerGen; -using Swashbuckle.AspNetCore.SwaggerUI; using System; using System.Collections.Generic; @@ -67,10 +66,6 @@ public void ConfigureSwaggerGenAuth(SwaggerGenOptions options) options.AddBearerTokenAuthOption(ApiKeySecurityDefinitionName); } - public void ConfigureSwaggerUI(SwaggerUIOptions options) - { - } - public IStartupLogger CreateStartupLogger(ILogger logger, IServiceProvider serviceProvider) { return new AuthenticationStartupLoggerWrapper(() => diff --git a/src/Tools/dotnet-monitor/Auth/AzureAd/AzureAdAuthConfigurator.cs b/src/Tools/dotnet-monitor/Auth/AzureAd/AzureAdAuthConfigurator.cs index 73ddd624ffe..c77ba1a45bf 100644 --- a/src/Tools/dotnet-monitor/Auth/AzureAd/AzureAdAuthConfigurator.cs +++ b/src/Tools/dotnet-monitor/Auth/AzureAd/AzureAdAuthConfigurator.cs @@ -2,34 +2,24 @@ // The .NET Foundation licenses this file to you under the MIT license. 
using Microsoft.AspNetCore.Authentication.JwtBearer; -using Microsoft.AspNetCore.Builder; using Microsoft.Diagnostics.Monitoring.WebApi; -using Microsoft.Diagnostics.Tools.Monitor.Swagger; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; using Microsoft.Identity.Web; using Microsoft.OpenApi.Models; using Swashbuckle.AspNetCore.SwaggerGen; -using Swashbuckle.AspNetCore.SwaggerUI; using System; -using System.Collections.Generic; namespace Microsoft.Diagnostics.Tools.Monitor.Auth.AzureAd { internal sealed class AzureAdAuthConfigurator : IAuthenticationConfigurator { private readonly AzureAdOptions _azureAdOptions; - private readonly string _fqSwaggerScope; public AzureAdAuthConfigurator(AzureAdOptions azureAdOptions) { _azureAdOptions = azureAdOptions; - - if (_azureAdOptions.SwaggerScope != null) - { - _fqSwaggerScope = new Uri(_azureAdOptions.GetAppIdUri(), _azureAdOptions.SwaggerScope).ToString(); - } } public void ConfigureApiAuth(IServiceCollection services, HostBuilderContext context) @@ -60,60 +50,32 @@ public void ConfigureApiAuth(IServiceCollection services, HostBuilderContext con public void ConfigureSwaggerGenAuth(SwaggerGenOptions options) { const string OAuth2SecurityDefinitionName = "OAuth2"; - const string AzureAdBearerTokenSecurityDefinitionName = "AzureAd JWT"; - // Only present an option to interactively authenticate if a swagger scope is set. 
- if (_fqSwaggerScope != null) - { - Uri baseEndpoint = new Uri(_azureAdOptions.GetInstance(), FormattableString.Invariant($"{_azureAdOptions.GetTenantId()}/oauth2/v2.0/")); + Uri baseEndpoint = new Uri(_azureAdOptions.GetInstance(), FormattableString.Invariant($"{_azureAdOptions.GetTenantId()}/oauth2/v2.0/")); - options.AddSecurityDefinition(OAuth2SecurityDefinitionName, new OpenApiSecurityScheme + options.AddSecurityDefinition(OAuth2SecurityDefinitionName, new OpenApiSecurityScheme + { + Type = SecuritySchemeType.OAuth2, + Flows = new OpenApiOAuthFlows { - Type = SecuritySchemeType.OAuth2, - Flows = new OpenApiOAuthFlows + AuthorizationCode = new OpenApiOAuthFlow { - AuthorizationCode = new OpenApiOAuthFlow - { - AuthorizationUrl = new Uri(baseEndpoint, "authorize"), - TokenUrl = new Uri(baseEndpoint, "token"), - Scopes = new Dictionary() - { - { _fqSwaggerScope, Strings.HelpDescription_SwaggerScope_AzureAd } - } - } + AuthorizationUrl = new Uri(baseEndpoint, "authorize"), + TokenUrl = new Uri(baseEndpoint, "token") } - }); + } + }); - options.AddSecurityRequirement(new OpenApiSecurityRequirement + options.AddSecurityRequirement(new OpenApiSecurityRequirement + { { + new OpenApiSecurityScheme { - new OpenApiSecurityScheme - { - Reference = new OpenApiReference { Type= ReferenceType.SecurityScheme, Id = OAuth2SecurityDefinitionName } - }, - new [] { _fqSwaggerScope } - } - }); - } - - options.AddBearerTokenAuthOption(AzureAdBearerTokenSecurityDefinitionName); - } - - public void ConfigureSwaggerUI(SwaggerUIOptions options) - { - if (_fqSwaggerScope == null) - { - return; - } - - // Use authorization code flow instead of the implicit flow. - // AzureAD advises against using implicit flow and requires manual editing of the - // App Registration manifest to enable. - options.OAuthUsePkce(); - - // Set default field values in the UI. 
- options.OAuthClientId(_azureAdOptions.ClientId); - options.OAuthScopes(_fqSwaggerScope); + Reference = new OpenApiReference { Type= ReferenceType.SecurityScheme, Id = OAuth2SecurityDefinitionName } + }, + Array.Empty() + } + }); } public IStartupLogger CreateStartupLogger(ILogger logger, IServiceProvider _) diff --git a/src/Tools/dotnet-monitor/Auth/IAuthenticationConfigurator.cs b/src/Tools/dotnet-monitor/Auth/IAuthenticationConfigurator.cs index 70b392d85fa..72a7e8e6994 100644 --- a/src/Tools/dotnet-monitor/Auth/IAuthenticationConfigurator.cs +++ b/src/Tools/dotnet-monitor/Auth/IAuthenticationConfigurator.cs @@ -5,7 +5,6 @@ using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; using Swashbuckle.AspNetCore.SwaggerGen; -using Swashbuckle.AspNetCore.SwaggerUI; using System; namespace Microsoft.Diagnostics.Tools.Monitor.Auth @@ -14,7 +13,6 @@ internal interface IAuthenticationConfigurator { void ConfigureApiAuth(IServiceCollection services, HostBuilderContext context); void ConfigureSwaggerGenAuth(SwaggerGenOptions options); - void ConfigureSwaggerUI(SwaggerUIOptions options); IStartupLogger CreateStartupLogger(ILogger logger, IServiceProvider serviceProvider); } } diff --git a/src/Tools/dotnet-monitor/Auth/NoAuth/NoAuthConfigurator.cs b/src/Tools/dotnet-monitor/Auth/NoAuth/NoAuthConfigurator.cs index 6950fe71f88..c2d2f0dee89 100644 --- a/src/Tools/dotnet-monitor/Auth/NoAuth/NoAuthConfigurator.cs +++ b/src/Tools/dotnet-monitor/Auth/NoAuth/NoAuthConfigurator.cs @@ -6,7 +6,6 @@ using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; using Swashbuckle.AspNetCore.SwaggerGen; -using Swashbuckle.AspNetCore.SwaggerUI; using System; namespace Microsoft.Diagnostics.Tools.Monitor.Auth.NoAuth @@ -28,10 +27,6 @@ public void ConfigureSwaggerGenAuth(SwaggerGenOptions options) { } - public void ConfigureSwaggerUI(SwaggerUIOptions options) - { - } - public IStartupLogger CreateStartupLogger(ILogger logger, IServiceProvider _) { return new 
AuthenticationStartupLoggerWrapper(logger.NoAuthentication); diff --git a/src/Tools/dotnet-monitor/Startup.cs b/src/Tools/dotnet-monitor/Startup.cs index 89847a55953..9253658b750 100644 --- a/src/Tools/dotnet-monitor/Startup.cs +++ b/src/Tools/dotnet-monitor/Startup.cs @@ -8,12 +8,14 @@ using Microsoft.AspNetCore.ResponseCompression; using Microsoft.Diagnostics.Monitoring.WebApi; using Microsoft.Diagnostics.Monitoring.WebApi.Controllers; -using Microsoft.Diagnostics.Tools.Monitor.Auth; +using Microsoft.Diagnostics.Tools.Monitor.Swagger; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Options; +using Swashbuckle.AspNetCore.Swagger; using System.Collections.Generic; +using System.IO; using System.IO.Compression; using System.Text.Json.Serialization; @@ -84,13 +86,6 @@ public static void Configure(IApplicationBuilder app, IWebHostEnvironment env, I app.UseSwagger(); - IAuthenticationConfigurator authConfigurator = app.ApplicationServices.GetRequiredService(); - app.UseSwaggerUI(options => - { - options.SwaggerEndpoint("/swagger/v1/swagger.json", "dotnet-monitor v1.0"); - authConfigurator.ConfigureSwaggerUI(options); - }); - app.UseRouting(); app.UseAuthentication(); @@ -109,12 +104,11 @@ public static void Configure(IApplicationBuilder app, IWebHostEnvironment env, I { builder.MapControllers(); - // Use a redirect to the Swagger UI if the request hits the default endpoint. - // This means that the swagger endpoint can have a permanent address, even if we decide to host - // something else at the root later. 
- builder.MapGet("/", (HttpResponse response) => + builder.MapGet("/", (HttpResponse response, ISwaggerProvider provider) => { - response.Redirect("/swagger/index.html"); + using Stream stream = response.BodyWriter.AsStream(true); + + provider.WriteTo(stream); }); }); } diff --git a/src/Tools/dotnet-monitor/Strings.Designer.cs b/src/Tools/dotnet-monitor/Strings.Designer.cs index f78a5abf928..e717566c526 100644 --- a/src/Tools/dotnet-monitor/Strings.Designer.cs +++ b/src/Tools/dotnet-monitor/Strings.Designer.cs @@ -745,15 +745,6 @@ internal static string HelpDescription_SecurityDefinitionDescription_ApiKey { } } - /// - /// Looks up a localized string similar to Swagger UI permission. - /// - internal static string HelpDescription_SwaggerScope_AzureAd { - get { - return ResourceManager.GetString("HelpDescription_SwaggerScope_AzureAd", resourceCulture); - } - } - /// /// Looks up a localized string similar to Collection rule action options '{settingsType}' must implement ICloneable to support property tokenization.. /// diff --git a/src/Tools/dotnet-monitor/Strings.resx b/src/Tools/dotnet-monitor/Strings.resx index b5a53b94af6..05dd147c6ab 100644 --- a/src/Tools/dotnet-monitor/Strings.resx +++ b/src/Tools/dotnet-monitor/Strings.resx @@ -445,10 +445,6 @@ JWT Authorization header using bearer token authentication. Put the Authorization header value ("Bearer" prefix and the JWT value) in the textbox when prompted. Gets a string used in the generated OpenAPI doc for the auth scheme - - Swagger UI permission - Gets a string used in the generated OpenAPI doc for the auth scheme - Collection rule action options '{settingsType}' must implement ICloneable to support property tokenization. 
diff --git a/src/Tools/dotnet-monitor/Swagger/SwaggerProviderExtensions.cs b/src/Tools/dotnet-monitor/Swagger/SwaggerProviderExtensions.cs new file mode 100644 index 00000000000..b99b5c60cc7 --- /dev/null +++ b/src/Tools/dotnet-monitor/Swagger/SwaggerProviderExtensions.cs @@ -0,0 +1,27 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using Microsoft.OpenApi.Models; +using Microsoft.OpenApi.Writers; +using Swashbuckle.AspNetCore.Swagger; +using System.IO; + +namespace Microsoft.Diagnostics.Tools.Monitor.Swagger +{ + internal static class SwaggerProviderExtensions + { + public static void WriteTo(this ISwaggerProvider provider, Stream stream) + { + using StreamWriter writer = new(stream); + + provider.WriteTo(writer); + } + + public static void WriteTo(this ISwaggerProvider provider, TextWriter writer) + { + OpenApiDocument document = provider.GetSwagger("v1"); + + document.SerializeAsV3(new OpenApiJsonWriter(writer)); + } + } +} diff --git a/src/Tools/dotnet-monitor/dotnet-monitor.csproj b/src/Tools/dotnet-monitor/dotnet-monitor.csproj index 1aed9ef4919..de82ee8efdc 100644 --- a/src/Tools/dotnet-monitor/dotnet-monitor.csproj +++ b/src/Tools/dotnet-monitor/dotnet-monitor.csproj @@ -16,7 +16,6 @@ -