From dcec6de171291a2bf7907275b8091a3240e32836 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 1 Aug 2025 14:23:16 -0700 Subject: [PATCH 001/118] In progress of figuring out eng branch checkout mechanics. --- .../stages/workload-public-build.yml | 22 +++++++++++++++++++ .../templates/variables/workload-public.yml | 9 ++++++++ 2 files changed, 31 insertions(+) create mode 100644 eng/pipelines/templates/stages/workload-public-build.yml create mode 100644 eng/pipelines/templates/variables/workload-public.yml diff --git a/eng/pipelines/templates/stages/workload-public-build.yml b/eng/pipelines/templates/stages/workload-public-build.yml new file mode 100644 index 000000000..ac5612439 --- /dev/null +++ b/eng/pipelines/templates/stages/workload-public-build.yml @@ -0,0 +1,22 @@ +stages: +- stage: Build + displayName: Build + jobs: + - template: /eng/common/templates/job/job.yml + parameters: + name: buildRepo + displayName: Build Repo + pool: + name: $(DncEngPublicBuildPool) + demands: ImageOverride -equals windows.vs2022.amd64.open + artifacts: + publish: + logs: true + steps: + - powershell: >- + eng/common/build.ps1 + -restore -build -pack -ci -msbuildEngine vs + -configuration $(_BuildConfig) + /p:DotNetSignType=$(_SignType) + /p:TeamName=$(_TeamName) + displayName: 🟣 Build solution \ No newline at end of file diff --git a/eng/pipelines/templates/variables/workload-public.yml b/eng/pipelines/templates/variables/workload-public.yml new file mode 100644 index 000000000..5f9a48fca --- /dev/null +++ b/eng/pipelines/templates/variables/workload-public.yml @@ -0,0 +1,9 @@ +variables: +- name: _SignType + value: test +- name: _TeamName + value: DotNet-Cli +- name: _BuildConfig + value: Release +- name: PostBuildSign + value: true \ No newline at end of file From 28002f3a2fa98c079c57f6b24bb3768b56a23d36 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 1 Aug 2025 14:42:56 -0700 Subject: [PATCH 002/118] Removed files that would not be managed in eng. 
Renamed a few files since I'm not sure of how to handle them yet. --- .github/workflows/backport.yml | 19 - CODE-OF-CONDUCT.md | 6 - LICENSE.md | 23 - NuGet.config | 37 - README.md | 20 - build.cmd => build-eng.cmd | 0 doc/release-process.md | 96 -- eng/Version.Details.xml | 49 - eng/Versions.props | 74 -- .../build-configuration.json | 4 - eng/common/CIBuild.cmd | 2 - eng/common/PSScriptAnalyzerSettings.psd1 | 11 - eng/common/README.md | 28 - eng/common/SetupNugetSources.ps1 | 171 ---- eng/common/SetupNugetSources.sh | 167 ---- eng/common/build.cmd | 3 - eng/common/build.ps1 | 178 ---- eng/common/build.sh | 277 ----- eng/common/cibuild.sh | 16 - eng/common/core-templates/job/job.yml | 225 ----- eng/common/core-templates/job/onelocbuild.yml | 120 --- .../job/publish-build-assets.yml | 194 ---- .../core-templates/job/source-build.yml | 96 -- .../job/source-index-stage1.yml | 44 - .../core-templates/jobs/codeql-build.yml | 32 - eng/common/core-templates/jobs/jobs.yml | 119 --- .../core-templates/jobs/source-build.yml | 58 -- .../post-build/common-variables.yml | 22 - .../core-templates/post-build/post-build.yml | 325 ------ .../post-build/setup-maestro-vars.yml | 74 -- .../steps/cleanup-microbuild.yml | 28 - .../steps/component-governance.yml | 16 - .../steps/enable-internal-runtimes.yml | 32 - .../steps/enable-internal-sources.yml | 47 - .../core-templates/steps/generate-sbom.yml | 54 - .../steps/get-delegation-sas.yml | 46 - .../steps/get-federated-access-token.yml | 42 - .../steps/install-microbuild.yml | 54 - .../steps/publish-build-artifacts.yml | 20 - .../core-templates/steps/publish-logs.yml | 61 -- .../steps/publish-pipeline-artifacts.yml | 20 - .../core-templates/steps/retain-build.yml | 28 - .../core-templates/steps/send-to-helix.yml | 93 -- .../core-templates/steps/source-build.yml | 65 -- .../steps/source-index-stage1-publish.yml | 35 - .../variables/pool-providers.yml | 8 - eng/common/cross/arm/tizen/tizen.patch | 9 - 
eng/common/cross/arm64/tizen/tizen.patch | 9 - eng/common/cross/armel/tizen/tizen.patch | 9 - eng/common/cross/build-android-rootfs.sh | 146 --- eng/common/cross/build-rootfs.sh | 835 ---------------- eng/common/cross/install-debs.py | 334 ------- eng/common/cross/riscv64/tizen/tizen.patch | 9 - eng/common/cross/tizen-build-rootfs.sh | 82 -- eng/common/cross/tizen-fetch.sh | 178 ---- eng/common/cross/toolchain.cmake | 387 ------- eng/common/cross/x64/tizen/tizen.patch | 9 - eng/common/cross/x86/tizen/tizen.patch | 9 - eng/common/darc-init.ps1 | 47 - eng/common/darc-init.sh | 82 -- eng/common/dotnet-install.cmd | 2 - eng/common/dotnet-install.ps1 | 28 - eng/common/dotnet-install.sh | 94 -- eng/common/dotnet.cmd | 7 - eng/common/dotnet.ps1 | 11 - eng/common/dotnet.sh | 26 - eng/common/enable-cross-org-publishing.ps1 | 13 - eng/common/generate-locproject.ps1 | 189 ---- eng/common/generate-sbom-prep.ps1 | 29 - eng/common/generate-sbom-prep.sh | 39 - eng/common/helixpublish.proj | 27 - eng/common/init-tools-native.cmd | 3 - eng/common/init-tools-native.ps1 | 203 ---- eng/common/init-tools-native.sh | 238 ----- eng/common/internal-feed-operations.ps1 | 132 --- eng/common/internal-feed-operations.sh | 141 --- eng/common/internal/Directory.Build.props | 11 - eng/common/internal/NuGet.config | 10 - eng/common/internal/Tools.csproj | 22 - eng/common/loc/P22DotNetHtmlLocalization.lss | 29 - eng/common/msbuild.ps1 | 28 - eng/common/msbuild.sh | 58 -- eng/common/native/CommonLibrary.psm1 | 401 -------- eng/common/native/common-library.sh | 172 ---- eng/common/native/init-compiler.sh | 146 --- eng/common/native/init-distro-rid.sh | 110 -- eng/common/native/init-os-and-arch.sh | 85 -- eng/common/native/install-cmake-test.sh | 117 --- eng/common/native/install-cmake.sh | 117 --- eng/common/native/install-dependencies.sh | 62 -- eng/common/native/install-tool.ps1 | 132 --- eng/common/pipeline-logging-functions.ps1 | 260 ----- eng/common/pipeline-logging-functions.sh | 206 ---- 
.../post-build/check-channel-consistency.ps1 | 48 - eng/common/post-build/nuget-validation.ps1 | 22 - eng/common/post-build/nuget-verification.ps1 | 121 --- eng/common/post-build/publish-using-darc.ps1 | 69 -- eng/common/post-build/redact-logs.ps1 | 89 -- .../post-build/sourcelink-validation.ps1 | 327 ------ eng/common/post-build/symbols-validation.ps1 | 337 ------- eng/common/retain-build.ps1 | 45 - eng/common/sdk-task.ps1 | 100 -- eng/common/sdk-task.sh | 116 --- eng/common/sdl/NuGet.config | 18 - eng/common/sdl/configure-sdl-tool.ps1 | 130 --- eng/common/sdl/execute-all-sdl-tools.ps1 | 167 ---- eng/common/sdl/extract-artifact-archives.ps1 | 63 -- eng/common/sdl/extract-artifact-packages.ps1 | 82 -- eng/common/sdl/init-sdl.ps1 | 55 - eng/common/sdl/packages.config | 4 - eng/common/sdl/run-sdl.ps1 | 49 - eng/common/sdl/sdl.ps1 | 38 - eng/common/sdl/trim-assets-version.ps1 | 75 -- eng/common/template-guidance.md | 133 --- eng/common/templates-official/job/job.yml | 83 -- .../templates-official/job/onelocbuild.yml | 7 - .../job/publish-build-assets.yml | 7 - .../templates-official/job/source-build.yml | 7 - .../job/source-index-stage1.yml | 7 - .../templates-official/jobs/codeql-build.yml | 7 - eng/common/templates-official/jobs/jobs.yml | 7 - .../templates-official/jobs/source-build.yml | 7 - .../post-build/common-variables.yml | 8 - .../post-build/post-build.yml | 8 - .../post-build/setup-maestro-vars.yml | 8 - .../steps/component-governance.yml | 7 - .../steps/enable-internal-runtimes.yml | 9 - .../steps/enable-internal-sources.yml | 7 - .../steps/generate-sbom.yml | 7 - .../steps/get-delegation-sas.yml | 7 - .../steps/get-federated-access-token.yml | 7 - .../steps/publish-build-artifacts.yml | 46 - .../templates-official/steps/publish-logs.yml | 7 - .../steps/publish-pipeline-artifacts.yml | 28 - .../templates-official/steps/retain-build.yml | 7 - .../steps/send-to-helix.yml | 7 - .../templates-official/steps/source-build.yml | 7 - 
.../steps/source-index-stage1-publish.yml | 7 - .../variables/pool-providers.yml | 45 - .../variables/sdl-variables.yml | 7 - eng/common/templates/job/job.yml | 84 -- eng/common/templates/job/onelocbuild.yml | 7 - .../templates/job/publish-build-assets.yml | 7 - eng/common/templates/job/source-build.yml | 7 - .../templates/job/source-index-stage1.yml | 7 - eng/common/templates/jobs/codeql-build.yml | 7 - eng/common/templates/jobs/jobs.yml | 7 - eng/common/templates/jobs/source-build.yml | 7 - .../templates/post-build/common-variables.yml | 8 - .../templates/post-build/post-build.yml | 8 - .../post-build/setup-maestro-vars.yml | 8 - .../templates/steps/component-governance.yml | 7 - .../steps/enable-internal-runtimes.yml | 10 - .../steps/enable-internal-sources.yml | 7 - eng/common/templates/steps/generate-sbom.yml | 7 - .../templates/steps/get-delegation-sas.yml | 7 - .../steps/get-federated-access-token.yml | 7 - .../steps/publish-build-artifacts.yml | 46 - eng/common/templates/steps/publish-logs.yml | 7 - .../steps/publish-pipeline-artifacts.yml | 34 - eng/common/templates/steps/retain-build.yml | 7 - eng/common/templates/steps/send-to-helix.yml | 7 - eng/common/templates/steps/source-build.yml | 7 - .../steps/source-index-stage1-publish.yml | 7 - eng/common/templates/steps/vmr-sync.yml | 207 ---- .../templates/variables/pool-providers.yml | 59 -- eng/common/templates/vmr-build-pr.yml | 42 - eng/common/tools.ps1 | 942 ------------------ eng/common/tools.sh | 580 ----------- eng/common/vmr-sync.ps1 | 138 --- eng/common/vmr-sync.sh | 207 ---- eng/pipelines/{public.yml => public-eng.yml} | 0 global.json | 15 - 173 files changed, 13388 deletions(-) delete mode 100644 .github/workflows/backport.yml delete mode 100644 CODE-OF-CONDUCT.md delete mode 100644 LICENSE.md delete mode 100644 NuGet.config delete mode 100644 README.md rename build.cmd => build-eng.cmd (100%) delete mode 100644 doc/release-process.md delete mode 100644 eng/Version.Details.xml delete mode 100644 
eng/Versions.props delete mode 100644 eng/common/BuildConfiguration/build-configuration.json delete mode 100644 eng/common/CIBuild.cmd delete mode 100644 eng/common/PSScriptAnalyzerSettings.psd1 delete mode 100644 eng/common/README.md delete mode 100644 eng/common/SetupNugetSources.ps1 delete mode 100755 eng/common/SetupNugetSources.sh delete mode 100644 eng/common/build.cmd delete mode 100644 eng/common/build.ps1 delete mode 100755 eng/common/build.sh delete mode 100755 eng/common/cibuild.sh delete mode 100644 eng/common/core-templates/job/job.yml delete mode 100644 eng/common/core-templates/job/onelocbuild.yml delete mode 100644 eng/common/core-templates/job/publish-build-assets.yml delete mode 100644 eng/common/core-templates/job/source-build.yml delete mode 100644 eng/common/core-templates/job/source-index-stage1.yml delete mode 100644 eng/common/core-templates/jobs/codeql-build.yml delete mode 100644 eng/common/core-templates/jobs/jobs.yml delete mode 100644 eng/common/core-templates/jobs/source-build.yml delete mode 100644 eng/common/core-templates/post-build/common-variables.yml delete mode 100644 eng/common/core-templates/post-build/post-build.yml delete mode 100644 eng/common/core-templates/post-build/setup-maestro-vars.yml delete mode 100644 eng/common/core-templates/steps/cleanup-microbuild.yml delete mode 100644 eng/common/core-templates/steps/component-governance.yml delete mode 100644 eng/common/core-templates/steps/enable-internal-runtimes.yml delete mode 100644 eng/common/core-templates/steps/enable-internal-sources.yml delete mode 100644 eng/common/core-templates/steps/generate-sbom.yml delete mode 100644 eng/common/core-templates/steps/get-delegation-sas.yml delete mode 100644 eng/common/core-templates/steps/get-federated-access-token.yml delete mode 100644 eng/common/core-templates/steps/install-microbuild.yml delete mode 100644 eng/common/core-templates/steps/publish-build-artifacts.yml delete mode 100644 
eng/common/core-templates/steps/publish-logs.yml delete mode 100644 eng/common/core-templates/steps/publish-pipeline-artifacts.yml delete mode 100644 eng/common/core-templates/steps/retain-build.yml delete mode 100644 eng/common/core-templates/steps/send-to-helix.yml delete mode 100644 eng/common/core-templates/steps/source-build.yml delete mode 100644 eng/common/core-templates/steps/source-index-stage1-publish.yml delete mode 100644 eng/common/core-templates/variables/pool-providers.yml delete mode 100644 eng/common/cross/arm/tizen/tizen.patch delete mode 100644 eng/common/cross/arm64/tizen/tizen.patch delete mode 100644 eng/common/cross/armel/tizen/tizen.patch delete mode 100755 eng/common/cross/build-android-rootfs.sh delete mode 100755 eng/common/cross/build-rootfs.sh delete mode 100644 eng/common/cross/install-debs.py delete mode 100644 eng/common/cross/riscv64/tizen/tizen.patch delete mode 100755 eng/common/cross/tizen-build-rootfs.sh delete mode 100755 eng/common/cross/tizen-fetch.sh delete mode 100644 eng/common/cross/toolchain.cmake delete mode 100644 eng/common/cross/x64/tizen/tizen.patch delete mode 100644 eng/common/cross/x86/tizen/tizen.patch delete mode 100644 eng/common/darc-init.ps1 delete mode 100755 eng/common/darc-init.sh delete mode 100644 eng/common/dotnet-install.cmd delete mode 100644 eng/common/dotnet-install.ps1 delete mode 100755 eng/common/dotnet-install.sh delete mode 100644 eng/common/dotnet.cmd delete mode 100644 eng/common/dotnet.ps1 delete mode 100644 eng/common/dotnet.sh delete mode 100644 eng/common/enable-cross-org-publishing.ps1 delete mode 100644 eng/common/generate-locproject.ps1 delete mode 100644 eng/common/generate-sbom-prep.ps1 delete mode 100755 eng/common/generate-sbom-prep.sh delete mode 100644 eng/common/helixpublish.proj delete mode 100644 eng/common/init-tools-native.cmd delete mode 100644 eng/common/init-tools-native.ps1 delete mode 100755 eng/common/init-tools-native.sh delete mode 100644 
eng/common/internal-feed-operations.ps1 delete mode 100755 eng/common/internal-feed-operations.sh delete mode 100644 eng/common/internal/Directory.Build.props delete mode 100644 eng/common/internal/NuGet.config delete mode 100644 eng/common/internal/Tools.csproj delete mode 100644 eng/common/loc/P22DotNetHtmlLocalization.lss delete mode 100644 eng/common/msbuild.ps1 delete mode 100755 eng/common/msbuild.sh delete mode 100644 eng/common/native/CommonLibrary.psm1 delete mode 100755 eng/common/native/common-library.sh delete mode 100755 eng/common/native/init-compiler.sh delete mode 100755 eng/common/native/init-distro-rid.sh delete mode 100755 eng/common/native/init-os-and-arch.sh delete mode 100755 eng/common/native/install-cmake-test.sh delete mode 100755 eng/common/native/install-cmake.sh delete mode 100644 eng/common/native/install-dependencies.sh delete mode 100644 eng/common/native/install-tool.ps1 delete mode 100644 eng/common/pipeline-logging-functions.ps1 delete mode 100755 eng/common/pipeline-logging-functions.sh delete mode 100644 eng/common/post-build/check-channel-consistency.ps1 delete mode 100644 eng/common/post-build/nuget-validation.ps1 delete mode 100644 eng/common/post-build/nuget-verification.ps1 delete mode 100644 eng/common/post-build/publish-using-darc.ps1 delete mode 100644 eng/common/post-build/redact-logs.ps1 delete mode 100644 eng/common/post-build/sourcelink-validation.ps1 delete mode 100644 eng/common/post-build/symbols-validation.ps1 delete mode 100644 eng/common/retain-build.ps1 delete mode 100644 eng/common/sdk-task.ps1 delete mode 100644 eng/common/sdk-task.sh delete mode 100644 eng/common/sdl/NuGet.config delete mode 100644 eng/common/sdl/configure-sdl-tool.ps1 delete mode 100644 eng/common/sdl/execute-all-sdl-tools.ps1 delete mode 100644 eng/common/sdl/extract-artifact-archives.ps1 delete mode 100644 eng/common/sdl/extract-artifact-packages.ps1 delete mode 100644 eng/common/sdl/init-sdl.ps1 delete mode 100644 
eng/common/sdl/packages.config delete mode 100644 eng/common/sdl/run-sdl.ps1 delete mode 100644 eng/common/sdl/sdl.ps1 delete mode 100644 eng/common/sdl/trim-assets-version.ps1 delete mode 100644 eng/common/template-guidance.md delete mode 100644 eng/common/templates-official/job/job.yml delete mode 100644 eng/common/templates-official/job/onelocbuild.yml delete mode 100644 eng/common/templates-official/job/publish-build-assets.yml delete mode 100644 eng/common/templates-official/job/source-build.yml delete mode 100644 eng/common/templates-official/job/source-index-stage1.yml delete mode 100644 eng/common/templates-official/jobs/codeql-build.yml delete mode 100644 eng/common/templates-official/jobs/jobs.yml delete mode 100644 eng/common/templates-official/jobs/source-build.yml delete mode 100644 eng/common/templates-official/post-build/common-variables.yml delete mode 100644 eng/common/templates-official/post-build/post-build.yml delete mode 100644 eng/common/templates-official/post-build/setup-maestro-vars.yml delete mode 100644 eng/common/templates-official/steps/component-governance.yml delete mode 100644 eng/common/templates-official/steps/enable-internal-runtimes.yml delete mode 100644 eng/common/templates-official/steps/enable-internal-sources.yml delete mode 100644 eng/common/templates-official/steps/generate-sbom.yml delete mode 100644 eng/common/templates-official/steps/get-delegation-sas.yml delete mode 100644 eng/common/templates-official/steps/get-federated-access-token.yml delete mode 100644 eng/common/templates-official/steps/publish-build-artifacts.yml delete mode 100644 eng/common/templates-official/steps/publish-logs.yml delete mode 100644 eng/common/templates-official/steps/publish-pipeline-artifacts.yml delete mode 100644 eng/common/templates-official/steps/retain-build.yml delete mode 100644 eng/common/templates-official/steps/send-to-helix.yml delete mode 100644 eng/common/templates-official/steps/source-build.yml delete mode 100644 
eng/common/templates-official/steps/source-index-stage1-publish.yml delete mode 100644 eng/common/templates-official/variables/pool-providers.yml delete mode 100644 eng/common/templates-official/variables/sdl-variables.yml delete mode 100644 eng/common/templates/job/job.yml delete mode 100644 eng/common/templates/job/onelocbuild.yml delete mode 100644 eng/common/templates/job/publish-build-assets.yml delete mode 100644 eng/common/templates/job/source-build.yml delete mode 100644 eng/common/templates/job/source-index-stage1.yml delete mode 100644 eng/common/templates/jobs/codeql-build.yml delete mode 100644 eng/common/templates/jobs/jobs.yml delete mode 100644 eng/common/templates/jobs/source-build.yml delete mode 100644 eng/common/templates/post-build/common-variables.yml delete mode 100644 eng/common/templates/post-build/post-build.yml delete mode 100644 eng/common/templates/post-build/setup-maestro-vars.yml delete mode 100644 eng/common/templates/steps/component-governance.yml delete mode 100644 eng/common/templates/steps/enable-internal-runtimes.yml delete mode 100644 eng/common/templates/steps/enable-internal-sources.yml delete mode 100644 eng/common/templates/steps/generate-sbom.yml delete mode 100644 eng/common/templates/steps/get-delegation-sas.yml delete mode 100644 eng/common/templates/steps/get-federated-access-token.yml delete mode 100644 eng/common/templates/steps/publish-build-artifacts.yml delete mode 100644 eng/common/templates/steps/publish-logs.yml delete mode 100644 eng/common/templates/steps/publish-pipeline-artifacts.yml delete mode 100644 eng/common/templates/steps/retain-build.yml delete mode 100644 eng/common/templates/steps/send-to-helix.yml delete mode 100644 eng/common/templates/steps/source-build.yml delete mode 100644 eng/common/templates/steps/source-index-stage1-publish.yml delete mode 100644 eng/common/templates/steps/vmr-sync.yml delete mode 100644 eng/common/templates/variables/pool-providers.yml delete mode 100644 
eng/common/templates/vmr-build-pr.yml delete mode 100644 eng/common/tools.ps1 delete mode 100755 eng/common/tools.sh delete mode 100644 eng/common/vmr-sync.ps1 delete mode 100644 eng/common/vmr-sync.sh rename eng/pipelines/{public.yml => public-eng.yml} (100%) delete mode 100644 global.json diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml deleted file mode 100644 index 11d78d911..000000000 --- a/.github/workflows/backport.yml +++ /dev/null @@ -1,19 +0,0 @@ -name: Backport PR to branch -on: - issue_comment: - types: [created] - -permissions: - contents: write - issues: write - pull-requests: write - actions: write - -jobs: - backport: - uses: dotnet/arcade/.github/workflows/backport-base.yml@main - with: - pr_description_template: | - Backport of #%source_pr_number% to %target_branch% - - /cc %cc_users% \ No newline at end of file diff --git a/CODE-OF-CONDUCT.md b/CODE-OF-CONDUCT.md deleted file mode 100644 index 775f221c9..000000000 --- a/CODE-OF-CONDUCT.md +++ /dev/null @@ -1,6 +0,0 @@ -# Code of Conduct - -This project has adopted the code of conduct defined by the Contributor Covenant -to clarify expected behavior in our community. - -For more information, see the [.NET Foundation Code of Conduct](https://dotnetfoundation.org/code-of-conduct). diff --git a/LICENSE.md b/LICENSE.md deleted file mode 100644 index 984713a49..000000000 --- a/LICENSE.md +++ /dev/null @@ -1,23 +0,0 @@ -The MIT License (MIT) - -Copyright (c) .NET Foundation and Contributors - -All rights reserved. 
- -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/NuGet.config b/NuGet.config deleted file mode 100644 index b36ef57b3..000000000 --- a/NuGet.config +++ /dev/null @@ -1,37 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/README.md b/README.md deleted file mode 100644 index cc3824080..000000000 --- a/README.md +++ /dev/null @@ -1,20 +0,0 @@ -## Welcome to the .NET SDK Workload Versions repo - -This repository contains the version information for .NET SDK Workloads. - -### Pre-requisites for local VS insertion build - -1. Install the latest [Visual Studio](https://visualstudio.microsoft.com/downloads/) with the .NET Desktop workload - - Make sure to restart your PC after the installation is complete. -2. [Install Azure CLI](https://learn.microsoft.com/cli/azure/install-azure-cli-windows#install-or-update) -3. Run `az login` to authenticate - - When it asks for a subscription to select, just press Enter. 
The default subscription selection does not affect DARC. -4. [Install DARC](https://github.com/dotnet/arcade/blob/main/Documentation/Darc.md#setting-up-your-darc-client) -5. [Add GitHub auth for DARC](https://github.com/dotnet/arcade/blob/main/Documentation/Darc.md#step-3-set-additional-pats-for-azure-devops-and-github-operations) - - Use the [darc authenticate](https://github.com/dotnet/arcade/blob/main/Documentation/Darc.md#authenticate) command. - - Generate the GitHub PAT [here](https://github.com/settings/tokens?type=beta). Create a fine-grained PAT instead of the classic PAT. - - **Do not** create an AzDO PAT. Leave that entry blank in the darc-authenticate file for it to use local machine auth. -6. Request access to the [.NET Daily Internal Build Access](https://coreidentity.microsoft.com/manage/Entitlement/entitlement/netdailyinte-q2ql) entitlement - - This allows the local AzDO machine auth to gather internal assets from AzDO. - - **Send a message** to one of the primary owners on the entitlement page for approval after requesting access to the entitlement. - - Should take about 20 mins for the entitlement process to complete (will appear on your [entitlements list](https://coreidentity.microsoft.com/manage/entitlement)) and another 30 mins the access to propagate to DARC. Basically, after approval, wait an hour until you actually attempt to build. \ No newline at end of file diff --git a/build.cmd b/build-eng.cmd similarity index 100% rename from build.cmd rename to build-eng.cmd diff --git a/doc/release-process.md b/doc/release-process.md deleted file mode 100644 index 2caa6ee3b..000000000 --- a/doc/release-process.md +++ /dev/null @@ -1,96 +0,0 @@ -# Release Process Documentation - -Based on https://github.com/dotnet/sdk/issues/41607 - -## Current Process - -This section details the current process for releasing workloads. - -1. **Identify the current versions of the workloads you want to ship** -2. 
**Identify an existing PR that makes those changes** -3. **Update the branding in the Version.props file for the release** - - If it's a monthly release, update the VersionFeature to match the SDK release and set VersionPatch to 0 - - If it's an in-between release, increment the VersionPatch value - - Note, if you are prepping 9 release, make sure you update all impacted branches since we are shipping multiple .NET9 SDKs from one branch -4. **Merge the change** -5. **Wait for the change to flow internally** - - You can check the branch history internally to see if you change made it -7. **Queue a build** - - Target the branch you are shipping from - - Only select both the checkbox for a stable version and publish to the feed if you have high confidence that the branding and versions are correct. - - Without the stable box selected, you will get a -servicing version of the stable ID workload which can be used for testing and saved on the feed - - You can select stable and not publish to the feed and download the package artifacts locally for testing -8. **Test the build** - - start a sandbox - - install the SDK band you intend to test - - create a test folder - - cd \ - - mkdir test - - cd test - - dotnet new nugetconfig - - if you published to a feed - - dotnet nuget add source https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet9-workloads/nuget/v3/index.json - - if you didn't publish to a feed - - copy the packageartifacts into c:\packages - - dotnet nuget add source c:\packages - - dotnet workload update --version - - note that if you are testing before release day, you may have to find and add additional feeds for the various manifest. - - testing the manifests is typically enough -9. **Approve the 'publish on NuGet.org' stage in the AzDO pipeline** - -## Ideal Process - -> [!NOTE] -> This process may vary based on the workload owners. 
- -> [!NOTE] -> Most of this process today is done through disparate builds in each workload repo and this repo (or manually). Below is outlining our desired goal. - -1. **Publishing Release-Ready Versions** - - Each workload owner publishes release-ready versions to the `dotnet8-workloads` and `dotnet9-workloads` channels. - -2. **Creating Workload Set Repo PRs** - - Workload set repository pull requests (PRs) are created based on the release channel. - -3. **Review and Merge PRs** - - A person reviews the PR, approves it, merges it, and a build is triggered automatically. - -4. **Build Process** - - There are a few phases to the build process: - 1. Create a stable and unstable version of the workload set (a stable version is only needed in servicing). - - 9.0.102-servicing.12345.6 - - 9.0.102 - 2. Publish the unstable version to the workloads feed. - 3. The process is different depending on if we're in servicing/golive. - 1. If we're **not** in servicing/golive: - - Publish runtime, emsdk, and maui to the workloads feed. - 2. If we're in servicing/golive: - - Only publish maui and the unstable workload set to the workloads feed. - - Additionally, create a stable workloads feed and publish the runtime, emsdk, maui, and stable workload to that feed. - - To enable this step, the .NET staging pipeline will have to be modified to publish the runtime and emsdk builds to the appropriate workloads channels. Today, that publishing is done in the runtime public build. - 4. Create a vsdrop for each workload. - -5. **Creating a VS PR** - - Creates a Visual Studio (VS) PR with the workloads JSON file updated. - -6. **Testing and Merging** - - Test the VS PR, sign off, and merge the workload set and all workloads together. - -7. 
**Publish to nuget.org** (on release day) - - Publish packs for all workloads - - Query nuget.org until the packs are available - - Publish manifests for all workloads - - Query nuget.org until the manifests are available - - Publish the workload set - -## Additional Details - -### Approvals - -In the above steps, step 3 will require approval in GitHub. Additionally, steps 4.b, 4.c, 5, and 7 should each require approval in the workload staging pipeline (specifically any step that does publishing to feeds, NuGet, or VS should require an approval within the pipeline). The reason for this is so we can have the pipeline run automatically on all PRs but only publish the changes we want to. Workload partners may have changes they want to publish internally for testing but not go to NuGet or VS, teams may have changes they want to publish on NuGet.org but not publish to VS, etc. - -Basically, everything should happen automatically once merged but we want to control whether we publish the workloads to the feeds, to VS, and to nuget.org so we don't publish by default. 
- -### We have a preview workloads feed for each major version of .NET -- dotnet8-workloads https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet8-workloads/nuget/v3/index.json -- dotnet9-workloads https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet9-workloads/nuget/v3/index.json diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml deleted file mode 100644 index cbde7788b..000000000 --- a/eng/Version.Details.xml +++ /dev/null @@ -1,49 +0,0 @@ - - - - - https://github.com/dotnet/dotnet - 30000d883e06c122311a66894579bc12329a09d4 - - - - https://github.com/dotnet/dotnet - 30000d883e06c122311a66894579bc12329a09d4 - - - https://github.com/dotnet/android - 903250b8a3be2c3bed89e7740049c73f41396a8a - - - https://github.com/dotnet/macios - b7705312033433f3f4de8fcc314c49ef2954261a - - - https://github.com/dotnet/macios - b7705312033433f3f4de8fcc314c49ef2954261a - - - https://github.com/dotnet/macios - b7705312033433f3f4de8fcc314c49ef2954261a - - - https://github.com/dotnet/macios - b7705312033433f3f4de8fcc314c49ef2954261a - - - https://github.com/dotnet/maui - 0838a9b9a63cf7fd299b9a9c5f62d5811a7d1ae8 - - - https://github.com/dotnet/dotnet - 30000d883e06c122311a66894579bc12329a09d4 - - - - - https://github.com/dotnet/dotnet - 30000d883e06c122311a66894579bc12329a09d4 - - - - diff --git a/eng/Versions.props b/eng/Versions.props deleted file mode 100644 index 1a74ed6dc..000000000 --- a/eng/Versions.props +++ /dev/null @@ -1,74 +0,0 @@ - - - - 10 - 0 - 1 - - - 00 - 0 - - false - preview - 7 - - - $(VersionMajor).$(VersionSDKMinor)$(VersionFeature).$(VersionPatch) - - $(VersionMajor).$(VersionMinor).$(VersionSDKMinor)$(VersionFeature) - $(VersionMajor).$(VersionMinor).$(VersionSDKMinor)00 - release - $(SDKFeatureBand)-$(PreReleaseVersionLabel).$(PreReleaseVersionIteration) - - $(WorkloadsVersion).$(VersionPatch) - - - - - https://dotnetbuilds.blob.core.windows.net/public/ - https://dotnetclimsrc.blob.core.windows.net/dotnet/ - - - $(ArcadeSdkVersion) - 
$(ArcadeSdkVersion) - - - 3.14.0-8606.20240208.1 - 1.1.392 - - - - 10.0.100-preview.7.25380.108 - $(MicrosoftNETWorkloadEmscriptenCurrentManifest100100TransportPackageVersion) - - 10.0.100$([System.Text.RegularExpressions.Regex]::Match($(EmscriptenWorkloadManifestVersion), `-[A-z]*[\.]*\d*`)) - - - 10.0.100-preview.7 - 36.0.0-preview.7.229 - 18.5.10601-net10-p7 - 18.5.10601-net10-p7 - 18.5.10601-net10-p7 - 15.5.10601-net10-p7 - 10.0.0-preview.7.25381.4 - $(MicrosoftNETSdkMauiManifest100100preview7PackageVersion) - $(MicrosoftNETSdkAndroidManifest100100preview7PackageVersion) - $(MicrosoftNETSdkiOSManifest100100preview7PackageVersion) - $(MicrosoftNETSdkMacCatalystManifest100100preview7PackageVersion) - $(MicrosoftNETSdkmacOSManifest100100preview7PackageVersion) - $(MicrosoftNETSdktvOSManifest100100preview7PackageVersion) - - - 10.0.0-preview.7.25380.108 - 10.0.100-preview.7.25380.108 - - $(MicrosoftNETSdkPackageVersion) - - 10.0.100$([System.Text.RegularExpressions.Regex]::Match($(MonoWorkloadManifestVersion), `-[A-z]*[\.]*\d*`)) - - - - 2.0.0-preview.1.24406.1 - - diff --git a/eng/common/BuildConfiguration/build-configuration.json b/eng/common/BuildConfiguration/build-configuration.json deleted file mode 100644 index 3d1cc8989..000000000 --- a/eng/common/BuildConfiguration/build-configuration.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "RetryCountLimit": 1, - "RetryByAnyError": false -} diff --git a/eng/common/CIBuild.cmd b/eng/common/CIBuild.cmd deleted file mode 100644 index ac1f72bf9..000000000 --- a/eng/common/CIBuild.cmd +++ /dev/null @@ -1,2 +0,0 @@ -@echo off -powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*" diff --git a/eng/common/PSScriptAnalyzerSettings.psd1 b/eng/common/PSScriptAnalyzerSettings.psd1 deleted file mode 100644 index 4c1ea7c98..000000000 --- a/eng/common/PSScriptAnalyzerSettings.psd1 +++ /dev/null @@ -1,11 +0,0 @@ -@{ - IncludeRules=@('PSAvoidUsingCmdletAliases', - 
'PSAvoidUsingWMICmdlet', - 'PSAvoidUsingPositionalParameters', - 'PSAvoidUsingInvokeExpression', - 'PSUseDeclaredVarsMoreThanAssignments', - 'PSUseCmdletCorrectly', - 'PSStandardDSCFunctionsInResource', - 'PSUseIdenticalMandatoryParametersForDSC', - 'PSUseIdenticalParametersForDSC') -} \ No newline at end of file diff --git a/eng/common/README.md b/eng/common/README.md deleted file mode 100644 index ff49c3715..000000000 --- a/eng/common/README.md +++ /dev/null @@ -1,28 +0,0 @@ -# Don't touch this folder - - uuuuuuuuuuuuuuuuuuuu - u" uuuuuuuuuuuuuuuuuu "u - u" u$$$$$$$$$$$$$$$$$$$$u "u - u" u$$$$$$$$$$$$$$$$$$$$$$$$u "u - u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u - u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u - u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u - $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $ - $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $ - $ $$$" ... "$... ...$" ... "$$$ ... "$$$ $ - $ $$$u `"$$$$$$$ $$$ $$$$$ $$ $$$ $$$ $ - $ $$$$$$uu "$$$$ $$$ $$$$$ $$ """ u$$$ $ - $ $$$""$$$ $$$$ $$$u "$$$" u$$ $$$$$$$$ $ - $ $$$$....,$$$$$..$$$$$....,$$$$..$$$$$$$$ $ - $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $ - "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u" - "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u" - "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u" - "u "$$$$$$$$$$$$$$$$$$$$$$$$" u" - "u "$$$$$$$$$$$$$$$$$$$$" u" - "u """""""""""""""""" u" - """""""""""""""""""" - -!!! Changes made in this directory are subject to being overwritten by automation !!! - -The files in this directory are shared by all Arcade repos and managed by automation. If you need to make changes to these files, open an issue or submit a pull request to https://github.com/dotnet/arcade first. diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1 deleted file mode 100644 index 5db4ad71e..000000000 --- a/eng/common/SetupNugetSources.ps1 +++ /dev/null @@ -1,171 +0,0 @@ -# This script adds internal feeds required to build commits that depend on internal package sources. 
For instance, -# dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. In addition also enables -# disabled internal Maestro (darc-int*) feeds. -# -# Optionally, this script also adds a credential entry for each of the internal feeds if supplied. -# -# See example call for this script below. -# -# - task: PowerShell@2 -# displayName: Setup Private Feeds Credentials -# condition: eq(variables['Agent.OS'], 'Windows_NT') -# inputs: -# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1 -# arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $Env:Token -# env: -# Token: $(dn-bot-dnceng-artifact-feeds-rw) -# -# Note that the NuGetAuthenticate task should be called after SetupNugetSources. -# This ensures that: -# - Appropriate creds are set for the added internal feeds (if not supplied to the scrupt) -# - The credential provider is installed. -# -# This logic is also abstracted into enable-internal-sources.yml. - -[CmdletBinding()] -param ( - [Parameter(Mandatory = $true)][string]$ConfigFile, - $Password -) - -$ErrorActionPreference = "Stop" -Set-StrictMode -Version 2.0 -[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 - -. $PSScriptRoot\tools.ps1 - -# Add source entry to PackageSources -function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Username, $pwd) { - $packageSource = $sources.SelectSingleNode("add[@key='$SourceName']") - - if ($packageSource -eq $null) - { - $packageSource = $doc.CreateElement("add") - $packageSource.SetAttribute("key", $SourceName) - $packageSource.SetAttribute("value", $SourceEndPoint) - $sources.AppendChild($packageSource) | Out-Null - } - else { - Write-Host "Package source $SourceName already present." 
- } - - AddCredential -Creds $creds -Source $SourceName -Username $Username -pwd $pwd -} - -# Add a credential node for the specified source -function AddCredential($creds, $source, $username, $pwd) { - # If no cred supplied, don't do anything. - if (!$pwd) { - return; - } - - # Looks for credential configuration for the given SourceName. Create it if none is found. - $sourceElement = $creds.SelectSingleNode($Source) - if ($sourceElement -eq $null) - { - $sourceElement = $doc.CreateElement($Source) - $creds.AppendChild($sourceElement) | Out-Null - } - - # Add the node to the credential if none is found. - $usernameElement = $sourceElement.SelectSingleNode("add[@key='Username']") - if ($usernameElement -eq $null) - { - $usernameElement = $doc.CreateElement("add") - $usernameElement.SetAttribute("key", "Username") - $sourceElement.AppendChild($usernameElement) | Out-Null - } - $usernameElement.SetAttribute("value", $Username) - - # Add the to the credential if none is found. - # Add it as a clear text because there is no support for encrypted ones in non-windows .Net SDKs. - # -> https://github.com/NuGet/Home/issues/5526 - $passwordElement = $sourceElement.SelectSingleNode("add[@key='ClearTextPassword']") - if ($passwordElement -eq $null) - { - $passwordElement = $doc.CreateElement("add") - $passwordElement.SetAttribute("key", "ClearTextPassword") - $sourceElement.AppendChild($passwordElement) | Out-Null - } - - $passwordElement.SetAttribute("value", $pwd) -} - -function InsertMaestroPrivateFeedCredentials($Sources, $Creds, $Username, $pwd) { - $maestroPrivateSources = $Sources.SelectNodes("add[contains(@key,'darc-int')]") - - Write-Host "Inserting credentials for $($maestroPrivateSources.Count) Maestro's private feeds." 
- - ForEach ($PackageSource in $maestroPrivateSources) { - Write-Host "`tInserting credential for Maestro's feed:" $PackageSource.Key - AddCredential -Creds $creds -Source $PackageSource.Key -Username $Username -pwd $pwd - } -} - -function EnablePrivatePackageSources($DisabledPackageSources) { - $maestroPrivateSources = $DisabledPackageSources.SelectNodes("add[contains(@key,'darc-int')]") - ForEach ($DisabledPackageSource in $maestroPrivateSources) { - Write-Host "`tEnsuring private source '$($DisabledPackageSource.key)' is enabled by deleting it from disabledPackageSource" - # Due to https://github.com/NuGet/Home/issues/10291, we must actually remove the disabled entries - $DisabledPackageSources.RemoveChild($DisabledPackageSource) - } -} - -if (!(Test-Path $ConfigFile -PathType Leaf)) { - Write-PipelineTelemetryError -Category 'Build' -Message "Eng/common/SetupNugetSources.ps1 returned a non-zero exit code. Couldn't find the NuGet config file: $ConfigFile" - ExitWithExitCode 1 -} - -# Load NuGet.config -$doc = New-Object System.Xml.XmlDocument -$filename = (Get-Item $ConfigFile).FullName -$doc.Load($filename) - -# Get reference to or create one if none exist already -$sources = $doc.DocumentElement.SelectSingleNode("packageSources") -if ($sources -eq $null) { - $sources = $doc.CreateElement("packageSources") - $doc.DocumentElement.AppendChild($sources) | Out-Null -} - -$creds = $null -if ($Password) { - # Looks for a node. Create it if none is found. 
- $creds = $doc.DocumentElement.SelectSingleNode("packageSourceCredentials") - if ($creds -eq $null) { - $creds = $doc.CreateElement("packageSourceCredentials") - $doc.DocumentElement.AppendChild($creds) | Out-Null - } -} - -# Check for disabledPackageSources; we'll enable any darc-int ones we find there -$disabledSources = $doc.DocumentElement.SelectSingleNode("disabledPackageSources") -if ($disabledSources -ne $null) { - Write-Host "Checking for any darc-int disabled package sources in the disabledPackageSources node" - EnablePrivatePackageSources -DisabledPackageSources $disabledSources -} - -$userName = "dn-bot" - -# Insert credential nodes for Maestro's private feeds -InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -pwd $Password - -# 3.1 uses a different feed url format so it's handled differently here -$dotnet31Source = $sources.SelectSingleNode("add[@key='dotnet3.1']") -if ($dotnet31Source -ne $null) { - AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password - AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password -} - -$dotnetVersions = @('5','6','7','8','9') - -foreach ($dotnetVersion in $dotnetVersions) { - $feedPrefix = "dotnet" + $dotnetVersion; - $dotnetSource = $sources.SelectSingleNode("add[@key='$feedPrefix']") - if ($dotnetSource -ne $null) { - AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password - AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint 
"https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password - } -} - -$doc.Save($filename) diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh deleted file mode 100755 index 4604b61b0..000000000 --- a/eng/common/SetupNugetSources.sh +++ /dev/null @@ -1,167 +0,0 @@ -#!/usr/bin/env bash - -# This script adds internal feeds required to build commits that depend on internal package sources. For instance, -# dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. In addition also enables -# disabled internal Maestro (darc-int*) feeds. -# -# Optionally, this script also adds a credential entry for each of the internal feeds if supplied. -# -# See example call for this script below. -# -# - task: Bash@3 -# displayName: Setup Internal Feeds -# inputs: -# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh -# arguments: $(Build.SourcesDirectory)/NuGet.config -# condition: ne(variables['Agent.OS'], 'Windows_NT') -# - task: NuGetAuthenticate@1 -# -# Note that the NuGetAuthenticate task should be called after SetupNugetSources. -# This ensures that: -# - Appropriate creds are set for the added internal feeds (if not supplied to the scrupt) -# - The credential provider is installed. -# -# This logic is also abstracted into enable-internal-sources.yml. - -ConfigFile=$1 -CredToken=$2 -NL='\n' -TB=' ' - -source="${BASH_SOURCE[0]}" - -# resolve $source until the file is no longer a symlink -while [[ -h "$source" ]]; do - scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - source="$(readlink "$source")" - # if $source was a relative symlink, we need to resolve it relative to the path where the - # symlink file was located - [[ $source != /* ]] && source="$scriptroot/$source" -done -scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - -. "$scriptroot/tools.sh" - -if [ ! 
-f "$ConfigFile" ]; then - Write-PipelineTelemetryError -Category 'Build' "Error: Eng/common/SetupNugetSources.sh returned a non-zero exit code. Couldn't find the NuGet config file: $ConfigFile" - ExitWithExitCode 1 -fi - -if [[ `uname -s` == "Darwin" ]]; then - NL=$'\\\n' - TB='' -fi - -# Ensure there is a ... section. -grep -i "" $ConfigFile -if [ "$?" != "0" ]; then - echo "Adding ... section." - ConfigNodeHeader="" - PackageSourcesTemplate="${TB}${NL}${TB}" - - sed -i.bak "s|$ConfigNodeHeader|$ConfigNodeHeader${NL}$PackageSourcesTemplate|" $ConfigFile -fi - -# Ensure there is a ... section. -grep -i "" $ConfigFile -if [ "$?" != "0" ]; then - echo "Adding ... section." - - PackageSourcesNodeFooter="" - PackageSourceCredentialsTemplate="${TB}${NL}${TB}" - - sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourcesNodeFooter${NL}$PackageSourceCredentialsTemplate|" $ConfigFile -fi - -PackageSources=() - -# Ensure dotnet3.1-internal and dotnet3.1-internal-transport are in the packageSources if the public dotnet3.1 feeds are present -grep -i "" - - sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile - fi - PackageSources+=('dotnet3.1-internal') - - grep -i "" $ConfigFile - if [ "$?" != "0" ]; then - echo "Adding dotnet3.1-internal-transport to the packageSources." - PackageSourcesNodeFooter="" - PackageSourceTemplate="${TB}" - - sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile - fi - PackageSources+=('dotnet3.1-internal-transport') -fi - -DotNetVersions=('5' '6' '7' '8' '9') - -for DotNetVersion in ${DotNetVersions[@]} ; do - FeedPrefix="dotnet${DotNetVersion}"; - grep -i "" - - sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile - fi - PackageSources+=("$FeedPrefix-internal") - - grep -i "" $ConfigFile - if [ "$?" != "0" ]; then - echo "Adding $FeedPrefix-internal-transport to the packageSources." 
- PackageSourcesNodeFooter="" - PackageSourceTemplate="${TB}" - - sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile - fi - PackageSources+=("$FeedPrefix-internal-transport") - fi -done - -# I want things split line by line -PrevIFS=$IFS -IFS=$'\n' -PackageSources+="$IFS" -PackageSources+=$(grep -oh '"darc-int-[^"]*"' $ConfigFile | tr -d '"') -IFS=$PrevIFS - -if [ "$CredToken" ]; then - for FeedName in ${PackageSources[@]} ; do - # Check if there is no existing credential for this FeedName - grep -i "<$FeedName>" $ConfigFile - if [ "$?" != "0" ]; then - echo "Adding credentials for $FeedName." - - PackageSourceCredentialsNodeFooter="" - NewCredential="${TB}${TB}<$FeedName>${NL}${NL}${NL}" - - sed -i.bak "s|$PackageSourceCredentialsNodeFooter|$NewCredential${NL}$PackageSourceCredentialsNodeFooter|" $ConfigFile - fi - done -fi - -# Re-enable any entries in disabledPackageSources where the feed name contains darc-int -grep -i "" $ConfigFile -if [ "$?" 
== "0" ]; then - DisabledDarcIntSources=() - echo "Re-enabling any disabled \"darc-int\" package sources in $ConfigFile" - DisabledDarcIntSources+=$(grep -oh '"darc-int-[^"]*" value="true"' $ConfigFile | tr -d '"') - for DisabledSourceName in ${DisabledDarcIntSources[@]} ; do - if [[ $DisabledSourceName == darc-int* ]] - then - OldDisableValue="" - NewDisableValue="" - sed -i.bak "s|$OldDisableValue|$NewDisableValue|" $ConfigFile - echo "Neutralized disablePackageSources entry for '$DisabledSourceName'" - fi - done -fi diff --git a/eng/common/build.cmd b/eng/common/build.cmd deleted file mode 100644 index 99daf368a..000000000 --- a/eng/common/build.cmd +++ /dev/null @@ -1,3 +0,0 @@ -@echo off -powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0build.ps1""" %*" -exit /b %ErrorLevel% diff --git a/eng/common/build.ps1 b/eng/common/build.ps1 deleted file mode 100644 index 8cfee107e..000000000 --- a/eng/common/build.ps1 +++ /dev/null @@ -1,178 +0,0 @@ -[CmdletBinding(PositionalBinding=$false)] -Param( - [string][Alias('c')]$configuration = "Debug", - [string]$platform = $null, - [string] $projects, - [string][Alias('v')]$verbosity = "minimal", - [string] $msbuildEngine = $null, - [bool] $warnAsError = $true, - [bool] $nodeReuse = $true, - [switch] $buildCheck = $false, - [switch][Alias('r')]$restore, - [switch] $deployDeps, - [switch][Alias('b')]$build, - [switch] $rebuild, - [switch] $deploy, - [switch][Alias('t')]$test, - [switch] $integrationTest, - [switch] $performanceTest, - [switch] $sign, - [switch] $pack, - [switch] $publish, - [switch] $clean, - [switch][Alias('pb')]$productBuild, - [switch]$fromVMR, - [switch][Alias('bl')]$binaryLog, - [switch][Alias('nobl')]$excludeCIBinarylog, - [switch] $ci, - [switch] $prepareMachine, - [string] $runtimeSourceFeed = '', - [string] $runtimeSourceFeedKey = '', - [switch] $excludePrereleaseVS, - [switch] $nativeToolsOnMachine, - [switch] $help, - 
[Parameter(ValueFromRemainingArguments=$true)][String[]]$properties -) - -# Unset 'Platform' environment variable to avoid unwanted collision in InstallDotNetCore.targets file -# some computer has this env var defined (e.g. Some HP) -if($env:Platform) { - $env:Platform="" -} -function Print-Usage() { - Write-Host "Common settings:" - Write-Host " -configuration Build configuration: 'Debug' or 'Release' (short: -c)" - Write-Host " -platform Platform configuration: 'x86', 'x64' or any valid Platform value to pass to msbuild" - Write-Host " -verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)" - Write-Host " -binaryLog Output binary log (short: -bl)" - Write-Host " -help Print help and exit" - Write-Host "" - - Write-Host "Actions:" - Write-Host " -restore Restore dependencies (short: -r)" - Write-Host " -build Build solution (short: -b)" - Write-Host " -rebuild Rebuild solution" - Write-Host " -deploy Deploy built VSIXes" - Write-Host " -deployDeps Deploy dependencies (e.g. VSIXes for integration tests)" - Write-Host " -test Run all unit tests in the solution (short: -t)" - Write-Host " -integrationTest Run all integration tests in the solution" - Write-Host " -performanceTest Run all performance tests in the solution" - Write-Host " -pack Package build outputs into NuGet packages and Willow components" - Write-Host " -sign Sign build outputs" - Write-Host " -publish Publish artifacts (e.g. symbols)" - Write-Host " -clean Clean the solution" - Write-Host " -productBuild Build the solution in the way it will be built in the full .NET product (VMR) build (short: -pb)" - Write-Host "" - - Write-Host "Advanced settings:" - Write-Host " -projects Semi-colon delimited list of sln/proj's to build. 
Globbing is supported (*.sln)" - Write-Host " -ci Set when running on CI server" - Write-Host " -excludeCIBinarylog Don't output binary log (short: -nobl)" - Write-Host " -prepareMachine Prepare machine for CI run, clean up processes after build" - Write-Host " -warnAsError Sets warnaserror msbuild parameter ('true' or 'false')" - Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)." - Write-Host " -excludePrereleaseVS Set to exclude build engines in prerelease versions of Visual Studio" - Write-Host " -nativeToolsOnMachine Sets the native tools on machine environment variable (indicating that the script should use native tools on machine)" - Write-Host " -nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')" - Write-Host " -buildCheck Sets /check msbuild parameter" - Write-Host " -fromVMR Set when building from within the VMR" - Write-Host "" - - Write-Host "Command line arguments not listed above are passed thru to msbuild." - Write-Host "The above arguments can be shortened as much as to be unambiguous (e.g. -co for configuration, -t for test, etc.)." -} - -. $PSScriptRoot\tools.ps1 - -function InitializeCustomToolset { - if (-not $restore) { - return - } - - $script = Join-Path $EngRoot 'restore-toolset.ps1' - - if (Test-Path $script) { - . $script - } -} - -function Build { - $toolsetBuildProj = InitializeToolset - InitializeCustomToolset - - $bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'Build.binlog') } else { '' } - $platformArg = if ($platform) { "/p:Platform=$platform" } else { '' } - $check = if ($buildCheck) { '/check' } else { '' } - - if ($projects) { - # Re-assign properties to a new variable because PowerShell doesn't let us append properties directly for unclear reasons. - # Explicitly set the type as string[] because otherwise PowerShell would make this char[] if $properties is empty. 
- [string[]] $msbuildArgs = $properties - - # Resolve relative project paths into full paths - $projects = ($projects.Split(';').ForEach({Resolve-Path $_}) -join ';') - - $msbuildArgs += "/p:Projects=$projects" - $properties = $msbuildArgs - } - - MSBuild $toolsetBuildProj ` - $bl ` - $platformArg ` - $check ` - /p:Configuration=$configuration ` - /p:RepoRoot=$RepoRoot ` - /p:Restore=$restore ` - /p:DeployDeps=$deployDeps ` - /p:Build=$build ` - /p:Rebuild=$rebuild ` - /p:Deploy=$deploy ` - /p:Test=$test ` - /p:Pack=$pack ` - /p:DotNetBuild=$productBuild ` - /p:DotNetBuildFromVMR=$fromVMR ` - /p:IntegrationTest=$integrationTest ` - /p:PerformanceTest=$performanceTest ` - /p:Sign=$sign ` - /p:Publish=$publish ` - /p:RestoreStaticGraphEnableBinaryLogger=$binaryLog ` - @properties -} - -try { - if ($clean) { - if (Test-Path $ArtifactsDir) { - Remove-Item -Recurse -Force $ArtifactsDir - Write-Host 'Artifacts directory deleted.' - } - exit 0 - } - - if ($help -or (($null -ne $properties) -and ($properties.Contains('/help') -or $properties.Contains('/?')))) { - Print-Usage - exit 0 - } - - if ($ci) { - if (-not $excludeCIBinarylog) { - $binaryLog = $true - } - $nodeReuse = $false - } - - if ($nativeToolsOnMachine) { - $env:NativeToolsOnMachine = $true - } - if ($restore) { - InitializeNativeTools - } - - Build -} -catch { - Write-Host $_.ScriptStackTrace - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_ - ExitWithExitCode 1 -} - -ExitWithExitCode 0 diff --git a/eng/common/build.sh b/eng/common/build.sh deleted file mode 100755 index 9767bb411..000000000 --- a/eng/common/build.sh +++ /dev/null @@ -1,277 +0,0 @@ -#!/usr/bin/env bash - -# Stop script if unbound variable found (use ${var:-} if intentional) -set -u - -# Stop script if command returns non-zero exit code. -# Prevents hidden errors caused by missing error code propagation. 
-set -e - -usage() -{ - echo "Common settings:" - echo " --configuration Build configuration: 'Debug' or 'Release' (short: -c)" - echo " --verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)" - echo " --binaryLog Create MSBuild binary log (short: -bl)" - echo " --help Print help and exit (short: -h)" - echo "" - - echo "Actions:" - echo " --restore Restore dependencies (short: -r)" - echo " --build Build solution (short: -b)" - echo " --sourceBuild Source-build the solution (short: -sb)" - echo " Will additionally trigger the following actions: --restore, --build, --pack" - echo " If --configuration is not set explicitly, will also set it to 'Release'" - echo " --productBuild Build the solution in the way it will be built in the full .NET product (VMR) build (short: -pb)" - echo " Will additionally trigger the following actions: --restore, --build, --pack" - echo " If --configuration is not set explicitly, will also set it to 'Release'" - echo " --rebuild Rebuild solution" - echo " --test Run all unit tests in the solution (short: -t)" - echo " --integrationTest Run all integration tests in the solution" - echo " --performanceTest Run all performance tests in the solution" - echo " --pack Package build outputs into NuGet packages and Willow components" - echo " --sign Sign build outputs" - echo " --publish Publish artifacts (e.g. 
symbols)" - echo " --clean Clean the solution" - echo "" - - echo "Advanced settings:" - echo " --projects Project or solution file(s) to build" - echo " --ci Set when running on CI server" - echo " --excludeCIBinarylog Don't output binary log (short: -nobl)" - echo " --prepareMachine Prepare machine for CI run, clean up processes after build" - echo " --nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')" - echo " --warnAsError Sets warnaserror msbuild parameter ('true' or 'false')" - echo " --buildCheck Sets /check msbuild parameter" - echo " --fromVMR Set when building from within the VMR" - echo "" - echo "Command line arguments not listed above are passed thru to msbuild." - echo "Arguments can also be passed in with a single hyphen." -} - -source="${BASH_SOURCE[0]}" - -# resolve $source until the file is no longer a symlink -while [[ -h "$source" ]]; do - scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - source="$(readlink "$source")" - # if $source was a relative symlink, we need to resolve it relative to the path where the - # symlink file was located - [[ $source != /* ]] && source="$scriptroot/$source" -done -scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - -restore=false -build=false -source_build=false -product_build=false -from_vmr=false -rebuild=false -test=false -integration_test=false -performance_test=false -pack=false -publish=false -sign=false -public=false -ci=false -clean=false - -warn_as_error=true -node_reuse=true -build_check=false -binary_log=false -exclude_ci_binary_log=false -pipelines_log=false - -projects='' -configuration='' -prepare_machine=false -verbosity='minimal' -runtime_source_feed='' -runtime_source_feed_key='' - -properties=() -while [[ $# > 0 ]]; do - opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" - case "$opt" in - -help|-h) - usage - exit 0 - ;; - -clean) - clean=true - ;; - -configuration|-c) - configuration=$2 - shift - ;; - -verbosity|-v) - verbosity=$2 - shift - ;; - -binarylog|-bl) 
- binary_log=true - ;; - -excludecibinarylog|-nobl) - exclude_ci_binary_log=true - ;; - -pipelineslog|-pl) - pipelines_log=true - ;; - -restore|-r) - restore=true - ;; - -build|-b) - build=true - ;; - -rebuild) - rebuild=true - ;; - -pack) - pack=true - ;; - -sourcebuild|-source-build|-sb) - build=true - source_build=true - product_build=true - restore=true - pack=true - ;; - -productbuild|-product-build|-pb) - build=true - product_build=true - restore=true - pack=true - ;; - -fromvmr|-from-vmr) - from_vmr=true - ;; - -test|-t) - test=true - ;; - -integrationtest) - integration_test=true - ;; - -performancetest) - performance_test=true - ;; - -sign) - sign=true - ;; - -publish) - publish=true - ;; - -preparemachine) - prepare_machine=true - ;; - -projects) - projects=$2 - shift - ;; - -ci) - ci=true - ;; - -warnaserror) - warn_as_error=$2 - shift - ;; - -nodereuse) - node_reuse=$2 - shift - ;; - -buildcheck) - build_check=true - ;; - -runtimesourcefeed) - runtime_source_feed=$2 - shift - ;; - -runtimesourcefeedkey) - runtime_source_feed_key=$2 - shift - ;; - *) - properties+=("$1") - ;; - esac - - shift -done - -if [[ -z "$configuration" ]]; then - if [[ "$source_build" = true ]]; then configuration="Release"; else configuration="Debug"; fi -fi - -if [[ "$ci" == true ]]; then - pipelines_log=true - node_reuse=false - if [[ "$exclude_ci_binary_log" == false ]]; then - binary_log=true - fi -fi - -. "$scriptroot/tools.sh" - -function InitializeCustomToolset { - local script="$eng_root/restore-toolset.sh" - - if [[ -a "$script" ]]; then - . "$script" - fi -} - -function Build { - InitializeToolset - InitializeCustomToolset - - if [[ ! 
-z "$projects" ]]; then - properties+=("/p:Projects=$projects") - fi - - local bl="" - if [[ "$binary_log" == true ]]; then - bl="/bl:\"$log_dir/Build.binlog\"" - fi - - local check="" - if [[ "$build_check" == true ]]; then - check="/check" - fi - - MSBuild $_InitializeToolset \ - $bl \ - $check \ - /p:Configuration=$configuration \ - /p:RepoRoot="$repo_root" \ - /p:Restore=$restore \ - /p:Build=$build \ - /p:DotNetBuild=$product_build \ - /p:DotNetBuildSourceOnly=$source_build \ - /p:DotNetBuildFromVMR=$from_vmr \ - /p:Rebuild=$rebuild \ - /p:Test=$test \ - /p:Pack=$pack \ - /p:IntegrationTest=$integration_test \ - /p:PerformanceTest=$performance_test \ - /p:Sign=$sign \ - /p:Publish=$publish \ - /p:RestoreStaticGraphEnableBinaryLogger=$binary_log \ - ${properties[@]+"${properties[@]}"} - - ExitWithExitCode 0 -} - -if [[ "$clean" == true ]]; then - if [ -d "$artifacts_dir" ]; then - rm -rf $artifacts_dir - echo "Artifacts directory deleted." - fi - exit 0 -fi - -if [[ "$restore" == true ]]; then - InitializeNativeTools -fi - -Build diff --git a/eng/common/cibuild.sh b/eng/common/cibuild.sh deleted file mode 100755 index 66e3b0ac6..000000000 --- a/eng/common/cibuild.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env bash - -source="${BASH_SOURCE[0]}" - -# resolve $SOURCE until the file is no longer a symlink -while [[ -h $source ]]; do - scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - source="$(readlink "$source")" - - # if $source was a relative symlink, we need to resolve it relative to the path where - # the symlink file was located - [[ $source != /* ]] && source="$scriptroot/$source" -done -scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - -. 
"$scriptroot/build.sh" --restore --build --test --pack --publish --ci $@ diff --git a/eng/common/core-templates/job/job.yml b/eng/common/core-templates/job/job.yml deleted file mode 100644 index 6badecba7..000000000 --- a/eng/common/core-templates/job/job.yml +++ /dev/null @@ -1,225 +0,0 @@ -parameters: -# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job - cancelTimeoutInMinutes: '' - condition: '' - container: '' - continueOnError: false - dependsOn: '' - displayName: '' - pool: '' - steps: [] - strategy: '' - timeoutInMinutes: '' - variables: [] - workspace: '' - templateContext: {} - -# Job base template specific parameters - # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md - # publishing defaults - artifacts: '' - enableMicrobuild: false - enableMicrobuildForMacAndLinux: false - enablePublishBuildArtifacts: false - enablePublishBuildAssets: false - enablePublishTestResults: false - enableBuildRetry: false - mergeTestResults: false - testRunTitle: '' - testResultsFormat: '' - name: '' - componentGovernanceSteps: [] - preSteps: [] - artifactPublishSteps: [] - runAsPublic: false - -# 1es specific parameters - is1ESPipeline: '' - -jobs: -- job: ${{ parameters.name }} - - ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}: - cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }} - - ${{ if ne(parameters.condition, '') }}: - condition: ${{ parameters.condition }} - - ${{ if ne(parameters.container, '') }}: - container: ${{ parameters.container }} - - ${{ if ne(parameters.continueOnError, '') }}: - continueOnError: ${{ parameters.continueOnError }} - - ${{ if ne(parameters.dependsOn, '') }}: - dependsOn: ${{ parameters.dependsOn }} - - ${{ if ne(parameters.displayName, '') }}: - displayName: ${{ parameters.displayName }} - - ${{ if ne(parameters.pool, '') }}: - pool: ${{ parameters.pool }} - - ${{ if 
ne(parameters.strategy, '') }}: - strategy: ${{ parameters.strategy }} - - ${{ if ne(parameters.timeoutInMinutes, '') }}: - timeoutInMinutes: ${{ parameters.timeoutInMinutes }} - - ${{ if ne(parameters.templateContext, '') }}: - templateContext: ${{ parameters.templateContext }} - - variables: - - ${{ if ne(parameters.enableTelemetry, 'false') }}: - - name: DOTNET_CLI_TELEMETRY_PROFILE - value: '$(Build.Repository.Uri)' - # Retry signature validation up to three times, waiting 2 seconds between attempts. - # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures - - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY - value: 3,2000 - - ${{ each variable in parameters.variables }}: - # handle name-value variable syntax - # example: - # - name: [key] - # value: [value] - - ${{ if ne(variable.name, '') }}: - - name: ${{ variable.name }} - value: ${{ variable.value }} - - # handle variable groups - - ${{ if ne(variable.group, '') }}: - - group: ${{ variable.group }} - - # handle template variable syntax - # example: - # - template: path/to/template.yml - # parameters: - # [key]: [value] - - ${{ if ne(variable.template, '') }}: - - template: ${{ variable.template }} - ${{ if ne(variable.parameters, '') }}: - parameters: ${{ variable.parameters }} - - # handle key-value variable syntax. 
- # example: - # - [key]: [value] - - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}: - - ${{ each pair in variable }}: - - name: ${{ pair.key }} - value: ${{ pair.value }} - - # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds - - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - group: DotNet-HelixApi-Access - - ${{ if ne(parameters.workspace, '') }}: - workspace: ${{ parameters.workspace }} - - steps: - - ${{ if eq(parameters.is1ESPipeline, '') }}: - - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error - - - ${{ if ne(parameters.preSteps, '') }}: - - ${{ each preStep in parameters.preSteps }}: - - ${{ preStep }} - - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - template: /eng/common/core-templates/steps/install-microbuild.yml - parameters: - enableMicrobuild: ${{ parameters.enableMicrobuild }} - enableMicrobuildForMacAndLinux: ${{ parameters.enableMicrobuildForMacAndLinux }} - continueOnError: ${{ parameters.continueOnError }} - - - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}: - - task: NuGetAuthenticate@1 - - - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}: - - task: DownloadPipelineArtifact@2 - inputs: - buildType: current - artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }} - targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }} - itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }} - - - ${{ each step in parameters.steps }}: - - ${{ step }} - - - ${{ each 
step in parameters.componentGovernanceSteps }}: - - ${{ step }} - - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - template: /eng/common/core-templates/steps/cleanup-microbuild.yml - parameters: - enableMicrobuild: ${{ parameters.enableMicrobuild }} - enableMicrobuildForMacAndLinux: ${{ parameters.enableMicrobuildForMacAndLinux }} - continueOnError: ${{ parameters.continueOnError }} - - # Publish test results - - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}: - - task: PublishTestResults@2 - displayName: Publish XUnit Test Results - inputs: - testResultsFormat: 'xUnit' - testResultsFiles: '*.xml' - searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' - testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit - mergeTestResults: ${{ parameters.mergeTestResults }} - continueOnError: true - condition: always() - - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}: - - task: PublishTestResults@2 - displayName: Publish TRX Test Results - inputs: - testResultsFormat: 'VSTest' - testResultsFiles: '*.trx' - searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' - testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx - mergeTestResults: ${{ parameters.mergeTestResults }} - continueOnError: true - condition: always() - - # gather artifacts - - ${{ if ne(parameters.artifacts.publish, '') }}: - - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}: - - task: CopyFiles@2 - displayName: Gather binaries for publish to artifacts - inputs: - SourceFolder: 'artifacts/bin' - Contents: 
'**' - TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin' - - task: CopyFiles@2 - displayName: Gather packages for publish to artifacts - inputs: - SourceFolder: 'artifacts/packages' - Contents: '**' - TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages' - - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: - - task: CopyFiles@2 - displayName: Gather logs for publish to artifacts - inputs: - SourceFolder: 'artifacts/log' - Contents: '**' - TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/log' - continueOnError: true - condition: always() - - - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: - - task: CopyFiles@2 - displayName: Gather logs for publish to artifacts - inputs: - SourceFolder: 'artifacts/log/$(_BuildConfig)' - Contents: '**' - TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)' - continueOnError: true - condition: always() - - ${{ if eq(parameters.enableBuildRetry, 'true') }}: - - task: CopyFiles@2 - displayName: Gather buildconfiguration for build retry - inputs: - SourceFolder: '$(Build.SourcesDirectory)/eng/common/BuildConfiguration' - Contents: '**' - TargetFolder: '$(Build.ArtifactStagingDirectory)/eng/common/BuildConfiguration' - continueOnError: true - condition: always() - - ${{ each step in parameters.artifactPublishSteps }}: - - ${{ step }} diff --git a/eng/common/core-templates/job/onelocbuild.yml b/eng/common/core-templates/job/onelocbuild.yml deleted file mode 100644 index 8034815f4..000000000 --- a/eng/common/core-templates/job/onelocbuild.yml +++ /dev/null @@ -1,120 +0,0 @@ -parameters: - # Optional: dependencies of the job - dependsOn: '' - - # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool - pool: '' - - CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex - GithubPat: 
$(BotAccount-dotnet-bot-repo-PAT) - - SourcesDirectory: $(Build.SourcesDirectory) - CreatePr: true - AutoCompletePr: false - ReusePr: true - UseLfLineEndings: true - UseCheckedInLocProjectJson: false - SkipLocProjectJsonGeneration: false - LanguageSet: VS_Main_Languages - LclSource: lclFilesInRepo - LclPackageId: '' - RepoType: gitHub - GitHubOrg: dotnet - MirrorRepo: '' - MirrorBranch: main - condition: '' - JobNameSuffix: '' - is1ESPipeline: '' -jobs: -- job: OneLocBuild${{ parameters.JobNameSuffix }} - - dependsOn: ${{ parameters.dependsOn }} - - displayName: OneLocBuild${{ parameters.JobNameSuffix }} - - variables: - - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat - - name: _GenerateLocProjectArguments - value: -SourcesDirectory ${{ parameters.SourcesDirectory }} - -LanguageSet "${{ parameters.LanguageSet }}" - -CreateNeutralXlfs - - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}: - - name: _GenerateLocProjectArguments - value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson - - template: /eng/common/core-templates/variables/pool-providers.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - - ${{ if ne(parameters.pool, '') }}: - pool: ${{ parameters.pool }} - ${{ if eq(parameters.pool, '') }}: - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: AzurePipelines-EO - image: 1ESPT-Windows2022 - demands: Cmd - os: windows - # If it's not devdiv, it's dnceng - ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: - name: $(DncEngInternalBuildPool) - image: 1es-windows-2022 - os: windows - - steps: - - ${{ if eq(parameters.is1ESPipeline, '') }}: - - 'Illegal entry point, is1ESPipeline is not defined. 
Repository yaml should not directly reference templates in core-templates folder.': error - - - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}: - - task: Powershell@2 - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1 - arguments: $(_GenerateLocProjectArguments) - displayName: Generate LocProject.json - condition: ${{ parameters.condition }} - - - task: OneLocBuild@2 - displayName: OneLocBuild - env: - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - inputs: - locProj: eng/Localize/LocProject.json - outDir: $(Build.ArtifactStagingDirectory) - lclSource: ${{ parameters.LclSource }} - lclPackageId: ${{ parameters.LclPackageId }} - isCreatePrSelected: ${{ parameters.CreatePr }} - isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }} - ${{ if eq(parameters.CreatePr, true) }}: - isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }} - isShouldReusePrSelected: ${{ parameters.ReusePr }} - packageSourceAuth: patAuth - patVariable: ${{ parameters.CeapexPat }} - ${{ if eq(parameters.RepoType, 'gitHub') }}: - repoType: ${{ parameters.RepoType }} - gitHubPatVariable: "${{ parameters.GithubPat }}" - ${{ if ne(parameters.MirrorRepo, '') }}: - isMirrorRepoSelected: true - gitHubOrganization: ${{ parameters.GitHubOrg }} - mirrorRepo: ${{ parameters.MirrorRepo }} - mirrorBranch: ${{ parameters.MirrorBranch }} - condition: ${{ parameters.condition }} - - - template: /eng/common/core-templates/steps/publish-build-artifacts.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - args: - displayName: Publish Localization Files - pathToPublish: '$(Build.ArtifactStagingDirectory)/loc' - publishLocation: Container - artifactName: Loc - condition: ${{ parameters.condition }} - - - template: /eng/common/core-templates/steps/publish-build-artifacts.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - args: - displayName: Publish LocProject.json - pathToPublish: '$(Build.SourcesDirectory)/eng/Localize/' - 
publishLocation: Container - artifactName: Loc - condition: ${{ parameters.condition }} diff --git a/eng/common/core-templates/job/publish-build-assets.yml b/eng/common/core-templates/job/publish-build-assets.yml deleted file mode 100644 index d5303229c..000000000 --- a/eng/common/core-templates/job/publish-build-assets.yml +++ /dev/null @@ -1,194 +0,0 @@ -parameters: - configuration: 'Debug' - - # Optional: condition for the job to run - condition: '' - - # Optional: 'true' if future jobs should run even if this job fails - continueOnError: false - - # Optional: dependencies of the job - dependsOn: '' - - # Optional: Include PublishBuildArtifacts task - enablePublishBuildArtifacts: false - - # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool - pool: {} - - # Optional: should run as a public build even in the internal project - # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. 
- runAsPublic: false - - # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing - publishAssetsImmediately: false - - artifactsPublishingAdditionalParameters: '' - - signingValidationAdditionalParameters: '' - - is1ESPipeline: '' - - # Optional: whether or not the build has assets it wants to publish to BAR - isAssetlessBuild: false - - # Optional, publishing version - publishingVersion: 3 - - # Optional: A minimatch pattern for the asset manifests to publish to BAR - assetManifestsPattern: '*/manifests/**/*.xml' - -jobs: -- job: Asset_Registry_Publish - - dependsOn: ${{ parameters.dependsOn }} - timeoutInMinutes: 150 - - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: - displayName: Publish Assets - ${{ else }}: - displayName: Publish to Build Asset Registry - - variables: - - template: /eng/common/core-templates/variables/pool-providers.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - group: Publish-Build-Assets - - group: AzureDevOps-Artifact-Feeds-Pats - - name: runCodesignValidationInjection - value: false - # unconditional - needed for logs publishing (redactor tool version) - - template: /eng/common/core-templates/post-build/common-variables.yml - - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: AzurePipelines-EO - image: 1ESPT-Windows2022 - demands: Cmd - os: windows - # If it's not devdiv, it's dnceng - ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: - name: NetCore1ESPool-Publishing-Internal - image: windows.vs2019.amd64 - os: windows - steps: - - ${{ if eq(parameters.is1ESPipeline, '') }}: - - 'Illegal entry point, is1ESPipeline is not defined. 
Repository yaml should not directly reference templates in core-templates folder.': error - - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - checkout: self - fetchDepth: 3 - clean: true - - - ${{ if eq(parameters.isAssetlessBuild, 'false') }}: - - ${{ if eq(parameters.publishingVersion, 3) }}: - - task: DownloadPipelineArtifact@2 - displayName: Download Asset Manifests - inputs: - artifactName: AssetManifests - targetPath: '$(Build.StagingDirectory)/AssetManifests' - condition: ${{ parameters.condition }} - continueOnError: ${{ parameters.continueOnError }} - - ${{ if eq(parameters.publishingVersion, 4) }}: - - task: DownloadPipelineArtifact@2 - displayName: Download V4 asset manifests - inputs: - itemPattern: '*/manifests/**/*.xml' - targetPath: '$(Build.StagingDirectory)/AllAssetManifests' - condition: ${{ parameters.condition }} - continueOnError: ${{ parameters.continueOnError }} - - task: CopyFiles@2 - displayName: Copy V4 asset manifests to AssetManifests - inputs: - SourceFolder: '$(Build.StagingDirectory)/AllAssetManifests' - Contents: ${{ parameters.assetManifestsPattern }} - TargetFolder: '$(Build.StagingDirectory)/AssetManifests' - flattenFolders: true - condition: ${{ parameters.condition }} - continueOnError: ${{ parameters.continueOnError }} - - - task: NuGetAuthenticate@1 - - - task: AzureCLI@2 - displayName: Publish Build Assets - inputs: - azureSubscription: "Darc: Maestro Production" - scriptType: ps - scriptLocation: scriptPath - scriptPath: $(Build.SourcesDirectory)/eng/common/sdk-task.ps1 - arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet - /p:ManifestsPath='$(Build.StagingDirectory)/AssetManifests' - /p:IsAssetlessBuild=${{ parameters.isAssetlessBuild }} - /p:MaestroApiEndpoint=https://maestro.dot.net - /p:OfficialBuildId=$(Build.BuildNumber) - condition: ${{ parameters.condition }} - continueOnError: ${{ 
parameters.continueOnError }} - - - task: powershell@2 - displayName: Create ReleaseConfigs Artifact - inputs: - targetType: inline - script: | - New-Item -Path "$(Build.StagingDirectory)/ReleaseConfigs" -ItemType Directory -Force - $filePath = "$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt" - Add-Content -Path $filePath -Value $(BARBuildId) - Add-Content -Path $filePath -Value "$(DefaultChannels)" - Add-Content -Path $filePath -Value $(IsStableBuild) - - $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt" - if (Test-Path -Path $symbolExclusionfile) - { - Write-Host "SymbolExclusionFile exists" - Copy-Item -Path $symbolExclusionfile -Destination "$(Build.StagingDirectory)/ReleaseConfigs" - } - - - ${{ if eq(parameters.publishingVersion, 4) }}: - - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - args: - targetPath: '$(Build.ArtifactStagingDirectory)/MergedManifest.xml' - artifactName: AssetManifests - displayName: 'Publish Merged Manifest' - retryCountOnTaskFailure: 10 # for any logs being locked - sbomEnabled: false # we don't need SBOM for logs - - - template: /eng/common/core-templates/steps/publish-build-artifacts.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - args: - displayName: Publish ReleaseConfigs Artifact - pathToPublish: '$(Build.StagingDirectory)/ReleaseConfigs' - publishLocation: Container - artifactName: ReleaseConfigs - - - ${{ if or(eq(parameters.publishAssetsImmediately, 'true'), eq(parameters.isAssetlessBuild, 'true')) }}: - - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - is1ESPipeline: ${{ parameters.is1ESPipeline }} - - - task: AzureCLI@2 - displayName: Publish Using Darc - inputs: - azureSubscription: "Darc: Maestro Production" - 
scriptType: ps - scriptLocation: scriptPath - scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 - arguments: > - -BuildId $(BARBuildId) - -PublishingInfraVersion 3 - -AzdoToken '$(System.AccessToken)' - -WaitPublishingFinish true - -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' - -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' - -SkipAssetsPublishing '${{ parameters.isAssetlessBuild }}' - - - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: - - template: /eng/common/core-templates/steps/publish-logs.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - JobLabel: 'Publish_Artifacts_Logs' diff --git a/eng/common/core-templates/job/source-build.yml b/eng/common/core-templates/job/source-build.yml deleted file mode 100644 index d805d5fae..000000000 --- a/eng/common/core-templates/job/source-build.yml +++ /dev/null @@ -1,96 +0,0 @@ -parameters: - # This template adds arcade-powered source-build to CI. The template produces a server job with a - # default ID 'Source_Build_Complete' to put in a dependency list if necessary. - - # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed. - jobNamePrefix: 'Source_Build' - - # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for - # managed-only repositories. This is an object with these properties: - # - # name: '' - # The name of the job. This is included in the job ID. - # targetRID: '' - # The name of the target RID to use, instead of the one auto-detected by Arcade. - # portableBuild: false - # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than - # linux-x64), and compiling against distro-provided packages rather than portable ones. The - # default is portable mode. - # skipPublishValidation: false - # Disables publishing validation. 
By default, a check is performed to ensure no packages are - # published by source-build. - # container: '' - # A container to use. Runs in docker. - # pool: {} - # A pool to use. Runs directly on an agent. - # buildScript: '' - # Specifies the build script to invoke to perform the build in the repo. The default - # './build.sh' should work for typical Arcade repositories, but this is customizable for - # difficult situations. - # buildArguments: '' - # Specifies additional build arguments to pass to the build script. - # jobProperties: {} - # A list of job properties to inject at the top level, for potential extensibility beyond - # container and pool. - platform: {} - - is1ESPipeline: '' - - # If set to true and running on a non-public project, - # Internal nuget and blob storage locations will be enabled. - # This is not enabled by default because many repositories do not need internal sources - # and do not need to have the required service connections approved in the pipeline. - enableInternalSources: false - -jobs: -- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }} - displayName: Source-Build (${{ parameters.platform.name }}) - - ${{ each property in parameters.platform.jobProperties }}: - ${{ property.key }}: ${{ property.value }} - - ${{ if ne(parameters.platform.container, '') }}: - container: ${{ parameters.platform.container }} - - ${{ if eq(parameters.platform.pool, '') }}: - # The default VM host AzDO pool. This should be capable of running Docker containers: almost all - # source-build builds run in Docker, including the default managed platform. 
- # /eng/common/core-templates/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic - ${{ if eq(parameters.is1ESPipeline, 'true') }}: - pool: - ${{ if eq(variables['System.TeamProject'], 'public') }}: - name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')] - demands: ImageOverride -equals build.ubuntu.2004.amd64 - ${{ if eq(variables['System.TeamProject'], 'internal') }}: - name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')] - image: 1es-mariner-2 - os: linux - ${{ else }}: - pool: - ${{ if eq(variables['System.TeamProject'], 'public') }}: - name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')] - demands: ImageOverride -equals Build.Ubuntu.2204.Amd64.Open - ${{ if eq(variables['System.TeamProject'], 'internal') }}: - name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')] - demands: ImageOverride -equals Build.Ubuntu.2204.Amd64 - ${{ if ne(parameters.platform.pool, '') }}: - pool: ${{ parameters.platform.pool }} - - workspace: - clean: all - - steps: - - ${{ if eq(parameters.is1ESPipeline, '') }}: - - 'Illegal entry point, is1ESPipeline is not defined. 
Repository yaml should not directly reference templates in core-templates folder.': error - - - ${{ if eq(parameters.enableInternalSources, true) }}: - - template: /eng/common/core-templates/steps/enable-internal-sources.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - - template: /eng/common/core-templates/steps/enable-internal-runtimes.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - - template: /eng/common/core-templates/steps/source-build.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - platform: ${{ parameters.platform }} diff --git a/eng/common/core-templates/job/source-index-stage1.yml b/eng/common/core-templates/job/source-index-stage1.yml deleted file mode 100644 index 30530359a..000000000 --- a/eng/common/core-templates/job/source-index-stage1.yml +++ /dev/null @@ -1,44 +0,0 @@ -parameters: - runAsPublic: false - sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci" - preSteps: [] - binlogPath: artifacts/log/Debug/Build.binlog - condition: '' - dependsOn: '' - pool: '' - is1ESPipeline: '' - -jobs: -- job: SourceIndexStage1 - dependsOn: ${{ parameters.dependsOn }} - condition: ${{ parameters.condition }} - variables: - - name: BinlogPath - value: ${{ parameters.binlogPath }} - - template: /eng/common/core-templates/variables/pool-providers.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - - ${{ if ne(parameters.pool, '') }}: - pool: ${{ parameters.pool }} - ${{ if eq(parameters.pool, '') }}: - pool: - ${{ if eq(variables['System.TeamProject'], 'public') }}: - name: $(DncEngPublicBuildPool) - image: windows.vs2022.amd64.open - ${{ if eq(variables['System.TeamProject'], 'internal') }}: - name: $(DncEngInternalBuildPool) - image: windows.vs2022.amd64 - - steps: - - ${{ if eq(parameters.is1ESPipeline, '') }}: - - 'Illegal entry point, is1ESPipeline is not defined. 
Repository yaml should not directly reference templates in core-templates folder.': error - - - ${{ each preStep in parameters.preSteps }}: - - ${{ preStep }} - - script: ${{ parameters.sourceIndexBuildCommand }} - displayName: Build Repository - - - template: /eng/common/core-templates/steps/source-index-stage1-publish.yml - parameters: - binLogPath: ${{ parameters.binLogPath }} \ No newline at end of file diff --git a/eng/common/core-templates/jobs/codeql-build.yml b/eng/common/core-templates/jobs/codeql-build.yml deleted file mode 100644 index 693b00b37..000000000 --- a/eng/common/core-templates/jobs/codeql-build.yml +++ /dev/null @@ -1,32 +0,0 @@ -parameters: - # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md - continueOnError: false - # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job - jobs: [] - # Optional: if specified, restore and use this version of Guardian instead of the default. - overrideGuardianVersion: '' - is1ESPipeline: '' - -jobs: -- template: /eng/common/core-templates/jobs/jobs.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - enableMicrobuild: false - enablePublishBuildArtifacts: false - enablePublishTestResults: false - enablePublishBuildAssets: false - enableTelemetry: true - - variables: - - group: Publish-Build-Assets - # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in - # sync with the packages.config file. 
- - name: DefaultGuardianVersion - value: 0.109.0 - - name: GuardianPackagesConfigFile - value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config - - name: GuardianVersion - value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }} - - jobs: ${{ parameters.jobs }} - diff --git a/eng/common/core-templates/jobs/jobs.yml b/eng/common/core-templates/jobs/jobs.yml deleted file mode 100644 index bf35b78fa..000000000 --- a/eng/common/core-templates/jobs/jobs.yml +++ /dev/null @@ -1,119 +0,0 @@ -parameters: - # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md - continueOnError: false - - # Optional: Include PublishBuildArtifacts task - enablePublishBuildArtifacts: false - - # Optional: Enable running the source-build jobs to build repo from source - enableSourceBuild: false - - # Optional: Parameters for source-build template. - # See /eng/common/core-templates/jobs/source-build.yml for options - sourceBuildParameters: [] - - graphFileGeneration: - # Optional: Enable generating the graph files at the end of the build - enabled: false - # Optional: Include toolset dependencies in the generated graph files - includeToolset: false - - # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job - jobs: [] - - # Optional: Override automatically derived dependsOn value for "publish build assets" job - publishBuildAssetsDependsOn: '' - - # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather doing so in a separate stage. 
- publishAssetsImmediately: false - - # Optional: whether or not the build has assets it wants to publish to BAR - isAssetlessBuild: false - - # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml) - artifactsPublishingAdditionalParameters: '' - signingValidationAdditionalParameters: '' - - # Optional: should run as a public build even in the internal project - # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. - runAsPublic: false - - enableSourceIndex: false - sourceIndexParams: {} - - artifacts: {} - is1ESPipeline: '' - -# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, -# and some (Microbuild) should only be applied to non-PR cases for internal builds. - -jobs: -- ${{ each job in parameters.jobs }}: - - ${{ if eq(parameters.is1ESPipeline, 'true') }}: - - template: /eng/common/templates-official/job/job.yml - parameters: - # pass along parameters - ${{ each parameter in parameters }}: - ${{ if ne(parameter.key, 'jobs') }}: - ${{ parameter.key }}: ${{ parameter.value }} - - # pass along job properties - ${{ each property in job }}: - ${{ if ne(property.key, 'job') }}: - ${{ property.key }}: ${{ property.value }} - - name: ${{ job.job }} - - - ${{ else }}: - - template: /eng/common/templates/job/job.yml - parameters: - # pass along parameters - ${{ each parameter in parameters }}: - ${{ if ne(parameter.key, 'jobs') }}: - ${{ parameter.key }}: ${{ parameter.value }} - - # pass along job properties - ${{ each property in job }}: - ${{ if ne(property.key, 'job') }}: - ${{ property.key }}: ${{ property.value }} - - name: ${{ job.job }} - -- ${{ if eq(parameters.enableSourceBuild, true) }}: - - template: /eng/common/core-templates/jobs/source-build.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - allCompletedJobId: Source_Build_Complete - ${{ 
each parameter in parameters.sourceBuildParameters }}: - ${{ parameter.key }}: ${{ parameter.value }} - -- ${{ if eq(parameters.enableSourceIndex, 'true') }}: - - template: ../job/source-index-stage1.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - runAsPublic: ${{ parameters.runAsPublic }} - ${{ each parameter in parameters.sourceIndexParams }}: - ${{ parameter.key }}: ${{ parameter.value }} - -- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, ''), eq(parameters.isAssetlessBuild, true)) }}: - - template: ../job/publish-build-assets.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - continueOnError: ${{ parameters.continueOnError }} - dependsOn: - - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}: - - ${{ each job in parameters.publishBuildAssetsDependsOn }}: - - ${{ job.job }} - - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}: - - ${{ each job in parameters.jobs }}: - - ${{ job.job }} - - ${{ if eq(parameters.enableSourceBuild, true) }}: - - Source_Build_Complete - - runAsPublic: ${{ parameters.runAsPublic }} - publishAssetsImmediately: ${{ or(parameters.publishAssetsImmediately, parameters.isAssetlessBuild) }} - isAssetlessBuild: ${{ parameters.isAssetlessBuild }} - enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }} - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }} diff --git a/eng/common/core-templates/jobs/source-build.yml b/eng/common/core-templates/jobs/source-build.yml deleted file mode 100644 index df24c948b..000000000 --- a/eng/common/core-templates/jobs/source-build.yml +++ 
/dev/null @@ -1,58 +0,0 @@ -parameters: - # This template adds arcade-powered source-build to CI. A job is created for each platform, as - # well as an optional server job that completes when all platform jobs complete. - - # The name of the "join" job for all source-build platforms. If set to empty string, the job is - # not included. Existing repo pipelines can use this job depend on all source-build jobs - # completing without maintaining a separate list of every single job ID: just depend on this one - # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'. - allCompletedJobId: '' - - # See /eng/common/core-templates/job/source-build.yml - jobNamePrefix: 'Source_Build' - - # This is the default platform provided by Arcade, intended for use by a managed-only repo. - defaultManagedPlatform: - name: 'Managed' - container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream-10-amd64' - - # Defines the platforms on which to run build jobs. One job is created for each platform, and the - # object in this array is sent to the job template as 'platform'. If no platforms are specified, - # one job runs on 'defaultManagedPlatform'. - platforms: [] - - is1ESPipeline: '' - - # If set to true and running on a non-public project, - # Internal nuget and blob storage locations will be enabled. - # This is not enabled by default because many repositories do not need internal sources - # and do not need to have the required service connections approved in the pipeline. 
- enableInternalSources: false - -jobs: - -- ${{ if ne(parameters.allCompletedJobId, '') }}: - - job: ${{ parameters.allCompletedJobId }} - displayName: Source-Build Complete - pool: server - dependsOn: - - ${{ each platform in parameters.platforms }}: - - ${{ parameters.jobNamePrefix }}_${{ platform.name }} - - ${{ if eq(length(parameters.platforms), 0) }}: - - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }} - -- ${{ each platform in parameters.platforms }}: - - template: /eng/common/core-templates/job/source-build.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - jobNamePrefix: ${{ parameters.jobNamePrefix }} - platform: ${{ platform }} - enableInternalSources: ${{ parameters.enableInternalSources }} - -- ${{ if eq(length(parameters.platforms), 0) }}: - - template: /eng/common/core-templates/job/source-build.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - jobNamePrefix: ${{ parameters.jobNamePrefix }} - platform: ${{ parameters.defaultManagedPlatform }} - enableInternalSources: ${{ parameters.enableInternalSources }} diff --git a/eng/common/core-templates/post-build/common-variables.yml b/eng/common/core-templates/post-build/common-variables.yml deleted file mode 100644 index d5627a994..000000000 --- a/eng/common/core-templates/post-build/common-variables.yml +++ /dev/null @@ -1,22 +0,0 @@ -variables: - - group: Publish-Build-Assets - - # Whether the build is internal or not - - name: IsInternalBuild - value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }} - - # Default Maestro++ API Endpoint and API Version - - name: MaestroApiEndPoint - value: "https://maestro.dot.net/" - - name: MaestroApiVersion - value: "2020-02-20" - - - name: SourceLinkCLIVersion - value: 3.0.0 - - name: SymbolToolVersion - value: 1.0.1 - - name: BinlogToolVersion - value: 1.0.11 - - - name: runCodesignValidationInjection - value: false diff --git 
a/eng/common/core-templates/post-build/post-build.yml b/eng/common/core-templates/post-build/post-build.yml deleted file mode 100644 index a151fd811..000000000 --- a/eng/common/core-templates/post-build/post-build.yml +++ /dev/null @@ -1,325 +0,0 @@ -parameters: - # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST. - # Publishing V1 is no longer supported - # Publishing V2 is no longer supported - # Publishing V3 is the default - - name: publishingInfraVersion - displayName: Which version of publishing should be used to promote the build definition? - type: number - default: 3 - values: - - 3 - - - name: BARBuildId - displayName: BAR Build Id - type: number - default: 0 - - - name: PromoteToChannelIds - displayName: Channel to promote BARBuildId to - type: string - default: '' - - - name: enableSourceLinkValidation - displayName: Enable SourceLink validation - type: boolean - default: false - - - name: enableSigningValidation - displayName: Enable signing validation - type: boolean - default: true - - - name: enableSymbolValidation - displayName: Enable symbol validation - type: boolean - default: false - - - name: enableNugetValidation - displayName: Enable NuGet validation - type: boolean - default: true - - - name: publishInstallersAndChecksums - displayName: Publish installers and checksums - type: boolean - default: true - - - name: requireDefaultChannels - displayName: Fail the build if there are no default channel(s) registrations for the current build - type: boolean - default: false - - - name: SDLValidationParameters - type: object - default: - enable: false - publishGdn: false - continueOnError: false - params: '' - artifactNames: '' - downloadArtifacts: true - - - name: isAssetlessBuild - type: boolean - displayName: Is Assetless Build - default: false - - # These parameters let the user customize the call to sdk-task.ps1 for publishing - # symbols & general artifacts as well as for signing validation - - name: 
symbolPublishingAdditionalParameters - displayName: Symbol publishing additional parameters - type: string - default: '' - - - name: artifactsPublishingAdditionalParameters - displayName: Artifact publishing additional parameters - type: string - default: '' - - - name: signingValidationAdditionalParameters - displayName: Signing validation additional parameters - type: string - default: '' - - # Which stages should finish execution before post-build stages start - - name: validateDependsOn - type: object - default: - - build - - - name: publishDependsOn - type: object - default: - - Validate - - # Optional: Call asset publishing rather than running in a separate stage - - name: publishAssetsImmediately - type: boolean - default: false - - - name: is1ESPipeline - type: boolean - default: false - -stages: -- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: - - stage: Validate - dependsOn: ${{ parameters.validateDependsOn }} - displayName: Validate Build Assets - variables: - - template: /eng/common/core-templates/post-build/common-variables.yml - - template: /eng/common/core-templates/variables/pool-providers.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - jobs: - - job: - displayName: NuGet Validation - condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true')) - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: AzurePipelines-EO - image: 1ESPT-Windows2022 - demands: Cmd - os: windows - # If it's not devdiv, it's dnceng - ${{ else }}: - ${{ if eq(parameters.is1ESPipeline, true) }}: - name: $(DncEngInternalBuildPool) - image: windows.vs2022.amd64 - os: windows - ${{ else }}: - name: $(DncEngInternalBuildPool) - demands: ImageOverride -equals windows.vs2022.amd64 - - steps: - - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - is1ESPipeline: ${{ parameters.is1ESPipeline }} - - - task: DownloadBuildArtifacts@0 - displayName: Download Package Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: PackageArtifacts - checkDownloadedFiles: true - - - task: PowerShell@2 - displayName: Validate - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1 - arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ - - - job: - displayName: Signing Validation - condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true')) - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: AzurePipelines-EO - image: 1ESPT-Windows2022 - demands: Cmd - os: windows - # If it's not devdiv, it's dnceng - ${{ else }}: - ${{ if eq(parameters.is1ESPipeline, true) }}: - name: $(DncEngInternalBuildPool) - image: 1es-windows-2022 - os: windows - ${{ else }}: - name: $(DncEngInternalBuildPool) - demands: ImageOverride -equals windows.vs2022.amd64 - steps: - - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - is1ESPipeline: ${{ parameters.is1ESPipeline }} - - - task: DownloadBuildArtifacts@0 - displayName: Download Package Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: PackageArtifacts - checkDownloadedFiles: true - - # This is necessary whenever we want to publish/restore to an AzDO private feed - # Since sdk-task.ps1 tries to restore packages we need to do this authentication here - # otherwise it'll complain about accessing a private feed. - - task: NuGetAuthenticate@1 - displayName: 'Authenticate to AzDO Feeds' - - # Signing validation will optionally work with the buildmanifest file which is downloaded from - # Azure DevOps above. 
- - task: PowerShell@2 - displayName: Validate - inputs: - filePath: eng\common\sdk-task.ps1 - arguments: -task SigningValidation -restore -msbuildEngine vs - /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts' - /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt' - ${{ parameters.signingValidationAdditionalParameters }} - - - template: /eng/common/core-templates/steps/publish-logs.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - StageLabel: 'Validation' - JobLabel: 'Signing' - BinlogToolVersion: $(BinlogToolVersion) - - - job: - displayName: SourceLink Validation - condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true') - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: AzurePipelines-EO - image: 1ESPT-Windows2022 - demands: Cmd - os: windows - # If it's not devdiv, it's dnceng - ${{ else }}: - ${{ if eq(parameters.is1ESPipeline, true) }}: - name: $(DncEngInternalBuildPool) - image: 1es-windows-2022 - os: windows - ${{ else }}: - name: $(DncEngInternalBuildPool) - demands: ImageOverride -equals windows.vs2022.amd64 - steps: - - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - is1ESPipeline: ${{ parameters.is1ESPipeline }} - - - task: DownloadBuildArtifacts@0 - displayName: Download Blob Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: BlobArtifacts - checkDownloadedFiles: true - - - task: PowerShell@2 - displayName: Validate - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1 - arguments: -InputPath 
$(Build.ArtifactStagingDirectory)/BlobArtifacts/ - -ExtractPath $(Agent.BuildDirectory)/Extract/ - -GHRepoName $(Build.Repository.Name) - -GHCommit $(Build.SourceVersion) - -SourcelinkCliVersion $(SourceLinkCLIVersion) - continueOnError: true - -- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}: - - stage: publish_using_darc - ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: - dependsOn: ${{ parameters.publishDependsOn }} - ${{ else }}: - dependsOn: ${{ parameters.validateDependsOn }} - displayName: Publish using Darc - variables: - - template: /eng/common/core-templates/post-build/common-variables.yml - - template: /eng/common/core-templates/variables/pool-providers.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - jobs: - - job: - displayName: Publish Using Darc - timeoutInMinutes: 120 - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: AzurePipelines-EO - image: 1ESPT-Windows2022 - demands: Cmd - os: windows - # If it's not devdiv, it's dnceng - ${{ else }}: - ${{ if eq(parameters.is1ESPipeline, true) }}: - name: NetCore1ESPool-Publishing-Internal - image: windows.vs2019.amd64 - os: windows - ${{ else }}: - name: NetCore1ESPool-Publishing-Internal - demands: ImageOverride -equals windows.vs2019.amd64 - steps: - - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - is1ESPipeline: ${{ parameters.is1ESPipeline }} - - - task: NuGetAuthenticate@1 - - - task: AzureCLI@2 - displayName: Publish Using Darc - inputs: - azureSubscription: "Darc: Maestro Production" - scriptType: ps - scriptLocation: scriptPath - scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 - arguments: > - -BuildId $(BARBuildId) - -PublishingInfraVersion ${{ parameters.publishingInfraVersion }} - -AzdoToken '$(System.AccessToken)' - -WaitPublishingFinish true - -RequireDefaultChannels ${{ parameters.requireDefaultChannels }} - -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' - -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' - -SkipAssetsPublishing '${{ parameters.isAssetlessBuild }}' diff --git a/eng/common/core-templates/post-build/setup-maestro-vars.yml b/eng/common/core-templates/post-build/setup-maestro-vars.yml deleted file mode 100644 index f7602980d..000000000 --- a/eng/common/core-templates/post-build/setup-maestro-vars.yml +++ /dev/null @@ -1,74 +0,0 @@ -parameters: - BARBuildId: '' - PromoteToChannelIds: '' - is1ESPipeline: '' - -steps: - - ${{ if eq(parameters.is1ESPipeline, '') }}: - - 'Illegal entry point, is1ESPipeline is not defined. 
Repository yaml should not directly reference templates in core-templates folder.': error - - - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}: - - task: DownloadBuildArtifacts@0 - displayName: Download Release Configs - inputs: - buildType: current - artifactName: ReleaseConfigs - checkDownloadedFiles: true - - - task: AzureCLI@2 - name: setReleaseVars - displayName: Set Release Configs Vars - inputs: - azureSubscription: "Darc: Maestro Production" - scriptType: pscore - scriptLocation: inlineScript - inlineScript: | - try { - if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') { - $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt - - $BarId = $Content | Select -Index 0 - $Channels = $Content | Select -Index 1 - $IsStableBuild = $Content | Select -Index 2 - - $AzureDevOpsProject = $Env:System_TeamProject - $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId - $AzureDevOpsBuildId = $Env:Build_BuildId - } - else { - . 
$(Build.SourcesDirectory)\eng\common\tools.ps1 - $darc = Get-Darc - $buildInfo = & $darc get-build ` - --id ${{ parameters.BARBuildId }} ` - --extended ` - --output-format json ` - --ci ` - | convertFrom-Json - - $BarId = ${{ parameters.BARBuildId }} - $Channels = $Env:PromoteToMaestroChannels -split "," - $Channels = $Channels -join "][" - $Channels = "[$Channels]" - - $IsStableBuild = $buildInfo.stable - $AzureDevOpsProject = $buildInfo.azureDevOpsProject - $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId - $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId - } - - Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId" - Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels" - Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild" - - Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject" - Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId" - Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId" - } - catch { - Write-Host $_ - Write-Host $_.Exception - Write-Host $_.ScriptStackTrace - exit 1 - } - env: - PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }} diff --git a/eng/common/core-templates/steps/cleanup-microbuild.yml b/eng/common/core-templates/steps/cleanup-microbuild.yml deleted file mode 100644 index c0fdcd337..000000000 --- a/eng/common/core-templates/steps/cleanup-microbuild.yml +++ /dev/null @@ -1,28 +0,0 @@ -parameters: - # Enable cleanup tasks for MicroBuild - enableMicrobuild: false - # Enable cleanup tasks for MicroBuild on Mac and Linux - # Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT' - enableMicrobuildForMacAndLinux: false - continueOnError: false - -steps: - - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - - task: MicroBuildCleanup@1 - displayName: Execute Microbuild cleanup tasks - condition: and( - always(), - or( - and( - 
eq(variables['Agent.Os'], 'Windows_NT'), - in(variables['_SignType'], 'real', 'test') - ), - and( - ${{ eq(parameters.enableMicrobuildForMacAndLinux, true) }}, - ne(variables['Agent.Os'], 'Windows_NT'), - eq(variables['_SignType'], 'real') - ) - )) - continueOnError: ${{ parameters.continueOnError }} - env: - TeamName: $(_TeamName) diff --git a/eng/common/core-templates/steps/component-governance.yml b/eng/common/core-templates/steps/component-governance.yml deleted file mode 100644 index cf0649aa9..000000000 --- a/eng/common/core-templates/steps/component-governance.yml +++ /dev/null @@ -1,16 +0,0 @@ -parameters: - disableComponentGovernance: false - componentGovernanceIgnoreDirectories: '' - is1ESPipeline: false - displayName: 'Component Detection' - -steps: -- ${{ if eq(parameters.disableComponentGovernance, 'true') }}: - - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true" - displayName: Set skipComponentGovernanceDetection variable -- ${{ if ne(parameters.disableComponentGovernance, 'true') }}: - - task: ComponentGovernanceComponentDetection@0 - continueOnError: true - displayName: ${{ parameters.displayName }} - inputs: - ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} diff --git a/eng/common/core-templates/steps/enable-internal-runtimes.yml b/eng/common/core-templates/steps/enable-internal-runtimes.yml deleted file mode 100644 index 6bdbf62ac..000000000 --- a/eng/common/core-templates/steps/enable-internal-runtimes.yml +++ /dev/null @@ -1,32 +0,0 @@ -# Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64' -# variable with the base64-encoded SAS token, by default - -parameters: -- name: federatedServiceConnection - type: string - default: 'dotnetbuilds-internal-read' -- name: outputVariableName - type: string - default: 'dotnetbuilds-internal-container-read-token-base64' -- name: expiryInHours - type: number - default: 1 -- name: 
base64Encode - type: boolean - default: true -- name: is1ESPipeline - type: boolean - default: false - -steps: -- ${{ if ne(variables['System.TeamProject'], 'public') }}: - - template: /eng/common/core-templates/steps/get-delegation-sas.yml - parameters: - federatedServiceConnection: ${{ parameters.federatedServiceConnection }} - outputVariableName: ${{ parameters.outputVariableName }} - expiryInHours: ${{ parameters.expiryInHours }} - base64Encode: ${{ parameters.base64Encode }} - storageAccount: dotnetbuilds - container: internal - permissions: rl - is1ESPipeline: ${{ parameters.is1ESPipeline }} \ No newline at end of file diff --git a/eng/common/core-templates/steps/enable-internal-sources.yml b/eng/common/core-templates/steps/enable-internal-sources.yml deleted file mode 100644 index 64f881bff..000000000 --- a/eng/common/core-templates/steps/enable-internal-sources.yml +++ /dev/null @@ -1,47 +0,0 @@ -parameters: -# This is the Azure federated service connection that we log into to get an access token. -- name: nugetFederatedServiceConnection - type: string - default: 'dnceng-artifacts-feeds-read' -- name: is1ESPipeline - type: boolean - default: false -# Legacy parameters to allow for PAT usage -- name: legacyCredential - type: string - default: '' - -steps: -- ${{ if ne(variables['System.TeamProject'], 'public') }}: - - ${{ if ne(parameters.legacyCredential, '') }}: - - task: PowerShell@2 - displayName: Setup Internal Feeds - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1 - arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $Env:Token - env: - Token: ${{ parameters.legacyCredential }} - # If running on dnceng (internal project), just use the default behavior for NuGetAuthenticate. - # If running on DevDiv, NuGetAuthenticate is not really an option. It's scoped to a single feed, and we have many feeds that - # may be added. 
Instead, we'll use the traditional approach (add cred to nuget.config), but use an account token. - - ${{ else }}: - - ${{ if eq(variables['System.TeamProject'], 'internal') }}: - - task: PowerShell@2 - displayName: Setup Internal Feeds - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1 - arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config - - ${{ else }}: - - template: /eng/common/templates/steps/get-federated-access-token.yml - parameters: - federatedServiceConnection: ${{ parameters.nugetFederatedServiceConnection }} - outputVariableName: 'dnceng-artifacts-feeds-read-access-token' - - task: PowerShell@2 - displayName: Setup Internal Feeds - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1 - arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $(dnceng-artifacts-feeds-read-access-token) - # This is required in certain scenarios to install the ADO credential provider. - # It installed by default in some msbuild invocations (e.g. VS msbuild), but needs to be installed for others - # (e.g. dotnet msbuild). - - task: NuGetAuthenticate@1 diff --git a/eng/common/core-templates/steps/generate-sbom.yml b/eng/common/core-templates/steps/generate-sbom.yml deleted file mode 100644 index 44a9636cd..000000000 --- a/eng/common/core-templates/steps/generate-sbom.yml +++ /dev/null @@ -1,54 +0,0 @@ -# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated. -# PackageName - The name of the package this SBOM represents. -# PackageVersion - The version of the package this SBOM represents. -# ManifestDirPath - The path of the directory where the generated manifest files will be placed -# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector. 
- -parameters: - PackageVersion: 10.0.0 - BuildDropPath: '$(Build.SourcesDirectory)/artifacts' - PackageName: '.NET' - ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom - IgnoreDirectories: '' - sbomContinueOnError: true - is1ESPipeline: false - # disable publishArtifacts if some other step is publishing the artifacts (like job.yml). - publishArtifacts: true - -steps: -- task: PowerShell@2 - displayName: Prep for SBOM generation in (Non-linux) - condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin')) - inputs: - filePath: ./eng/common/generate-sbom-prep.ps1 - arguments: ${{parameters.manifestDirPath}} - -# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461 -- script: | - chmod +x ./eng/common/generate-sbom-prep.sh - ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}} - displayName: Prep for SBOM generation in (Linux) - condition: eq(variables['Agent.Os'], 'Linux') - continueOnError: ${{ parameters.sbomContinueOnError }} - -- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0 - displayName: 'Generate SBOM manifest' - continueOnError: ${{ parameters.sbomContinueOnError }} - inputs: - PackageName: ${{ parameters.packageName }} - BuildDropPath: ${{ parameters.buildDropPath }} - PackageVersion: ${{ parameters.packageVersion }} - ManifestDirPath: ${{ parameters.manifestDirPath }}/$(ARTIFACT_NAME) - ${{ if ne(parameters.IgnoreDirectories, '') }}: - AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}' - -- ${{ if eq(parameters.publishArtifacts, 'true')}}: - - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - args: - displayName: Publish SBOM manifest - continueOnError: ${{parameters.sbomContinueOnError}} - targetPath: '${{ parameters.manifestDirPath }}' - artifactName: $(ARTIFACT_NAME) - diff --git 
a/eng/common/core-templates/steps/get-delegation-sas.yml b/eng/common/core-templates/steps/get-delegation-sas.yml deleted file mode 100644 index d2901470a..000000000 --- a/eng/common/core-templates/steps/get-delegation-sas.yml +++ /dev/null @@ -1,46 +0,0 @@ -parameters: -- name: federatedServiceConnection - type: string -- name: outputVariableName - type: string -- name: expiryInHours - type: number - default: 1 -- name: base64Encode - type: boolean - default: false -- name: storageAccount - type: string -- name: container - type: string -- name: permissions - type: string - default: 'rl' -- name: is1ESPipeline - type: boolean - default: false - -steps: -- task: AzureCLI@2 - displayName: 'Generate delegation SAS Token for ${{ parameters.storageAccount }}/${{ parameters.container }}' - inputs: - azureSubscription: ${{ parameters.federatedServiceConnection }} - scriptType: 'pscore' - scriptLocation: 'inlineScript' - inlineScript: | - # Calculate the expiration of the SAS token and convert to UTC - $expiry = (Get-Date).AddHours(${{ parameters.expiryInHours }}).ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ") - - $sas = az storage container generate-sas --account-name ${{ parameters.storageAccount }} --name ${{ parameters.container }} --permissions ${{ parameters.permissions }} --expiry $expiry --auth-mode login --as-user -o tsv - - if ($LASTEXITCODE -ne 0) { - Write-Error "Failed to generate SAS token." 
- exit 1 - } - - if ('${{ parameters.base64Encode }}' -eq 'true') { - $sas = [Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($sas)) - } - - Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value" - Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true]$sas" diff --git a/eng/common/core-templates/steps/get-federated-access-token.yml b/eng/common/core-templates/steps/get-federated-access-token.yml deleted file mode 100644 index 3a4d4410c..000000000 --- a/eng/common/core-templates/steps/get-federated-access-token.yml +++ /dev/null @@ -1,42 +0,0 @@ -parameters: -- name: federatedServiceConnection - type: string -- name: outputVariableName - type: string -- name: is1ESPipeline - type: boolean -- name: stepName - type: string - default: 'getFederatedAccessToken' -- name: condition - type: string - default: '' -# Resource to get a token for. Common values include: -# - '499b84ac-1321-427f-aa17-267ca6975798' for Azure DevOps -# - 'https://storage.azure.com/' for storage -# Defaults to Azure DevOps -- name: resource - type: string - default: '499b84ac-1321-427f-aa17-267ca6975798' -- name: isStepOutputVariable - type: boolean - default: false - -steps: -- task: AzureCLI@2 - displayName: 'Getting federated access token for feeds' - name: ${{ parameters.stepName }} - ${{ if ne(parameters.condition, '') }}: - condition: ${{ parameters.condition }} - inputs: - azureSubscription: ${{ parameters.federatedServiceConnection }} - scriptType: 'pscore' - scriptLocation: 'inlineScript' - inlineScript: | - $accessToken = az account get-access-token --query accessToken --resource ${{ parameters.resource }} --output tsv - if ($LASTEXITCODE -ne 0) { - Write-Error "Failed to get access token for resource '${{ parameters.resource }}'" - exit 1 - } - Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value" - Write-Host "##vso[task.setvariable variable=${{ 
parameters.outputVariableName }};issecret=true;isOutput=${{ parameters.isStepOutputVariable }}]$accessToken" \ No newline at end of file diff --git a/eng/common/core-templates/steps/install-microbuild.yml b/eng/common/core-templates/steps/install-microbuild.yml deleted file mode 100644 index f3064a783..000000000 --- a/eng/common/core-templates/steps/install-microbuild.yml +++ /dev/null @@ -1,54 +0,0 @@ -parameters: - # Enable install tasks for MicroBuild - enableMicrobuild: false - # Enable install tasks for MicroBuild on Mac and Linux - # Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT' - enableMicrobuildForMacAndLinux: false - # Location of the MicroBuild output folder - microBuildOutputFolder: '$(Build.SourcesDirectory)' - continueOnError: false - -steps: - - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, 'true') }}: - # Needed to download the MicroBuild plugin nupkgs on Mac and Linux when nuget.exe is unavailable - - task: UseDotNet@2 - displayName: Install .NET 8.0 SDK for MicroBuild Plugin - inputs: - packageType: sdk - version: 8.0.x - installationPath: ${{ parameters.microBuildOutputFolder }}/.dotnet - workingDirectory: ${{ parameters.microBuildOutputFolder }} - condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT')) - - - task: MicroBuildSigningPlugin@4 - displayName: Install MicroBuild plugin - inputs: - signType: $(_SignType) - zipSources: false - feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json - ${{ if and(eq(parameters.enableMicrobuildForMacAndLinux, 'true'), ne(variables['Agent.Os'], 'Windows_NT')) }}: - azureSubscription: 'MicroBuild Signing Task (DevDiv)' - useEsrpCli: true - ${{ elseif eq(variables['System.TeamProject'], 'DevDiv') }}: - ConnectedPMEServiceName: 6cc74545-d7b9-4050-9dfa-ebefcc8961ea - ${{ else }}: - ConnectedPMEServiceName: 248d384a-b39b-46e3-8ad5-c2c210d5e7ca - env: - TeamName: 
$(_TeamName) - MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }} - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - continueOnError: ${{ parameters.continueOnError }} - condition: and( - succeeded(), - or( - and( - eq(variables['Agent.Os'], 'Windows_NT'), - in(variables['_SignType'], 'real', 'test') - ), - and( - ${{ eq(parameters.enableMicrobuildForMacAndLinux, true) }}, - ne(variables['Agent.Os'], 'Windows_NT'), - eq(variables['_SignType'], 'real') - ) - )) diff --git a/eng/common/core-templates/steps/publish-build-artifacts.yml b/eng/common/core-templates/steps/publish-build-artifacts.yml deleted file mode 100644 index f24ce3466..000000000 --- a/eng/common/core-templates/steps/publish-build-artifacts.yml +++ /dev/null @@ -1,20 +0,0 @@ -parameters: -- name: is1ESPipeline - type: boolean - default: false -- name: args - type: object - default: {} -steps: -- ${{ if ne(parameters.is1ESPipeline, true) }}: - - template: /eng/common/templates/steps/publish-build-artifacts.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - ${{ each parameter in parameters.args }}: - ${{ parameter.key }}: ${{ parameter.value }} -- ${{ else }}: - - template: /eng/common/templates-official/steps/publish-build-artifacts.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - ${{ each parameter in parameters.args }}: - ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/core-templates/steps/publish-logs.yml b/eng/common/core-templates/steps/publish-logs.yml deleted file mode 100644 index de24d0087..000000000 --- a/eng/common/core-templates/steps/publish-logs.yml +++ /dev/null @@ -1,61 +0,0 @@ -parameters: - StageLabel: '' - JobLabel: '' - CustomSensitiveDataList: '' - # A default - in case value from eng/common/core-templates/post-build/common-variables.yml is not passed - BinlogToolVersion: '1.0.11' - is1ESPipeline: false - -steps: -- task: Powershell@2 - displayName: Prepare Binlogs to Upload - 
inputs: - targetType: inline - script: | - New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ - Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ - continueOnError: true - condition: always() - -- task: PowerShell@2 - displayName: Redact Logs - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1 - # For now this needs to have explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml - # Sensitive data can as well be added to $(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt' - # If the file exists - sensitive data for redaction will be sourced from it - # (single entry per line, lines starting with '# ' are considered comments and skipped) - arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs' - -BinlogToolVersion ${{parameters.BinlogToolVersion}} - -TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt' - '$(publishing-dnceng-devdiv-code-r-build-re)' - '$(MaestroAccessToken)' - '$(dn-bot-all-orgs-artifact-feeds-rw)' - '$(akams-client-id)' - '$(microsoft-symbol-server-pat)' - '$(symweb-symbol-server-pat)' - '$(dnceng-symbol-server-pat)' - '$(dn-bot-all-orgs-build-rw-code-rw)' - '$(System.AccessToken)' - ${{parameters.CustomSensitiveDataList}} - continueOnError: true - condition: always() - -- task: CopyFiles@2 - displayName: Gather post build logs - inputs: - SourceFolder: '$(Build.SourcesDirectory)/PostBuildLogs' - Contents: '**' - TargetFolder: '$(Build.ArtifactStagingDirectory)/PostBuildLogs' - condition: always() - -- template: /eng/common/core-templates/steps/publish-build-artifacts.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - args: - displayName: Publish Logs - pathToPublish: '$(Build.ArtifactStagingDirectory)/PostBuildLogs' - publishLocation: Container - 
artifactName: PostBuildLogs - continueOnError: true - condition: always() diff --git a/eng/common/core-templates/steps/publish-pipeline-artifacts.yml b/eng/common/core-templates/steps/publish-pipeline-artifacts.yml deleted file mode 100644 index 2efec04dc..000000000 --- a/eng/common/core-templates/steps/publish-pipeline-artifacts.yml +++ /dev/null @@ -1,20 +0,0 @@ -parameters: -- name: is1ESPipeline - type: boolean - default: false - -- name: args - type: object - default: {} - -steps: -- ${{ if ne(parameters.is1ESPipeline, true) }}: - - template: /eng/common/templates/steps/publish-pipeline-artifacts.yml - parameters: - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} -- ${{ else }}: - - template: /eng/common/templates-official/steps/publish-pipeline-artifacts.yml - parameters: - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/core-templates/steps/retain-build.yml b/eng/common/core-templates/steps/retain-build.yml deleted file mode 100644 index 83d97a26a..000000000 --- a/eng/common/core-templates/steps/retain-build.yml +++ /dev/null @@ -1,28 +0,0 @@ -parameters: - # Optional azure devops PAT with build execute permissions for the build's organization, - # only needed if the build that should be retained ran on a different organization than - # the pipeline where this template is executing from - Token: '' - # Optional BuildId to retain, defaults to the current running build - BuildId: '' - # Azure devops Organization URI for the build in the https://dev.azure.com/ format. - # Defaults to the organization the current pipeline is running on - AzdoOrgUri: '$(System.CollectionUri)' - # Azure devops project for the build. 
Defaults to the project the current pipeline is running on - AzdoProject: '$(System.TeamProject)' - -steps: - - task: powershell@2 - inputs: - targetType: 'filePath' - filePath: eng/common/retain-build.ps1 - pwsh: true - arguments: > - -AzdoOrgUri: ${{parameters.AzdoOrgUri}} - -AzdoProject ${{parameters.AzdoProject}} - -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }} - -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}} - displayName: Enable permanent build retention - env: - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - BUILD_ID: $(Build.BuildId) \ No newline at end of file diff --git a/eng/common/core-templates/steps/send-to-helix.yml b/eng/common/core-templates/steps/send-to-helix.yml deleted file mode 100644 index 68fa739c4..000000000 --- a/eng/common/core-templates/steps/send-to-helix.yml +++ /dev/null @@ -1,93 +0,0 @@ -# Please remember to update the documentation if you make changes to these parameters! -parameters: - HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/ - HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/' - HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number - HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues - HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group - HelixProjectPath: 'eng/common/helixpublish.proj' # optional -- path to the project file to build relative to BUILD_SOURCESDIRECTORY - HelixProjectArguments: '' # optional -- arguments passed to the build command - HelixConfiguration: '' # optional -- additional property attached to a job - HelixPreCommands: '' # optional -- commands to run before Helix work 
item execution - HelixPostCommands: '' # optional -- commands to run after Helix work item execution - WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects - WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects - WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects - CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload - XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true - XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects - XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects - XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner - XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects - IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion - DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json - DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json - WaitForWorkItemCompletion: true # optional 
-- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget." - IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set - HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net ) - Creator: '' # optional -- if the build is external, use this to specify who is sending the job - DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO - condition: succeeded() # optional -- condition for step to execute; defaults to succeeded() - continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false - -steps: - - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"' - displayName: ${{ parameters.DisplayNamePrefix }} (Windows) - env: - BuildConfig: $(_BuildConfig) - HelixSource: ${{ parameters.HelixSource }} - HelixType: ${{ parameters.HelixType }} - HelixBuild: ${{ parameters.HelixBuild }} - HelixConfiguration: ${{ parameters.HelixConfiguration }} - HelixTargetQueues: ${{ parameters.HelixTargetQueues }} - HelixAccessToken: ${{ parameters.HelixAccessToken }} - HelixPreCommands: ${{ parameters.HelixPreCommands }} - HelixPostCommands: ${{ parameters.HelixPostCommands }} - WorkItemDirectory: ${{ parameters.WorkItemDirectory }} - WorkItemCommand: ${{ parameters.WorkItemCommand }} - WorkItemTimeout: ${{ parameters.WorkItemTimeout }} - CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} - XUnitProjects: ${{ parameters.XUnitProjects }} - XUnitWorkItemTimeout: ${{ 
parameters.XUnitWorkItemTimeout }} - XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }} - XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} - XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} - IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} - DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} - DotNetCliVersion: ${{ parameters.DotNetCliVersion }} - WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} - HelixBaseUri: ${{ parameters.HelixBaseUri }} - Creator: ${{ parameters.Creator }} - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT')) - continueOnError: ${{ parameters.continueOnError }} - - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog - displayName: ${{ parameters.DisplayNamePrefix }} (Unix) - env: - BuildConfig: $(_BuildConfig) - HelixSource: ${{ parameters.HelixSource }} - HelixType: ${{ parameters.HelixType }} - HelixBuild: ${{ parameters.HelixBuild }} - HelixConfiguration: ${{ parameters.HelixConfiguration }} - HelixTargetQueues: ${{ parameters.HelixTargetQueues }} - HelixAccessToken: ${{ parameters.HelixAccessToken }} - HelixPreCommands: ${{ parameters.HelixPreCommands }} - HelixPostCommands: ${{ parameters.HelixPostCommands }} - WorkItemDirectory: ${{ parameters.WorkItemDirectory }} - WorkItemCommand: ${{ parameters.WorkItemCommand }} - WorkItemTimeout: ${{ parameters.WorkItemTimeout }} - CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} - XUnitProjects: ${{ parameters.XUnitProjects }} - XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} - XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }} - 
XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} - XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} - IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} - DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} - DotNetCliVersion: ${{ parameters.DotNetCliVersion }} - WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} - HelixBaseUri: ${{ parameters.HelixBaseUri }} - Creator: ${{ parameters.Creator }} - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT')) - continueOnError: ${{ parameters.continueOnError }} diff --git a/eng/common/core-templates/steps/source-build.yml b/eng/common/core-templates/steps/source-build.yml deleted file mode 100644 index acf16ed34..000000000 --- a/eng/common/core-templates/steps/source-build.yml +++ /dev/null @@ -1,65 +0,0 @@ -parameters: - # This template adds arcade-powered source-build to CI. - - # This is a 'steps' template, and is intended for advanced scenarios where the existing build - # infra has a careful build methodology that must be followed. For example, a repo - # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline - # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to - # GitHub. Using this steps template leaves room for that infra to be included. - - # Defines the platform on which to run the steps. See 'eng/common/core-templates/job/source-build.yml' - # for details. The entire object is described in the 'job' template for simplicity, even though - # the usage of the properties on this object is split between the 'job' and 'steps' templates. - platform: {} - is1ESPipeline: false - -steps: -# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.) 
-- script: | - set -x - df -h - - # If building on the internal project, the internal storage variable may be available (usually only if needed) - # In that case, add variables to allow the download of internal runtimes if the specified versions are not found - # in the default public locations. - internalRuntimeDownloadArgs= - if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then - internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://ci.dot.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://ci.dot.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)' - fi - - buildConfig=Release - # Check if AzDO substitutes in a build config from a variable, and use it if so. - if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then - buildConfig='$(_BuildConfig)' - fi - - targetRidArgs= - if [ '${{ parameters.platform.targetRID }}' != '' ]; then - targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}' - fi - - portableBuildArgs= - if [ '${{ parameters.platform.portableBuild }}' != '' ]; then - portableBuildArgs='/p:PortableBuild=${{ parameters.platform.portableBuild }}' - fi - - ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \ - --configuration $buildConfig \ - --restore --build --pack -bl \ - --source-build \ - ${{ parameters.platform.buildArguments }} \ - $internalRuntimeDownloadArgs \ - $targetRidArgs \ - $portableBuildArgs \ - displayName: Build - -- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - args: - displayName: Publish BuildLogs - targetPath: artifacts/log/${{ coalesce(variables._BuildConfig, 'Release') }} - artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt) - continueOnError: true - condition: succeededOrFailed() 
- sbomEnabled: false # we don't need SBOM for logs diff --git a/eng/common/core-templates/steps/source-index-stage1-publish.yml b/eng/common/core-templates/steps/source-index-stage1-publish.yml deleted file mode 100644 index c2917c1ef..000000000 --- a/eng/common/core-templates/steps/source-index-stage1-publish.yml +++ /dev/null @@ -1,35 +0,0 @@ -parameters: - sourceIndexUploadPackageVersion: 2.0.0-20250425.2 - sourceIndexProcessBinlogPackageVersion: 1.0.1-20250515.1 - sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json - binlogPath: artifacts/log/Debug/Build.binlog - -steps: -- task: UseDotNet@2 - displayName: "Source Index: Use .NET 9 SDK" - inputs: - packageType: sdk - version: 9.0.x - installationPath: $(Agent.TempDirectory)/dotnet - workingDirectory: $(Agent.TempDirectory) - -- script: | - $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version ${{parameters.sourceIndexProcessBinlogPackageVersion}} --add-source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools - $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version ${{parameters.sourceIndexUploadPackageVersion}} --add-source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools - displayName: "Source Index: Download netsourceindex Tools" - # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk. 
- workingDirectory: $(Agent.TempDirectory) - -- script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i ${{parameters.BinlogPath}} -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output - displayName: "Source Index: Process Binlog into indexable sln" - -- ${{ if and(ne(parameters.runAsPublic, 'true'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - task: AzureCLI@2 - displayName: "Source Index: Upload Source Index stage1 artifacts to Azure" - inputs: - azureSubscription: 'SourceDotNet Stage1 Publish' - addSpnToEnvironment: true - scriptType: 'ps' - scriptLocation: 'inlineScript' - inlineScript: | - $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1 diff --git a/eng/common/core-templates/variables/pool-providers.yml b/eng/common/core-templates/variables/pool-providers.yml deleted file mode 100644 index 41053d382..000000000 --- a/eng/common/core-templates/variables/pool-providers.yml +++ /dev/null @@ -1,8 +0,0 @@ -parameters: - is1ESPipeline: false - -variables: - - ${{ if eq(parameters.is1ESPipeline, 'true') }}: - - template: /eng/common/templates-official/variables/pool-providers.yml - - ${{ else }}: - - template: /eng/common/templates/variables/pool-providers.yml \ No newline at end of file diff --git a/eng/common/cross/arm/tizen/tizen.patch b/eng/common/cross/arm/tizen/tizen.patch deleted file mode 100644 index fb12ade72..000000000 --- a/eng/common/cross/arm/tizen/tizen.patch +++ /dev/null @@ -1,9 +0,0 @@ -diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so ---- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900 -+++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900 -@@ -2,4 +2,4 @@ - Use the shared library, but some functions are only in - the static library, so try that secondarily. 
*/ - OUTPUT_FORMAT(elf32-littlearm) --GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux-armhf.so.3 ) ) -+GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-armhf.so.3 ) ) diff --git a/eng/common/cross/arm64/tizen/tizen.patch b/eng/common/cross/arm64/tizen/tizen.patch deleted file mode 100644 index 2cebc5473..000000000 --- a/eng/common/cross/arm64/tizen/tizen.patch +++ /dev/null @@ -1,9 +0,0 @@ -diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so ---- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900 -+++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900 -@@ -2,4 +2,4 @@ - Use the shared library, but some functions are only in - the static library, so try that secondarily. */ - OUTPUT_FORMAT(elf64-littleaarch64) --GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib64/ld-linux-aarch64.so.1 ) ) -+GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-aarch64.so.1 ) ) diff --git a/eng/common/cross/armel/tizen/tizen.patch b/eng/common/cross/armel/tizen/tizen.patch deleted file mode 100644 index ca7c7c1ff..000000000 --- a/eng/common/cross/armel/tizen/tizen.patch +++ /dev/null @@ -1,9 +0,0 @@ -diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so ---- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900 -+++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900 -@@ -2,4 +2,4 @@ - Use the shared library, but some functions are only in - the static library, so try that secondarily. 
*/ - OUTPUT_FORMAT(elf32-littlearm) --GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.3 ) ) -+GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.3 ) ) diff --git a/eng/common/cross/build-android-rootfs.sh b/eng/common/cross/build-android-rootfs.sh deleted file mode 100755 index fbd8d8084..000000000 --- a/eng/common/cross/build-android-rootfs.sh +++ /dev/null @@ -1,146 +0,0 @@ -#!/usr/bin/env bash -set -e -__NDK_Version=r21 - -usage() -{ - echo "Creates a toolchain and sysroot used for cross-compiling for Android." - echo - echo "Usage: $0 [BuildArch] [ApiLevel] [--ndk NDKVersion]" - echo - echo "BuildArch is the target architecture of Android. Currently only arm64 is supported." - echo "ApiLevel is the target Android API level. API levels usually match to Android releases. See https://source.android.com/source/build-numbers.html" - echo "NDKVersion is the version of Android NDK. The default is r21. See https://developer.android.com/ndk/downloads/revision_history" - echo - echo "By default, the toolchain and sysroot will be generated in cross/android-rootfs/toolchain/[BuildArch]. You can change this behavior" - echo "by setting the TOOLCHAIN_DIR environment variable" - echo - echo "By default, the NDK will be downloaded into the cross/android-rootfs/android-ndk-$__NDK_Version directory. If you already have an NDK installation," - echo "you can set the NDK_DIR environment variable to have this script use that installation of the NDK." - echo "By default, this script will generate a file, android_platform, in the root of the ROOTFS_DIR directory that contains the RID for the supported and tested Android build: android.28-arm64. This file is to replace '/etc/os-release', which is not available for Android." - exit 1 -} - -__ApiLevel=28 # The minimum platform for arm64 is API level 21 but the minimum version that support glob(3) is 28. 
See $ANDROID_NDK/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/include/glob.h -__BuildArch=arm64 -__AndroidArch=aarch64 -__AndroidToolchain=aarch64-linux-android - -while :; do - if [[ "$#" -le 0 ]]; then - break - fi - - i=$1 - - lowerI="$(echo $i | tr "[:upper:]" "[:lower:]")" - case $lowerI in - -?|-h|--help) - usage - exit 1 - ;; - arm64) - __BuildArch=arm64 - __AndroidArch=aarch64 - __AndroidToolchain=aarch64-linux-android - ;; - arm) - __BuildArch=arm - __AndroidArch=arm - __AndroidToolchain=arm-linux-androideabi - ;; - --ndk) - shift - __NDK_Version=$1 - ;; - *[0-9]) - __ApiLevel=$i - ;; - *) - __UnprocessedBuildArgs="$__UnprocessedBuildArgs $i" - ;; - esac - shift -done - -if [[ "$__NDK_Version" == "r21" ]] || [[ "$__NDK_Version" == "r22" ]]; then - __NDK_File_Arch_Spec=-x86_64 - __SysRoot=sysroot -else - __NDK_File_Arch_Spec= - __SysRoot=toolchains/llvm/prebuilt/linux-x86_64/sysroot -fi - -# Obtain the location of the bash script to figure out where the root of the repo is. -__ScriptBaseDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" - -__CrossDir="$__ScriptBaseDir/../../../.tools/android-rootfs" - -if [[ ! -f "$__CrossDir" ]]; then - mkdir -p "$__CrossDir" -fi - -# Resolve absolute path to avoid `../` in build logs -__CrossDir="$( cd "$__CrossDir" && pwd )" - -__NDK_Dir="$__CrossDir/android-ndk-$__NDK_Version" -__lldb_Dir="$__CrossDir/lldb" -__ToolchainDir="$__CrossDir/android-ndk-$__NDK_Version" - -if [[ -n "$TOOLCHAIN_DIR" ]]; then - __ToolchainDir=$TOOLCHAIN_DIR -fi - -if [[ -n "$NDK_DIR" ]]; then - __NDK_Dir=$NDK_DIR -fi - -echo "Target API level: $__ApiLevel" -echo "Target architecture: $__BuildArch" -echo "NDK version: $__NDK_Version" -echo "NDK location: $__NDK_Dir" -echo "Target Toolchain location: $__ToolchainDir" - -# Download the NDK if required -if [ ! 
-d $__NDK_Dir ]; then - echo Downloading the NDK into $__NDK_Dir - mkdir -p $__NDK_Dir - wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux$__NDK_File_Arch_Spec.zip -O $__CrossDir/android-ndk-$__NDK_Version-linux.zip - unzip -q $__CrossDir/android-ndk-$__NDK_Version-linux.zip -d $__CrossDir -fi - -if [ ! -d $__lldb_Dir ]; then - mkdir -p $__lldb_Dir - echo Downloading LLDB into $__lldb_Dir - wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/lldb-2.3.3614996-linux-x86_64.zip -O $__CrossDir/lldb-2.3.3614996-linux-x86_64.zip - unzip -q $__CrossDir/lldb-2.3.3614996-linux-x86_64.zip -d $__lldb_Dir -fi - -echo "Download dependencies..." -__TmpDir=$__CrossDir/tmp/$__BuildArch/ -mkdir -p "$__TmpDir" - -# combined dependencies for coreclr, installer and libraries -__AndroidPackages="libicu" -__AndroidPackages+=" libandroid-glob" -__AndroidPackages+=" liblzma" -__AndroidPackages+=" krb5" -__AndroidPackages+=" openssl" - -for path in $(wget -qO- https://packages.termux.dev/termux-main-21/dists/stable/main/binary-$__AndroidArch/Packages |\ - grep -A15 "Package: \(${__AndroidPackages// /\\|}\)" | grep -v "static\|tool" | grep Filename); do - - if [[ "$path" != "Filename:" ]]; then - echo "Working on: $path" - wget -qO- https://packages.termux.dev/termux-main-21/$path | dpkg -x - "$__TmpDir" - fi -done - -cp -R "$__TmpDir/data/data/com.termux/files/usr/"* "$__ToolchainDir/$__SysRoot/usr/" - -# Generate platform file for build.sh script to assign to __DistroRid -echo "Generating platform file..." 
-echo "RID=android.${__ApiLevel}-${__BuildArch}" > $__ToolchainDir/$__SysRoot/android_platform - -echo "Now to build coreclr, libraries and host; run:" -echo ROOTFS_DIR=$(realpath $__ToolchainDir/$__SysRoot) ./build.sh clr+libs+host --cross --arch $__BuildArch diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh deleted file mode 100755 index 8abfb71f7..000000000 --- a/eng/common/cross/build-rootfs.sh +++ /dev/null @@ -1,835 +0,0 @@ -#!/usr/bin/env bash - -set -e - -usage() -{ - echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [llvmx[.y]] [--skipunmount] --rootfsdir ]" - echo "BuildArch can be: arm(default), arm64, armel, armv6, loongarch64, ppc64le, riscv64, s390x, x64, x86" - echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine" - echo " for alpine can be specified with version: alpineX.YY or alpineedge" - echo " for FreeBSD can be: freebsd13, freebsd14" - echo " for illumos can be: illumos" - echo " for Haiku can be: haiku." - echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD" - echo "llvmx[.y] - optional, LLVM version for LLVM related packages." - echo "--skipunmount - optional, will skip the unmount of rootfs folder." - echo "--skipsigcheck - optional, will skip package signature checks (allowing untrusted packages)." - echo "--skipemulation - optional, will skip qemu and debootstrap requirement when building environment for debian based systems." - echo "--use-mirror - optional, use mirror URL to fetch resources, when available." - echo "--jobs N - optional, restrict to N jobs." 
- exit 1 -} - -__CodeName=xenial -__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) -__BuildArch=arm -__AlpineArch=armv7 -__FreeBSDArch=arm -__FreeBSDMachineArch=armv7 -__IllumosArch=arm7 -__HaikuArch=arm -__QEMUArch=arm -__UbuntuArch=armhf -__UbuntuRepo= -__UbuntuSuites="updates security backports" -__LLDB_Package="liblldb-3.9-dev" -__SkipUnmount=0 - -# base development support -__UbuntuPackages="build-essential" - -__AlpinePackages="alpine-base" -__AlpinePackages+=" build-base" -__AlpinePackages+=" linux-headers" -__AlpinePackages+=" lldb-dev" -__AlpinePackages+=" python3" -__AlpinePackages+=" libedit" - -# symlinks fixer -__UbuntuPackages+=" symlinks" - -# runtime dependencies -__UbuntuPackages+=" libicu-dev" -__UbuntuPackages+=" liblttng-ust-dev" -__UbuntuPackages+=" libunwind8-dev" - -__AlpinePackages+=" gettext-dev" -__AlpinePackages+=" icu-dev" -__AlpinePackages+=" libunwind-dev" -__AlpinePackages+=" lttng-ust-dev" -__AlpinePackages+=" compiler-rt" - -# runtime libraries' dependencies -__UbuntuPackages+=" libcurl4-openssl-dev" -__UbuntuPackages+=" libkrb5-dev" -__UbuntuPackages+=" libssl-dev" -__UbuntuPackages+=" zlib1g-dev" -__UbuntuPackages+=" libbrotli-dev" - -__AlpinePackages+=" curl-dev" -__AlpinePackages+=" krb5-dev" -__AlpinePackages+=" openssl-dev" -__AlpinePackages+=" zlib-dev" - -__FreeBSDBase="13.4-RELEASE" -__FreeBSDPkg="1.21.3" -__FreeBSDABI="13" -__FreeBSDPackages="libunwind" -__FreeBSDPackages+=" icu" -__FreeBSDPackages+=" libinotify" -__FreeBSDPackages+=" openssl" -__FreeBSDPackages+=" krb5" -__FreeBSDPackages+=" terminfo-db" - -__IllumosPackages="icu" -__IllumosPackages+=" mit-krb5" -__IllumosPackages+=" openssl" -__IllumosPackages+=" zlib" - -__HaikuPackages="gcc_syslibs" -__HaikuPackages+=" gcc_syslibs_devel" -__HaikuPackages+=" gmp" -__HaikuPackages+=" gmp_devel" -__HaikuPackages+=" icu[0-9]+" -__HaikuPackages+=" icu[0-9]*_devel" -__HaikuPackages+=" krb5" -__HaikuPackages+=" krb5_devel" -__HaikuPackages+=" libiconv" 
-__HaikuPackages+=" libiconv_devel" -__HaikuPackages+=" llvm[0-9]*_libunwind" -__HaikuPackages+=" llvm[0-9]*_libunwind_devel" -__HaikuPackages+=" mpfr" -__HaikuPackages+=" mpfr_devel" -__HaikuPackages+=" openssl3" -__HaikuPackages+=" openssl3_devel" -__HaikuPackages+=" zlib" -__HaikuPackages+=" zlib_devel" - -# ML.NET dependencies -__UbuntuPackages+=" libomp5" -__UbuntuPackages+=" libomp-dev" - -# Taken from https://github.com/alpinelinux/alpine-chroot-install/blob/6d08f12a8a70dd9b9dc7d997c88aa7789cc03c42/alpine-chroot-install#L85-L133 -__AlpineKeys=' -4a6a0840:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1yHJxQgsHQREclQu4Ohe\nqxTxd1tHcNnvnQTu/UrTky8wWvgXT+jpveroeWWnzmsYlDI93eLI2ORakxb3gA2O\nQ0Ry4ws8vhaxLQGC74uQR5+/yYrLuTKydFzuPaS1dK19qJPXB8GMdmFOijnXX4SA\njixuHLe1WW7kZVtjL7nufvpXkWBGjsfrvskdNA/5MfxAeBbqPgaq0QMEfxMAn6/R\nL5kNepi/Vr4S39Xvf2DzWkTLEK8pcnjNkt9/aafhWqFVW7m3HCAII6h/qlQNQKSo\nGuH34Q8GsFG30izUENV9avY7hSLq7nggsvknlNBZtFUcmGoQrtx3FmyYsIC8/R+B\nywIDAQAB -5243ef4b:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvNijDxJ8kloskKQpJdx+\nmTMVFFUGDoDCbulnhZMJoKNkSuZOzBoFC94omYPtxnIcBdWBGnrm6ncbKRlR+6oy\nDO0W7c44uHKCFGFqBhDasdI4RCYP+fcIX/lyMh6MLbOxqS22TwSLhCVjTyJeeH7K\naA7vqk+QSsF4TGbYzQDDpg7+6aAcNzg6InNePaywA6hbT0JXbxnDWsB+2/LLSF2G\nmnhJlJrWB1WGjkz23ONIWk85W4S0XB/ewDefd4Ly/zyIciastA7Zqnh7p3Ody6Q0\nsS2MJzo7p3os1smGjUF158s6m/JbVh4DN6YIsxwl2OjDOz9R0OycfJSDaBVIGZzg\ncQIDAQAB -524d27bb:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr8s1q88XpuJWLCZALdKj\nlN8wg2ePB2T9aIcaxryYE/Jkmtu+ZQ5zKq6BT3y/udt5jAsMrhHTwroOjIsF9DeG\ne8Y3vjz+Hh4L8a7hZDaw8jy3CPag47L7nsZFwQOIo2Cl1SnzUc6/owoyjRU7ab0p\niWG5HK8IfiybRbZxnEbNAfT4R53hyI6z5FhyXGS2Ld8zCoU/R4E1P0CUuXKEN4p0\n64dyeUoOLXEWHjgKiU1mElIQj3k/IF02W89gDj285YgwqA49deLUM7QOd53QLnx+\nxrIrPv3A+eyXMFgexNwCKQU9ZdmWa00MjjHlegSGK8Y2NPnRoXhzqSP9T9i2HiXL\nVQIDAQAB 
-5261cecb:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwlzMkl7b5PBdfMzGdCT0\ncGloRr5xGgVmsdq5EtJvFkFAiN8Ac9MCFy/vAFmS8/7ZaGOXoCDWbYVLTLOO2qtX\nyHRl+7fJVh2N6qrDDFPmdgCi8NaE+3rITWXGrrQ1spJ0B6HIzTDNEjRKnD4xyg4j\ng01FMcJTU6E+V2JBY45CKN9dWr1JDM/nei/Pf0byBJlMp/mSSfjodykmz4Oe13xB\nCa1WTwgFykKYthoLGYrmo+LKIGpMoeEbY1kuUe04UiDe47l6Oggwnl+8XD1MeRWY\nsWgj8sF4dTcSfCMavK4zHRFFQbGp/YFJ/Ww6U9lA3Vq0wyEI6MCMQnoSMFwrbgZw\nwwIDAQAB -58199dcc:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3v8/ye/V/t5xf4JiXLXa\nhWFRozsnmn3hobON20GdmkrzKzO/eUqPOKTpg2GtvBhK30fu5oY5uN2ORiv2Y2ht\neLiZ9HVz3XP8Fm9frha60B7KNu66FO5P2o3i+E+DWTPqqPcCG6t4Znk2BypILcit\nwiPKTsgbBQR2qo/cO01eLLdt6oOzAaF94NH0656kvRewdo6HG4urbO46tCAizvCR\nCA7KGFMyad8WdKkTjxh8YLDLoOCtoZmXmQAiwfRe9pKXRH/XXGop8SYptLqyVVQ+\ntegOD9wRs2tOlgcLx4F/uMzHN7uoho6okBPiifRX+Pf38Vx+ozXh056tjmdZkCaV\naQIDAQAB -58cbb476:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAoSPnuAGKtRIS5fEgYPXD\n8pSGvKAmIv3A08LBViDUe+YwhilSHbYXUEAcSH1KZvOo1WT1x2FNEPBEFEFU1Eyc\n+qGzbA03UFgBNvArurHQ5Z/GngGqE7IarSQFSoqewYRtFSfp+TL9CUNBvM0rT7vz\n2eMu3/wWG+CBmb92lkmyWwC1WSWFKO3x8w+Br2IFWvAZqHRt8oiG5QtYvcZL6jym\nY8T6sgdDlj+Y+wWaLHs9Fc+7vBuyK9C4O1ORdMPW15qVSl4Lc2Wu1QVwRiKnmA+c\nDsH/m7kDNRHM7TjWnuj+nrBOKAHzYquiu5iB3Qmx+0gwnrSVf27Arc3ozUmmJbLj\nzQIDAQAB -58e4f17d:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvBxJN9ErBgdRcPr5g4hV\nqyUSGZEKuvQliq2Z9SRHLh2J43+EdB6A+yzVvLnzcHVpBJ+BZ9RV30EM9guck9sh\nr+bryZcRHyjG2wiIEoduxF2a8KeWeQH7QlpwGhuobo1+gA8L0AGImiA6UP3LOirl\nI0G2+iaKZowME8/tydww4jx5vG132JCOScMjTalRsYZYJcjFbebQQolpqRaGB4iG\nWqhytWQGWuKiB1A22wjmIYf3t96l1Mp+FmM2URPxD1gk/BIBnX7ew+2gWppXOK9j\n1BJpo0/HaX5XoZ/uMqISAAtgHZAqq+g3IUPouxTphgYQRTRYpz2COw3NF43VYQrR\nbQIDAQAB 
-60ac2099:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwR4uJVtJOnOFGchnMW5Y\nj5/waBdG1u5BTMlH+iQMcV5+VgWhmpZHJCBz3ocD+0IGk2I68S5TDOHec/GSC0lv\n6R9o6F7h429GmgPgVKQsc8mPTPtbjJMuLLs4xKc+viCplXc0Nc0ZoHmCH4da6fCV\ntdpHQjVe6F9zjdquZ4RjV6R6JTiN9v924dGMAkbW/xXmamtz51FzondKC52Gh8Mo\n/oA0/T0KsCMCi7tb4QNQUYrf+Xcha9uus4ww1kWNZyfXJB87a2kORLiWMfs2IBBJ\nTmZ2Fnk0JnHDb8Oknxd9PvJPT0mvyT8DA+KIAPqNvOjUXP4bnjEHJcoCP9S5HkGC\nIQIDAQAB -6165ee59:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAutQkua2CAig4VFSJ7v54\nALyu/J1WB3oni7qwCZD3veURw7HxpNAj9hR+S5N/pNeZgubQvJWyaPuQDm7PTs1+\ntFGiYNfAsiibX6Rv0wci3M+z2XEVAeR9Vzg6v4qoofDyoTbovn2LztaNEjTkB+oK\ntlvpNhg1zhou0jDVYFniEXvzjckxswHVb8cT0OMTKHALyLPrPOJzVtM9C1ew2Nnc\n3848xLiApMu3NBk0JqfcS3Bo5Y2b1FRVBvdt+2gFoKZix1MnZdAEZ8xQzL/a0YS5\nHd0wj5+EEKHfOd3A75uPa/WQmA+o0cBFfrzm69QDcSJSwGpzWrD1ScH3AK8nWvoj\nv7e9gukK/9yl1b4fQQ00vttwJPSgm9EnfPHLAtgXkRloI27H6/PuLoNvSAMQwuCD\nhQRlyGLPBETKkHeodfLoULjhDi1K2gKJTMhtbnUcAA7nEphkMhPWkBpgFdrH+5z4\nLxy+3ek0cqcI7K68EtrffU8jtUj9LFTUC8dERaIBs7NgQ/LfDbDfGh9g6qVj1hZl\nk9aaIPTm/xsi8v3u+0qaq7KzIBc9s59JOoA8TlpOaYdVgSQhHHLBaahOuAigH+VI\nisbC9vmqsThF2QdDtQt37keuqoda2E6sL7PUvIyVXDRfwX7uMDjlzTxHTymvq2Ck\nhtBqojBnThmjJQFgZXocHG8CAwEAAQ== -61666e3f:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAlEyxkHggKCXC2Wf5Mzx4\nnZLFZvU2bgcA3exfNPO/g1YunKfQY+Jg4fr6tJUUTZ3XZUrhmLNWvpvSwDS19ZmC\nIXOu0+V94aNgnhMsk9rr59I8qcbsQGIBoHzuAl8NzZCgdbEXkiY90w1skUw8J57z\nqCsMBydAueMXuWqF5nGtYbi5vHwK42PffpiZ7G5Kjwn8nYMW5IZdL6ZnMEVJUWC9\nI4waeKg0yskczYDmZUEAtrn3laX9677ToCpiKrvmZYjlGl0BaGp3cxggP2xaDbUq\nqfFxWNgvUAb3pXD09JM6Mt6HSIJaFc9vQbrKB9KT515y763j5CC2KUsilszKi3mB\nHYe5PoebdjS7D1Oh+tRqfegU2IImzSwW3iwA7PJvefFuc/kNIijfS/gH/cAqAK6z\nbhdOtE/zc7TtqW2Wn5Y03jIZdtm12CxSxwgtCF1NPyEWyIxAQUX9ACb3M0FAZ61n\nfpPrvwTaIIxxZ01L3IzPLpbc44x/DhJIEU+iDt6IMTrHOphD9MCG4631eIdB0H1b\n6zbNX1CXTsafqHRFV9XmYYIeOMggmd90s3xIbEujA6HKNP/gwzO6CDJ+nHFDEqoF\nSkxRdTkEqjTjVKieURW7Swv7zpfu5PrsrrkyGnsRrBJJzXlm2FOOxnbI2iSL1B5F\nrO5kbUxFeZUIDq+7Yv4kLWcCAwEAAQ== 
-616a9724:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnC+bR4bHf/L6QdU4puhQ\ngl1MHePszRC38bzvVFDUJsmCaMCL2suCs2A2yxAgGb9pu9AJYLAmxQC4mM3jNqhg\n/E7yuaBbek3O02zN/ctvflJ250wZCy+z0ZGIp1ak6pu1j14IwHokl9j36zNfGtfv\nADVOcdpWITFFlPqwq1qt/H3UsKVmtiF3BNWWTeUEQwKvlU8ymxgS99yn0+4OPyNT\nL3EUeS+NQJtDS01unau0t7LnjUXn+XIneWny8bIYOQCuVR6s/gpIGuhBaUqwaJOw\n7jkJZYF2Ij7uPb4b5/R3vX2FfxxqEHqssFSg8FFUNTZz3qNZs0CRVyfA972g9WkJ\nhPfn31pQYil4QGRibCMIeU27YAEjXoqfJKEPh4UWMQsQLrEfdGfb8VgwrPbniGfU\nL3jKJR3VAafL9330iawzVQDlIlwGl6u77gEXMl9K0pfazunYhAp+BMP+9ot5ckK+\nosmrqj11qMESsAj083GeFdfV3pXEIwUytaB0AKEht9DbqUfiE/oeZ/LAXgySMtVC\nsbC4ESmgVeY2xSBIJdDyUap7FR49GGrw0W49NUv9gRgQtGGaNVQQO9oGL2PBC41P\niWF9GLoX30HIz1P8PF/cZvicSSPkQf2Z6TV+t0ebdGNS5DjapdnCrq8m9Z0pyKsQ\nuxAL2a7zX8l5i1CZh1ycUGsCAwEAAQ== -616abc23:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA0MfCDrhODRCIxR9Dep1s\neXafh5CE5BrF4WbCgCsevyPIdvTeyIaW4vmO3bbG4VzhogDZju+R3IQYFuhoXP5v\nY+zYJGnwrgz3r5wYAvPnLEs1+dtDKYOgJXQj+wLJBW1mzRDL8FoRXOe5iRmn1EFS\nwZ1DoUvyu7/J5r0itKicZp3QKED6YoilXed+1vnS4Sk0mzN4smuMR9eO1mMCqNp9\n9KTfRDHTbakIHwasECCXCp50uXdoW6ig/xUAFanpm9LtK6jctNDbXDhQmgvAaLXZ\nLvFqoaYJ/CvWkyYCgL6qxvMvVmPoRv7OPcyni4xR/WgWa0MSaEWjgPx3+yj9fiMA\n1S02pFWFDOr5OUF/O4YhFJvUCOtVsUPPfA/Lj6faL0h5QI9mQhy5Zb9TTaS9jB6p\nLw7u0dJlrjFedk8KTJdFCcaGYHP6kNPnOxMylcB/5WcztXZVQD5WpCicGNBxCGMm\nW64SgrV7M07gQfL/32QLsdqPUf0i8hoVD8wfQ3EpbQzv6Fk1Cn90bZqZafg8XWGY\nwddhkXk7egrr23Djv37V2okjzdqoyLBYBxMz63qQzFoAVv5VoY2NDTbXYUYytOvG\nGJ1afYDRVWrExCech1mX5ZVUB1br6WM+psFLJFoBFl6mDmiYt0vMYBddKISsvwLl\nIJQkzDwtXzT2cSjoj3T5QekCAwEAAQ== 
-616ac3bc:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvaaoSLab+IluixwKV5Od\n0gib2YurjPatGIbn5Ov2DLUFYiebj2oJINXJSwUOO+4WcuHFEqiL/1rya+k5hLZt\nhnPL1tn6QD4rESznvGSasRCQNT2vS/oyZbTYJRyAtFkEYLlq0t3S3xBxxHWuvIf0\nqVxVNYpQWyM3N9RIeYBR/euXKJXileSHk/uq1I5wTC0XBIHWcthczGN0m9wBEiWS\n0m3cnPk4q0Ea8mUJ91Rqob19qETz6VbSPYYpZk3qOycjKosuwcuzoMpwU8KRiMFd\n5LHtX0Hx85ghGsWDVtS0c0+aJa4lOMGvJCAOvDfqvODv7gKlCXUpgumGpLdTmaZ8\n1RwqspAe3IqBcdKTqRD4m2mSg23nVx2FAY3cjFvZQtfooT7q1ItRV5RgH6FhQSl7\n+6YIMJ1Bf8AAlLdRLpg+doOUGcEn+pkDiHFgI8ylH1LKyFKw+eXaAml/7DaWZk1d\ndqggwhXOhc/UUZFQuQQ8A8zpA13PcbC05XxN2hyP93tCEtyynMLVPtrRwDnHxFKa\nqKzs3rMDXPSXRn3ZZTdKH3069ApkEjQdpcwUh+EmJ1Ve/5cdtzT6kKWCjKBFZP/s\n91MlRrX2BTRdHaU5QJkUheUtakwxuHrdah2F94lRmsnQlpPr2YseJu6sIE+Dnx4M\nCfhdVbQL2w54R645nlnohu8CAwEAAQ== -616adfeb:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAq0BFD1D4lIxQcsqEpQzU\npNCYM3aP1V/fxxVdT4DWvSI53JHTwHQamKdMWtEXetWVbP5zSROniYKFXd/xrD9X\n0jiGHey3lEtylXRIPxe5s+wXoCmNLcJVnvTcDtwx/ne2NLHxp76lyc25At+6RgE6\nADjLVuoD7M4IFDkAsd8UQ8zM0Dww9SylIk/wgV3ZkifecvgUQRagrNUdUjR56EBZ\nraQrev4hhzOgwelT0kXCu3snbUuNY/lU53CoTzfBJ5UfEJ5pMw1ij6X0r5S9IVsy\nKLWH1hiO0NzU2c8ViUYCly4Fe9xMTFc6u2dy/dxf6FwERfGzETQxqZvSfrRX+GLj\n/QZAXiPg5178hT/m0Y3z5IGenIC/80Z9NCi+byF1WuJlzKjDcF/TU72zk0+PNM/H\nKuppf3JT4DyjiVzNC5YoWJT2QRMS9KLP5iKCSThwVceEEg5HfhQBRT9M6KIcFLSs\nmFjx9kNEEmc1E8hl5IR3+3Ry8G5/bTIIruz14jgeY9u5jhL8Vyyvo41jgt9sLHR1\n/J1TxKfkgksYev7PoX6/ZzJ1ksWKZY5NFoDXTNYUgzFUTOoEaOg3BAQKadb3Qbbq\nXIrxmPBdgrn9QI7NCgfnAY3Tb4EEjs3ON/BNyEhUENcXOH6I1NbcuBQ7g9P73kE4\nVORdoc8MdJ5eoKBpO8Ww8HECAwEAAQ== 
-616ae350:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAyduVzi1mWm+lYo2Tqt/0\nXkCIWrDNP1QBMVPrE0/ZlU2bCGSoo2Z9FHQKz/mTyMRlhNqTfhJ5qU3U9XlyGOPJ\npiM+b91g26pnpXJ2Q2kOypSgOMOPA4cQ42PkHBEqhuzssfj9t7x47ppS94bboh46\nxLSDRff/NAbtwTpvhStV3URYkxFG++cKGGa5MPXBrxIp+iZf9GnuxVdST5PGiVGP\nODL/b69sPJQNbJHVquqUTOh5Ry8uuD2WZuXfKf7/C0jC/ie9m2+0CttNu9tMciGM\nEyKG1/Xhk5iIWO43m4SrrT2WkFlcZ1z2JSf9Pjm4C2+HovYpihwwdM/OdP8Xmsnr\nDzVB4YvQiW+IHBjStHVuyiZWc+JsgEPJzisNY0Wyc/kNyNtqVKpX6dRhMLanLmy+\nf53cCSI05KPQAcGj6tdL+D60uKDkt+FsDa0BTAobZ31OsFVid0vCXtsbplNhW1IF\nHwsGXBTVcfXg44RLyL8Lk/2dQxDHNHzAUslJXzPxaHBLmt++2COa2EI1iWlvtznk\nOk9WP8SOAIj+xdqoiHcC4j72BOVVgiITIJNHrbppZCq6qPR+fgXmXa+sDcGh30m6\n9Wpbr28kLMSHiENCWTdsFij+NQTd5S47H7XTROHnalYDuF1RpS+DpQidT5tUimaT\nJZDr++FjKrnnijbyNF8b98UCAwEAAQ== -616db30d:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnpUpyWDWjlUk3smlWeA0\nlIMW+oJ38t92CRLHH3IqRhyECBRW0d0aRGtq7TY8PmxjjvBZrxTNDpJT6KUk4LRm\na6A6IuAI7QnNK8SJqM0DLzlpygd7GJf8ZL9SoHSH+gFsYF67Cpooz/YDqWrlN7Vw\ntO00s0B+eXy+PCXYU7VSfuWFGK8TGEv6HfGMALLjhqMManyvfp8hz3ubN1rK3c8C\nUS/ilRh1qckdbtPvoDPhSbTDmfU1g/EfRSIEXBrIMLg9ka/XB9PvWRrekrppnQzP\nhP9YE3x/wbFc5QqQWiRCYyQl/rgIMOXvIxhkfe8H5n1Et4VAorkpEAXdsfN8KSVv\nLSMazVlLp9GYq5SUpqYX3KnxdWBgN7BJoZ4sltsTpHQ/34SXWfu3UmyUveWj7wp0\nx9hwsPirVI00EEea9AbP7NM2rAyu6ukcm4m6ATd2DZJIViq2es6m60AE6SMCmrQF\nwmk4H/kdQgeAELVfGOm2VyJ3z69fQuywz7xu27S6zTKi05Qlnohxol4wVb6OB7qG\nLPRtK9ObgzRo/OPumyXqlzAi/Yvyd1ZQk8labZps3e16bQp8+pVPiumWioMFJDWV\nGZjCmyMSU8V6MB6njbgLHoyg2LCukCAeSjbPGGGYhnKLm1AKSoJh3IpZuqcKCk5C\n8CM1S15HxV78s9dFntEqIokCAwEAAQ== 
-66ba20fe:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtfB12w4ZgqsXWZDfUAV/\n6Y4aHUKIu3q4SXrNZ7CXF9nXoAVYrS7NAxJdAodsY3vPCN0g5O8DFXR+390LdOuQ\n+HsGKCc1k5tX5ZXld37EZNTNSbR0k+NKhd9h6X3u6wqPOx7SIKxwAQR8qeeFq4pP\nrt9GAGlxtuYgzIIcKJPwE0dZlcBCg+GnptCUZXp/38BP1eYC+xTXSL6Muq1etYfg\nodXdb7Yl+2h1IHuOwo5rjgY5kpY7GcAs8AjGk3lDD/av60OTYccknH0NCVSmPoXK\nvrxDBOn0LQRNBLcAfnTKgHrzy0Q5h4TNkkyTgxkoQw5ObDk9nnabTxql732yy9BY\ns+hM9+dSFO1HKeVXreYSA2n1ndF18YAvAumzgyqzB7I4pMHXq1kC/8bONMJxwSkS\nYm6CoXKyavp7RqGMyeVpRC7tV+blkrrUml0BwNkxE+XnwDRB3xDV6hqgWe0XrifD\nYTfvd9ScZQP83ip0r4IKlq4GMv/R5shcCRJSkSZ6QSGshH40JYSoiwJf5FHbj9ND\n7do0UAqebWo4yNx63j/wb2ULorW3AClv0BCFSdPsIrCStiGdpgJDBR2P2NZOCob3\nG9uMj+wJD6JJg2nWqNJxkANXX37Qf8plgzssrhrgOvB0fjjS7GYhfkfmZTJ0wPOw\nA8+KzFseBh4UFGgue78KwgkCAwEAAQ== -' -__Keyring= -__KeyringFile="/usr/share/keyrings/ubuntu-archive-keyring.gpg" -__SkipSigCheck=0 -__SkipEmulation=0 -__UseMirror=0 - -__UnprocessedBuildArgs= -while :; do - if [[ "$#" -le 0 ]]; then - break - fi - - lowerI="$(echo "$1" | tr "[:upper:]" "[:lower:]")" - case $lowerI in - -\?|-h|--help) - usage - ;; - arm) - __BuildArch=arm - __UbuntuArch=armhf - __AlpineArch=armv7 - __QEMUArch=arm - ;; - arm64) - __BuildArch=arm64 - __UbuntuArch=arm64 - __AlpineArch=aarch64 - __QEMUArch=aarch64 - __FreeBSDArch=arm64 - __FreeBSDMachineArch=aarch64 - ;; - armel) - __BuildArch=armel - __UbuntuArch=armel - __UbuntuRepo="http://archive.debian.org/debian/" - __CodeName=buster - __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" - __LLDB_Package="liblldb-6.0-dev" - __UbuntuPackages="${__UbuntuPackages// libomp-dev/}" - __UbuntuPackages="${__UbuntuPackages// libomp5/}" - __UbuntuSuites= - ;; - armv6) - __BuildArch=armv6 - __UbuntuArch=armhf - __QEMUArch=arm - __UbuntuRepo="http://raspbian.raspberrypi.org/raspbian/" - __CodeName=buster - __KeyringFile="/usr/share/keyrings/raspbian-archive-keyring.gpg" - __LLDB_Package="liblldb-6.0-dev" - __UbuntuSuites= - - if [[ -e "$__KeyringFile" ]]; then 
- __Keyring="--keyring $__KeyringFile" - fi - ;; - loongarch64) - __BuildArch=loongarch64 - __AlpineArch=loongarch64 - __QEMUArch=loongarch64 - __UbuntuArch=loong64 - __UbuntuSuites=unreleased - __LLDB_Package="liblldb-19-dev" - - if [[ "$__CodeName" == "sid" ]]; then - __UbuntuRepo="http://ftp.ports.debian.org/debian-ports/" - fi - ;; - riscv64) - __BuildArch=riscv64 - __AlpineArch=riscv64 - __AlpinePackages="${__AlpinePackages// lldb-dev/}" - __QEMUArch=riscv64 - __UbuntuArch=riscv64 - __UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}" - unset __LLDB_Package - ;; - ppc64le) - __BuildArch=ppc64le - __AlpineArch=ppc64le - __QEMUArch=ppc64le - __UbuntuArch=ppc64el - __UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/" - __UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}" - __UbuntuPackages="${__UbuntuPackages// libomp-dev/}" - __UbuntuPackages="${__UbuntuPackages// libomp5/}" - unset __LLDB_Package - ;; - s390x) - __BuildArch=s390x - __AlpineArch=s390x - __QEMUArch=s390x - __UbuntuArch=s390x - __UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/" - __UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}" - __UbuntuPackages="${__UbuntuPackages// libomp-dev/}" - __UbuntuPackages="${__UbuntuPackages// libomp5/}" - unset __LLDB_Package - ;; - x64) - __BuildArch=x64 - __AlpineArch=x86_64 - __UbuntuArch=amd64 - __FreeBSDArch=amd64 - __FreeBSDMachineArch=amd64 - __illumosArch=x86_64 - __HaikuArch=x86_64 - __UbuntuRepo="http://archive.ubuntu.com/ubuntu/" - ;; - x86) - __BuildArch=x86 - __UbuntuArch=i386 - __AlpineArch=x86 - __UbuntuRepo="http://archive.ubuntu.com/ubuntu/" - ;; - lldb*) - version="$(echo "$lowerI" | tr -d '[:alpha:]-=')" - majorVersion="${version%%.*}" - - [ -z "${version##*.*}" ] && minorVersion="${version#*.}" - if [ -z "$minorVersion" ]; then - minorVersion=0 - fi - - # for versions > 6.0, lldb has dropped the minor version - if [ "$majorVersion" -le 6 ]; then - version="$majorVersion.$minorVersion" - 
else - version="$majorVersion" - fi - - __LLDB_Package="liblldb-${version}-dev" - ;; - no-lldb) - unset __LLDB_Package - ;; - llvm*) - version="$(echo "$lowerI" | tr -d '[:alpha:]-=')" - __LLVM_MajorVersion="${version%%.*}" - - [ -z "${version##*.*}" ] && __LLVM_MinorVersion="${version#*.}" - if [ -z "$__LLVM_MinorVersion" ]; then - __LLVM_MinorVersion=0 - fi - - # for versions > 6.0, lldb has dropped the minor version - if [ "$__LLVM_MajorVersion" -gt 6 ]; then - __LLVM_MinorVersion= - fi - - ;; - xenial) # Ubuntu 16.04 - __CodeName=xenial - ;; - bionic) # Ubuntu 18.04 - __CodeName=bionic - ;; - focal) # Ubuntu 20.04 - __CodeName=focal - ;; - jammy) # Ubuntu 22.04 - __CodeName=jammy - ;; - noble) # Ubuntu 24.04 - __CodeName=noble - if [[ -z "$__LLDB_Package" ]]; then - __LLDB_Package="liblldb-19-dev" - fi - ;; - stretch) # Debian 9 - __CodeName=stretch - __LLDB_Package="liblldb-6.0-dev" - __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" - - if [[ -z "$__UbuntuRepo" ]]; then - __UbuntuRepo="http://ftp.debian.org/debian/" - fi - ;; - buster) # Debian 10 - __CodeName=buster - __LLDB_Package="liblldb-6.0-dev" - __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" - - if [[ -z "$__UbuntuRepo" ]]; then - __UbuntuRepo="http://archive.debian.org/debian/" - fi - ;; - bullseye) # Debian 11 - __CodeName=bullseye - __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" - - if [[ -z "$__UbuntuRepo" ]]; then - __UbuntuRepo="http://ftp.debian.org/debian/" - fi - ;; - bookworm) # Debian 12 - __CodeName=bookworm - __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" - - if [[ -z "$__UbuntuRepo" ]]; then - __UbuntuRepo="http://ftp.debian.org/debian/" - fi - ;; - sid) # Debian sid - __CodeName=sid - __UbuntuSuites= - - # Debian-Ports architectures need different values - case "$__UbuntuArch" in - amd64|arm64|armel|armhf|i386|mips64el|ppc64el|riscv64|s390x) - 
__KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" - - if [[ -z "$__UbuntuRepo" ]]; then - __UbuntuRepo="http://ftp.debian.org/debian/" - fi - ;; - *) - __KeyringFile="/usr/share/keyrings/debian-ports-archive-keyring.gpg" - - if [[ -z "$__UbuntuRepo" ]]; then - __UbuntuRepo="http://ftp.ports.debian.org/debian-ports/" - fi - ;; - esac - - if [[ -e "$__KeyringFile" ]]; then - __Keyring="--keyring $__KeyringFile" - fi - ;; - tizen) - __CodeName= - __UbuntuRepo= - __Tizen=tizen - ;; - alpine*) - __CodeName=alpine - __UbuntuRepo= - - if [[ "$lowerI" == "alpineedge" ]]; then - __AlpineVersion=edge - else - version="$(echo "$lowerI" | tr -d '[:alpha:]-=')" - __AlpineMajorVersion="${version%%.*}" - __AlpineMinorVersion="${version#*.}" - __AlpineVersion="$__AlpineMajorVersion.$__AlpineMinorVersion" - fi - ;; - freebsd13) - __CodeName=freebsd - __SkipUnmount=1 - ;; - freebsd14) - __CodeName=freebsd - __FreeBSDBase="14.2-RELEASE" - __FreeBSDABI="14" - __SkipUnmount=1 - ;; - illumos) - __CodeName=illumos - __SkipUnmount=1 - ;; - haiku) - __CodeName=haiku - __SkipUnmount=1 - ;; - --skipunmount) - __SkipUnmount=1 - ;; - --skipsigcheck) - __SkipSigCheck=1 - ;; - --skipemulation) - __SkipEmulation=1 - ;; - --rootfsdir|-rootfsdir) - shift - __RootfsDir="$1" - ;; - --use-mirror) - __UseMirror=1 - ;; - --use-jobs) - shift - MAXJOBS=$1 - ;; - *) - __UnprocessedBuildArgs="$__UnprocessedBuildArgs $1" - ;; - esac - - shift -done - -case "$__AlpineVersion" in - 3.14) __AlpinePackages+=" llvm11-libs" ;; - 3.15) __AlpinePackages+=" llvm12-libs" ;; - 3.16) __AlpinePackages+=" llvm13-libs" ;; - 3.17) __AlpinePackages+=" llvm15-libs" ;; - edge) __AlpineLlvmLibsLookup=1 ;; - *) - if [[ "$__AlpineArch" =~ s390x|ppc64le ]]; then - __AlpineVersion=3.15 # minimum version that supports lldb-dev - __AlpinePackages+=" llvm12-libs" - elif [[ "$__AlpineArch" == "x86" ]]; then - __AlpineVersion=3.17 # minimum version that supports lldb-dev - __AlpinePackages+=" llvm15-libs" 
- elif [[ "$__AlpineArch" == "riscv64" || "$__AlpineArch" == "loongarch64" ]]; then - __AlpineVersion=3.21 # minimum version that supports lldb-dev - __AlpinePackages+=" llvm19-libs" - elif [[ -n "$__AlpineMajorVersion" ]]; then - # use whichever alpine version is provided and select the latest toolchain libs - __AlpineLlvmLibsLookup=1 - else - __AlpineVersion=3.13 # 3.13 to maximize compatibility - __AlpinePackages+=" llvm10-libs" - fi -esac - -if [[ "$__AlpineVersion" =~ 3\.1[345] ]]; then - # compiler-rt--static was merged in compiler-rt package in alpine 3.16 - # for older versions, we need compiler-rt--static, so replace the name - __AlpinePackages="${__AlpinePackages/compiler-rt/compiler-rt-static}" -fi - -__UbuntuPackages+=" ${__LLDB_Package:-}" - -if [[ -z "$__UbuntuRepo" ]]; then - __UbuntuRepo="http://ports.ubuntu.com/" -fi - -if [[ -n "$__LLVM_MajorVersion" ]]; then - __UbuntuPackages+=" libclang-common-${__LLVM_MajorVersion}${__LLVM_MinorVersion:+.$__LLVM_MinorVersion}-dev" -fi - -if [[ -z "$__RootfsDir" && -n "$ROOTFS_DIR" ]]; then - __RootfsDir="$ROOTFS_DIR" -fi - -if [[ -z "$__RootfsDir" ]]; then - __RootfsDir="$__CrossDir/../../../.tools/rootfs/$__BuildArch" -fi - -if [[ -d "$__RootfsDir" ]]; then - if [[ "$__SkipUnmount" == "0" ]]; then - umount "$__RootfsDir"/* || true - fi - rm -rf "$__RootfsDir" -fi - -mkdir -p "$__RootfsDir" -__RootfsDir="$( cd "$__RootfsDir" && pwd )" - -__hasWget= -ensureDownloadTool() -{ - if command -v wget &> /dev/null; then - __hasWget=1 - elif command -v curl &> /dev/null; then - __hasWget=0 - else - >&2 echo "ERROR: either wget or curl is required by this script." 
- exit 1 - fi -} - -if [[ "$__CodeName" == "alpine" ]]; then - __ApkToolsVersion=2.12.11 - __ApkToolsDir="$(mktemp -d)" - __ApkKeysDir="$(mktemp -d)" - arch="$(uname -m)" - - ensureDownloadTool - - if [[ "$__hasWget" == 1 ]]; then - wget -P "$__ApkToolsDir" "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic/v$__ApkToolsVersion/$arch/apk.static" - else - curl -SLO --create-dirs --output-dir "$__ApkToolsDir" "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic/v$__ApkToolsVersion/$arch/apk.static" - fi - if [[ "$arch" == "x86_64" ]]; then - __ApkToolsSHA512SUM="53e57b49230da07ef44ee0765b9592580308c407a8d4da7125550957bb72cb59638e04f8892a18b584451c8d841d1c7cb0f0ab680cc323a3015776affaa3be33" - elif [[ "$arch" == "aarch64" ]]; then - __ApkToolsSHA512SUM="9e2b37ecb2b56c05dad23d379be84fd494c14bd730b620d0d576bda760588e1f2f59a7fcb2f2080577e0085f23a0ca8eadd993b4e61c2ab29549fdb71969afd0" - else - echo "WARNING: add missing hash for your host architecture. 
To find the value, use: 'find /tmp -name apk.static -exec sha512sum {} \;'" - fi - echo "$__ApkToolsSHA512SUM $__ApkToolsDir/apk.static" | sha512sum -c - chmod +x "$__ApkToolsDir/apk.static" - - if [[ "$__AlpineVersion" == "edge" ]]; then - version=edge - else - version="v$__AlpineVersion" - fi - - for line in $__AlpineKeys; do - id="${line%%:*}" - content="${line#*:}" - - echo -e "-----BEGIN PUBLIC KEY-----\n$content\n-----END PUBLIC KEY-----" > "$__ApkKeysDir/alpine-devel@lists.alpinelinux.org-$id.rsa.pub" - done - - if [[ "$__SkipSigCheck" == "1" ]]; then - __ApkSignatureArg="--allow-untrusted" - else - __ApkSignatureArg="--keys-dir $__ApkKeysDir" - fi - - if [[ "$__SkipEmulation" == "1" ]]; then - __NoEmulationArg="--no-scripts" - fi - - # initialize DB - # shellcheck disable=SC2086 - "$__ApkToolsDir/apk.static" \ - -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ - -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ - -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" --initdb add - - if [[ "$__AlpineLlvmLibsLookup" == 1 ]]; then - # shellcheck disable=SC2086 - __AlpinePackages+=" $("$__ApkToolsDir/apk.static" \ - -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ - -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ - -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" \ - search 'llvm*-libs' | grep -E '^llvm' | sort | tail -1 | sed 's/-[^-]*//2g')" - fi - - # install all packages in one go - # shellcheck disable=SC2086 - "$__ApkToolsDir/apk.static" \ - -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ - -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ - -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" $__NoEmulationArg \ - add $__AlpinePackages - - rm -r "$__ApkToolsDir" -elif [[ "$__CodeName" == "freebsd" ]]; then - mkdir -p "$__RootfsDir"/usr/local/etc - JOBS=${MAXJOBS:="$(getconf 
_NPROCESSORS_ONLN)"} - - ensureDownloadTool - - if [[ "$__hasWget" == 1 ]]; then - wget -O- "https://download.freebsd.org/ftp/releases/$%7B__FreeBSDArch%7D/$%7B__FreeBSDMachineArch%7D/$%7B__FreeBSDBase%7D/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version - else - curl -SL "https://download.freebsd.org/ftp/releases/$%7B__FreeBSDArch%7D/$%7B__FreeBSDMachineArch%7D/$%7B__FreeBSDBase%7D/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version - fi - echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf - echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf - mkdir -p "$__RootfsDir"/tmp - # get and build package manager - if [[ "$__hasWget" == 1 ]]; then - wget -O- "https://github.com/freebsd/pkg/archive/$%7B__FreeBSDPkg%7D.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf - - else - curl -SL "https://github.com/freebsd/pkg/archive/$%7B__FreeBSDPkg%7D.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf - - fi - cd "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}" - # needed for install to succeed - mkdir -p "$__RootfsDir"/host/etc - ./autogen.sh && ./configure --prefix="$__RootfsDir"/host && make -j "$JOBS" && make install - rm -rf "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}" - # install packages we need. 
- INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf update - # shellcheck disable=SC2086 - INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages -elif [[ "$__CodeName" == "illumos" ]]; then - mkdir "$__RootfsDir/tmp" - pushd "$__RootfsDir/tmp" - JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"} - - ensureDownloadTool - - echo "Downloading sysroot." - if [[ "$__hasWget" == 1 ]]; then - wget -O- https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf - - else - curl -SL https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf - - fi - echo "Building binutils. Please wait.." - if [[ "$__hasWget" == 1 ]]; then - wget -O- https://ftp.gnu.org/gnu/binutils/binutils-2.42.tar.xz | tar -xJf - - else - curl -SL https://ftp.gnu.org/gnu/binutils/binutils-2.42.tar.xz | tar -xJf - - fi - mkdir build-binutils && cd build-binutils - ../binutils-2.42/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.11" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" - make -j "$JOBS" && make install && cd .. - echo "Building gcc. Please wait.." 
- if [[ "$__hasWget" == 1 ]]; then - wget -O- https://ftp.gnu.org/gnu/gcc/gcc-13.3.0/gcc-13.3.0.tar.xz | tar -xJf - - else - curl -SL https://ftp.gnu.org/gnu/gcc/gcc-13.3.0/gcc-13.3.0.tar.xz | tar -xJf - - fi - CFLAGS="-fPIC" - CXXFLAGS="-fPIC" - CXXFLAGS_FOR_TARGET="-fPIC" - CFLAGS_FOR_TARGET="-fPIC" - export CFLAGS CXXFLAGS CXXFLAGS_FOR_TARGET CFLAGS_FOR_TARGET - mkdir build-gcc && cd build-gcc - ../gcc-13.3.0/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.11" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \ - --with-gnu-ld --disable-nls --disable-libgomp --disable-libquadmath --disable-libssp --disable-libvtv --disable-libcilkrts --disable-libada --disable-libsanitizer \ - --disable-libquadmath-support --disable-shared --enable-tls - make -j "$JOBS" && make install && cd .. - BaseUrl=https://pkgsrc.smartos.org - if [[ "$__UseMirror" == 1 ]]; then - BaseUrl=https://pkgsrc.smartos.skylime.net - fi - BaseUrl="$BaseUrl/packages/SmartOS/2019Q4/${__illumosArch}/All" - echo "Downloading manifest" - if [[ "$__hasWget" == 1 ]]; then - wget "$BaseUrl" - else - curl -SLO "$BaseUrl" - fi - echo "Downloading dependencies." - read -ra array <<<"$__IllumosPackages" - for package in "${array[@]}"; do - echo "Installing '$package'" - # find last occurrence of package in listing and extract its name - package="$(sed -En '/.*href="http://github.com/('"$package"'-[0-9].*).tgz".*/h;$!d;g;s//\1/p' All)" - echo "Resolved name '$package'" - if [[ "$__hasWget" == 1 ]]; then - wget "$BaseUrl"/"$package".tgz - else - curl -SLO "$BaseUrl"/"$package".tgz - fi - ar -x "$package".tgz - tar --skip-old-files -xzf "$package".tmp.tg* -C "$__RootfsDir" 2>/dev/null - done - echo "Cleaning up temporary files." 
- popd - rm -rf "$__RootfsDir"/{tmp,+*} - mkdir -p "$__RootfsDir"/usr/include/net - mkdir -p "$__RootfsDir"/usr/include/netpacket - if [[ "$__hasWget" == 1 ]]; then - wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h - wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h - wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h - wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h - else - curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h - curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h - curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h - curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h - fi -elif [[ "$__CodeName" == "haiku" ]]; then - JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"} - - echo "Building Haiku sysroot for $__HaikuArch" - mkdir -p "$__RootfsDir/tmp" - pushd "$__RootfsDir/tmp" - - mkdir "$__RootfsDir/tmp/download" - - ensureDownloadTool - - echo "Downloading Haiku package tools" - git clone https://github.com/haiku/haiku-toolchains-ubuntu --depth 1 "$__RootfsDir/tmp/script" - if [[ "$__hasWget" == 1 ]]; then - wget -O "$__RootfsDir/tmp/download/hosttools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --hosttools)" - else - curl -SLo 
"$__RootfsDir/tmp/download/hosttools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --hosttools)" - fi - - unzip -o "$__RootfsDir/tmp/download/hosttools.zip" -d "$__RootfsDir/tmp/bin" - - HaikuBaseUrl="https://eu.hpkg.haiku-os.org/haiku/master/$__HaikuArch/current" - HaikuPortsBaseUrl="https://eu.hpkg.haiku-os.org/haikuports/master/$__HaikuArch/current" - - echo "Downloading HaikuPorts package repository index..." - if [[ "$__hasWget" == 1 ]]; then - wget -P "$__RootfsDir/tmp/download" "$HaikuPortsBaseUrl/repo" - else - curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$HaikuPortsBaseUrl/repo" - fi - - echo "Downloading Haiku packages" - read -ra array <<<"$__HaikuPackages" - for package in "${array[@]}"; do - echo "Downloading $package..." - hpkgFilename="$(LD_LIBRARY_PATH="$__RootfsDir/tmp/bin" "$__RootfsDir/tmp/bin/package_repo" list -f "$__RootfsDir/tmp/download/repo" | - grep -E "${package}-" | sort -V | tail -n 1 | xargs)" - if [ -z "$hpkgFilename" ]; then - >&2 echo "ERROR: package $package missing." - exit 1 - fi - echo "Resolved filename: $hpkgFilename..." - hpkgDownloadUrl="$HaikuPortsBaseUrl/packages/$hpkgFilename" - if [[ "$__hasWget" == 1 ]]; then - wget -P "$__RootfsDir/tmp/download" "$hpkgDownloadUrl" - else - curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$hpkgDownloadUrl" - fi - done - for package in haiku haiku_devel; do - echo "Downloading $package..." 
- if [[ "$__hasWget" == 1 ]]; then - hpkgVersion="$(wget -qO- "$HaikuBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')" - wget -P "$__RootfsDir/tmp/download" "$HaikuBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg" - else - hpkgVersion="$(curl -sSL "$HaikuBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')" - curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$HaikuBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg" - fi - done - - # Set up the sysroot - echo "Setting up sysroot and extracting required packages" - mkdir -p "$__RootfsDir/boot/system" - for file in "$__RootfsDir/tmp/download/"*.hpkg; do - echo "Extracting $file..." - LD_LIBRARY_PATH="$__RootfsDir/tmp/bin" "$__RootfsDir/tmp/bin/package" extract -C "$__RootfsDir/boot/system" "$file" - done - - # Download buildtools - echo "Downloading Haiku buildtools" - if [[ "$__hasWget" == 1 ]]; then - wget -O "$__RootfsDir/tmp/download/buildtools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --buildtools --arch=$__HaikuArch)" - else - curl -SLo "$__RootfsDir/tmp/download/buildtools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --buildtools --arch=$__HaikuArch)" - fi - unzip -o "$__RootfsDir/tmp/download/buildtools.zip" -d "$__RootfsDir" - - # Cleaning up temporary files - echo "Cleaning up temporary files" - popd - rm -rf "$__RootfsDir/tmp" -elif [[ -n "$__CodeName" ]]; then - __Suites="$__CodeName $(for suite in $__UbuntuSuites; do echo -n "$__CodeName-$suite "; done)" - - if [[ "$__SkipEmulation" == "1" ]]; then - if [[ -z "$AR" ]]; then - if command -v ar &>/dev/null; then - AR="$(command -v ar)" - elif command -v llvm-ar &>/dev/null; then - AR="$(command -v llvm-ar)" - else - echo "Unable to find ar or llvm-ar on PATH, add them to PATH or set AR environment variable pointing to the available AR tool" - exit 1 - fi - fi - - PYTHON=${PYTHON_EXECUTABLE:-python3} - - # shellcheck disable=SC2086,SC2046 - echo running "$PYTHON" "$__CrossDir/install-debs.py" --arch "$__UbuntuArch" 
--mirror "$__UbuntuRepo" --rootfsdir "$__RootfsDir" --artool "$AR" \ - $(for suite in $__Suites; do echo -n "--suite $suite "; done) \ - $__UbuntuPackages - - # shellcheck disable=SC2086,SC2046 - "$PYTHON" "$__CrossDir/install-debs.py" --arch "$__UbuntuArch" --mirror "$__UbuntuRepo" --rootfsdir "$__RootfsDir" --artool "$AR" \ - $(for suite in $__Suites; do echo -n "--suite $suite "; done) \ - $__UbuntuPackages - - exit 0 - fi - - __UpdateOptions= - if [[ "$__SkipSigCheck" == "0" ]]; then - __Keyring="$__Keyring --force-check-gpg" - else - __Keyring= - __UpdateOptions="--allow-unauthenticated --allow-insecure-repositories" - fi - - # shellcheck disable=SC2086 - echo running debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo" - - # shellcheck disable=SC2086 - if ! debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"; then - echo "debootstrap failed! dumping debootstrap.log" - cat "$__RootfsDir/debootstrap/debootstrap.log" - exit 1 - fi - - rm -rf "$__RootfsDir"/etc/apt/*.{sources,list} "$__RootfsDir"/etc/apt/sources.list.d - mkdir -p "$__RootfsDir/etc/apt/sources.list.d/" - - # shellcheck disable=SC2086 - cat > "$__RootfsDir/etc/apt/sources.list.d/$__CodeName.sources" < token2) - (token1 < token2) - else: - return -1 if isinstance(token1, str) else 1 - - return len(tokens1) - len(tokens2) - -def compare_debian_versions(version1, version2): - """Compare two Debian package versions.""" - epoch1, upstream1, revision1 = parse_debian_version(version1) - epoch2, upstream2, revision2 = parse_debian_version(version2) - - if epoch1 != epoch2: - return epoch1 - epoch2 - - result = compare_upstream_version(upstream1, upstream2) - if result != 0: - return result - - return compare_upstream_version(revision1, revision2) - -def resolve_dependencies(packages, aliases, desired_packages): - """Recursively resolves dependencies for the desired packages.""" - resolved = 
[] - to_process = deque(desired_packages) - - while to_process: - current = to_process.popleft() - resolved_package = current if current in packages else aliases.get(current, [None])[0] - - if not resolved_package: - print(f"Error: Package '{current}' was not found in the available packages.") - sys.exit(1) - - if resolved_package not in resolved: - resolved.append(resolved_package) - - deps = packages.get(resolved_package, {}).get("Depends", "") - if deps: - deps = [dep.split(' ')[0] for dep in deps.split(', ') if dep] - for dep in deps: - if dep not in resolved and dep not in to_process and dep in packages: - to_process.append(dep) - - return resolved - -def parse_package_index(content): - """Parses the Packages.gz file and returns package information.""" - packages = {} - aliases = {} - entries = re.split(r'\n\n+', content) - - for entry in entries: - fields = dict(re.findall(r'^(\S+): (.+)$', entry, re.MULTILINE)) - if "Package" in fields: - package_name = fields["Package"] - version = fields.get("Version") - filename = fields.get("Filename") - depends = fields.get("Depends") - provides = fields.get("Provides", None) - - # Only update if package_name is not in packages or if the new version is higher - if package_name not in packages or compare_debian_versions(version, packages[package_name]["Version"]) > 0: - packages[package_name] = { - "Version": version, - "Filename": filename, - "Depends": depends - } - - # Update aliases if package provides any alternatives - if provides: - provides_list = [x.strip() for x in provides.split(",")] - for alias in provides_list: - # Strip version specifiers - alias_name = re.sub(r'\s*\(=.*\)', '', alias) - if alias_name not in aliases: - aliases[alias_name] = [] - if package_name not in aliases[alias_name]: - aliases[alias_name].append(package_name) - - return packages, aliases - -def install_packages(mirror, packages_info, aliases, tmp_dir, extract_dir, ar_tool, desired_packages): - """Downloads .deb files and extracts 
them.""" - resolved_packages = resolve_dependencies(packages_info, aliases, desired_packages) - print(f"Resolved packages (including dependencies): {resolved_packages}") - - packages_to_download = {} - - for pkg in resolved_packages: - if pkg in packages_info: - packages_to_download[pkg] = packages_info[pkg] - - if pkg in aliases: - for alias in aliases[pkg]: - if alias in packages_info: - packages_to_download[alias] = packages_info[alias] - - asyncio.run(download_deb_files_parallel(mirror, packages_to_download, tmp_dir)) - - package_to_deb_file_map = {} - for pkg in resolved_packages: - pkg_info = packages_info.get(pkg) - if pkg_info: - deb_filename = pkg_info.get("Filename") - if deb_filename: - deb_file_path = os.path.join(tmp_dir, os.path.basename(deb_filename)) - package_to_deb_file_map[pkg] = deb_file_path - - for pkg in reversed(resolved_packages): - deb_file = package_to_deb_file_map.get(pkg) - if deb_file and os.path.exists(deb_file): - extract_deb_file(deb_file, tmp_dir, extract_dir, ar_tool) - - print("All done!") - -def extract_deb_file(deb_file, tmp_dir, extract_dir, ar_tool): - """Extract .deb file contents""" - - os.makedirs(extract_dir, exist_ok=True) - - with tempfile.TemporaryDirectory(dir=tmp_dir) as tmp_subdir: - result = subprocess.run(f"{ar_tool} t {os.path.abspath(deb_file)}", cwd=tmp_subdir, check=True, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - - tar_filename = None - for line in result.stdout.decode().splitlines(): - if line.startswith("data.tar"): - tar_filename = line.strip() - break - - if not tar_filename: - raise FileNotFoundError(f"Could not find 'data.tar.*' in {deb_file}.") - - tar_file_path = os.path.join(tmp_subdir, tar_filename) - print(f"Extracting {tar_filename} from {deb_file}..") - - subprocess.run(f"{ar_tool} p {os.path.abspath(deb_file)} {tar_filename} > {tar_file_path}", check=True, shell=True) - - file_extension = os.path.splitext(tar_file_path)[1].lower() - - if file_extension == ".xz": - mode = 
"r:xz" - elif file_extension == ".gz": - mode = "r:gz" - elif file_extension == ".zst": - # zstd is not supported by standard library yet - decompressed_tar_path = tar_file_path.replace(".zst", "") - with open(tar_file_path, "rb") as zst_file, open(decompressed_tar_path, "wb") as decompressed_file: - dctx = zstandard.ZstdDecompressor() - dctx.copy_stream(zst_file, decompressed_file) - - tar_file_path = decompressed_tar_path - mode = "r" - else: - raise ValueError(f"Unsupported compression format: {file_extension}") - - with tarfile.open(tar_file_path, mode) as tar: - tar.extractall(path=extract_dir, filter='fully_trusted') - -def finalize_setup(rootfsdir): - lib_dir = os.path.join(rootfsdir, 'lib') - usr_lib_dir = os.path.join(rootfsdir, 'usr', 'lib') - - if os.path.exists(lib_dir): - if os.path.islink(lib_dir): - os.remove(lib_dir) - else: - os.makedirs(usr_lib_dir, exist_ok=True) - - for item in os.listdir(lib_dir): - src = os.path.join(lib_dir, item) - dest = os.path.join(usr_lib_dir, item) - - if os.path.isdir(src): - shutil.copytree(src, dest, dirs_exist_ok=True) - else: - shutil.copy2(src, dest) - - shutil.rmtree(lib_dir) - - os.symlink(usr_lib_dir, lib_dir) - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description="Generate rootfs for .NET runtime on Debian-like OS") - parser.add_argument("--distro", required=False, help="Distro name (e.g., debian, ubuntu, etc.)") - parser.add_argument("--arch", required=True, help="Architecture (e.g., amd64, loong64, etc.)") - parser.add_argument("--rootfsdir", required=True, help="Destination directory.") - parser.add_argument('--suite', required=True, action='http://github.com/append', help='Specify one or more repository suites to collect index data.') - parser.add_argument("--mirror", required=False, help="Mirror (e.g., http://ftp.debian.org/debian-ports etc.)") - parser.add_argument("--artool", required=False, default="ar", help="ar tool to extract debs (e.g., ar, llvm-ar etc.)") - 
parser.add_argument("packages", nargs="+", help="List of package names to be installed.") - - args = parser.parse_args() - - if args.mirror is None: - if args.distro == "ubuntu": - args.mirror = "http://archive.ubuntu.com/ubuntu" if args.arch in ["amd64", "i386"] else "http://ports.ubuntu.com/ubuntu-ports" - elif args.distro == "debian": - args.mirror = "http://ftp.debian.org/debian-ports" - else: - raise Exception("Unsupported distro") - - DESIRED_PACKAGES = args.packages + [ # base packages - "dpkg", - "busybox", - "libc-bin", - "base-files", - "base-passwd", - "debianutils" - ] - - print(f"Creating rootfs. rootfsdir: {args.rootfsdir}, distro: {args.distro}, arch: {args.arch}, suites: {args.suite}, mirror: {args.mirror}") - - package_index_content = asyncio.run(download_package_index_parallel(args.mirror, args.arch, args.suite)) - - packages_info, aliases = parse_package_index(package_index_content) - - with tempfile.TemporaryDirectory() as tmp_dir: - install_packages(args.mirror, packages_info, aliases, tmp_dir, args.rootfsdir, args.artool, DESIRED_PACKAGES) - - finalize_setup(args.rootfsdir) diff --git a/eng/common/cross/riscv64/tizen/tizen.patch b/eng/common/cross/riscv64/tizen/tizen.patch deleted file mode 100644 index eb6d1c074..000000000 --- a/eng/common/cross/riscv64/tizen/tizen.patch +++ /dev/null @@ -1,9 +0,0 @@ -diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so ---- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900 -+++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900 -@@ -2,4 +2,4 @@ - Use the shared library, but some functions are only in - the static library, so try that secondarily. 
*/ - OUTPUT_FORMAT(elf64-littleriscv) --GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib64/ld-linux-riscv64-lp64d.so.1 ) ) -+GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-riscv64-lp64d.so.1 ) ) diff --git a/eng/common/cross/tizen-build-rootfs.sh b/eng/common/cross/tizen-build-rootfs.sh deleted file mode 100755 index ba31c9328..000000000 --- a/eng/common/cross/tizen-build-rootfs.sh +++ /dev/null @@ -1,82 +0,0 @@ -#!/usr/bin/env bash -set -e - -ARCH=$1 -LINK_ARCH=$ARCH - -case "$ARCH" in - arm) - TIZEN_ARCH="armv7hl" - ;; - armel) - TIZEN_ARCH="armv7l" - LINK_ARCH="arm" - ;; - arm64) - TIZEN_ARCH="aarch64" - ;; - x86) - TIZEN_ARCH="i686" - ;; - x64) - TIZEN_ARCH="x86_64" - LINK_ARCH="x86" - ;; - riscv64) - TIZEN_ARCH="riscv64" - LINK_ARCH="riscv" - ;; - *) - echo "Unsupported architecture for tizen: $ARCH" - exit 1 -esac - -__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) -__TIZEN_CROSSDIR="$__CrossDir/${ARCH}/tizen" - -if [[ -z "$ROOTFS_DIR" ]]; then - echo "ROOTFS_DIR is not defined." 
- exit 1; -fi - -TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp -mkdir -p $TIZEN_TMP_DIR - -# Download files -echo ">>Start downloading files" -VERBOSE=1 $__CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR $TIZEN_ARCH -echo "<>Start constructing Tizen rootfs" -TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm` -cd $ROOTFS_DIR -for f in $TIZEN_RPM_FILES; do - rpm2cpio $f | cpio -idm --quiet -done -echo "<>Start configuring Tizen rootfs" -ln -sfn asm-${LINK_ARCH} ./usr/include/asm -patch -p1 < $__TIZEN_CROSSDIR/tizen.patch -if [[ "$TIZEN_ARCH" == "riscv64" ]]; then - echo "Fixing broken symlinks in $PWD" - rm ./usr/lib64/libresolv.so - ln -s ../../lib64/libresolv.so.2 ./usr/lib64/libresolv.so - rm ./usr/lib64/libpthread.so - ln -s ../../lib64/libpthread.so.0 ./usr/lib64/libpthread.so - rm ./usr/lib64/libdl.so - ln -s ../../lib64/libdl.so.2 ./usr/lib64/libdl.so - rm ./usr/lib64/libutil.so - ln -s ../../lib64/libutil.so.1 ./usr/lib64/libutil.so - rm ./usr/lib64/libm.so - ln -s ../../lib64/libm.so.6 ./usr/lib64/libm.so - rm ./usr/lib64/librt.so - ln -s ../../lib64/librt.so.1 ./usr/lib64/librt.so - rm ./lib/ld-linux-riscv64-lp64d.so.1 - ln -s ../lib64/ld-linux-riscv64-lp64d.so.1 ./lib/ld-linux-riscv64-lp64d.so.1 -fi -echo "</dev/null; then - VERBOSE=0 -fi - -Log() -{ - if [ $VERBOSE -ge 1 ]; then - echo ${@:2} - fi -} - -Inform() -{ - Log 1 -e "\x1B[0;34m$@\x1B[m" -} - -Debug() -{ - Log 2 -e "\x1B[0;32m$@\x1B[m" -} - -Error() -{ - >&2 Log 0 -e "\x1B[0;31m$@\x1B[m" -} - -Fetch() -{ - URL=$1 - FILE=$2 - PROGRESS=$3 - if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then - CURL_OPT="--progress-bar" - else - CURL_OPT="--silent" - fi - curl $CURL_OPT $URL > $FILE -} - -hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; } -hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; } -hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; } - -TMPDIR=$1 -if [ ! 
-d $TMPDIR ]; then - TMPDIR=./tizen_tmp - Debug "Create temporary directory : $TMPDIR" - mkdir -p $TMPDIR -fi - -TIZEN_ARCH=$2 - -TIZEN_URL=http://download.tizen.org/snapshots/TIZEN/Tizen -BUILD_XML=build.xml -REPOMD_XML=repomd.xml -PRIMARY_XML=primary.xml -TARGET_URL="http://__not_initialized/" - -Xpath_get() -{ - XPATH_RESULT='' - XPATH=$1 - XML_FILE=$2 - RESULT=$(xmllint --xpath $XPATH $XML_FILE) - if [[ -z ${RESULT// } ]]; then - Error "Can not find target from $XML_FILE" - Debug "Xpath = $XPATH" - exit 1 - fi - XPATH_RESULT=$RESULT -} - -fetch_tizen_pkgs_init() -{ - TARGET=$1 - PROFILE=$2 - Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE" - - TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs - if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi - mkdir -p $TMP_PKG_DIR - - PKG_URL=$TIZEN_URL/$PROFILE/latest - - BUILD_XML_URL=$PKG_URL/$BUILD_XML - TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML - TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML - TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML - TMP_PRIMARYGZ=${TMP_PRIMARY}.gz - - Fetch $BUILD_XML_URL $TMP_BUILD - - Debug "fetch $BUILD_XML_URL to $TMP_BUILD" - - TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()" - Xpath_get $TARGET_XPATH $TMP_BUILD - TARGET_PATH=$XPATH_RESULT - TARGET_URL=$PKG_URL/$TARGET_PATH - - REPOMD_URL=$TARGET_URL/repodata/repomd.xml - PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)' - - Fetch $REPOMD_URL $TMP_REPOMD - - Debug "fetch $REPOMD_URL to $TMP_REPOMD" - - Xpath_get $PRIMARY_XPATH $TMP_REPOMD - PRIMARY_XML_PATH=$XPATH_RESULT - PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH - - Fetch $PRIMARY_URL $TMP_PRIMARYGZ - - Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ" - - gunzip $TMP_PRIMARYGZ - - Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY" -} - -fetch_tizen_pkgs() -{ - ARCH=$1 - 
PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)' - - PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())' - - for pkg in ${@:2} - do - Inform "Fetching... $pkg" - XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg} - XPATH=${XPATH/_ARCH_/$ARCH} - Xpath_get $XPATH $TMP_PRIMARY - PKG_PATH=$XPATH_RESULT - - XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg} - XPATH=${XPATH/_ARCH_/$ARCH} - Xpath_get $XPATH $TMP_PRIMARY - CHECKSUM=$XPATH_RESULT - - PKG_URL=$TARGET_URL/$PKG_PATH - PKG_FILE=$(basename $PKG_PATH) - PKG_PATH=$TMPDIR/$PKG_FILE - - Debug "Download $PKG_URL to $PKG_PATH" - Fetch $PKG_URL $PKG_PATH true - - echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null - if [ $? -ne 0 ]; then - Error "Fail to fetch $PKG_URL to $PKG_PATH" - Debug "Checksum = $CHECKSUM" - exit 1 - fi - done -} - -BASE="Tizen-Base" -UNIFIED="Tizen-Unified" - -Inform "Initialize ${TIZEN_ARCH} base" -fetch_tizen_pkgs_init standard $BASE -Inform "fetch common packages" -fetch_tizen_pkgs ${TIZEN_ARCH} gcc gcc-devel-static glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils -Inform "fetch coreclr packages" -fetch_tizen_pkgs ${TIZEN_ARCH} libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu -if [ "$TIZEN_ARCH" != "riscv64" ]; then - fetch_tizen_pkgs ${TIZEN_ARCH} lldb lldb-devel -fi -Inform "fetch corefx packages" -fetch_tizen_pkgs ${TIZEN_ARCH} libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel - -Inform "Initialize standard unified" -fetch_tizen_pkgs_init standard $UNIFIED -Inform "fetch corefx packages" -fetch_tizen_pkgs ${TIZEN_ARCH} gssdp gssdp-devel 
tizen-release - diff --git a/eng/common/cross/toolchain.cmake b/eng/common/cross/toolchain.cmake deleted file mode 100644 index 0ff85cf03..000000000 --- a/eng/common/cross/toolchain.cmake +++ /dev/null @@ -1,387 +0,0 @@ -set(CROSS_ROOTFS $ENV{ROOTFS_DIR}) - -# reset platform variables (e.g. cmake 3.25 sets LINUX=1) -unset(LINUX) -unset(FREEBSD) -unset(ILLUMOS) -unset(ANDROID) -unset(TIZEN) -unset(HAIKU) - -set(TARGET_ARCH_NAME $ENV{TARGET_BUILD_ARCH}) -if(EXISTS ${CROSS_ROOTFS}/bin/freebsd-version) - set(CMAKE_SYSTEM_NAME FreeBSD) - set(FREEBSD 1) -elseif(EXISTS ${CROSS_ROOTFS}/usr/platform/i86pc) - set(CMAKE_SYSTEM_NAME SunOS) - set(ILLUMOS 1) -elseif(EXISTS ${CROSS_ROOTFS}/boot/system/develop/headers/config/HaikuConfig.h) - set(CMAKE_SYSTEM_NAME Haiku) - set(HAIKU 1) -else() - set(CMAKE_SYSTEM_NAME Linux) - set(LINUX 1) -endif() -set(CMAKE_SYSTEM_VERSION 1) - -if(EXISTS ${CROSS_ROOTFS}/etc/tizen-release) - set(TIZEN 1) -elseif(EXISTS ${CROSS_ROOTFS}/android_platform) - set(ANDROID 1) -endif() - -if(TARGET_ARCH_NAME STREQUAL "arm") - set(CMAKE_SYSTEM_PROCESSOR armv7l) - if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv7-alpine-linux-musleabihf) - set(TOOLCHAIN "armv7-alpine-linux-musleabihf") - elseif(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf) - set(TOOLCHAIN "armv6-alpine-linux-musleabihf") - else() - set(TOOLCHAIN "arm-linux-gnueabihf") - endif() - if(TIZEN) - set(TIZEN_TOOLCHAIN "armv7hl-tizen-linux-gnueabihf") - endif() -elseif(TARGET_ARCH_NAME STREQUAL "arm64") - set(CMAKE_SYSTEM_PROCESSOR aarch64) - if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/aarch64-alpine-linux-musl) - set(TOOLCHAIN "aarch64-alpine-linux-musl") - elseif(LINUX) - set(TOOLCHAIN "aarch64-linux-gnu") - if(TIZEN) - set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu") - endif() - elseif(FREEBSD) - set(triple "aarch64-unknown-freebsd12") - endif() -elseif(TARGET_ARCH_NAME STREQUAL "armel") - set(CMAKE_SYSTEM_PROCESSOR armv7l) - set(TOOLCHAIN "arm-linux-gnueabi") - if(TIZEN) - 
set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi") - endif() -elseif(TARGET_ARCH_NAME STREQUAL "armv6") - set(CMAKE_SYSTEM_PROCESSOR armv6l) - if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf) - set(TOOLCHAIN "armv6-alpine-linux-musleabihf") - else() - set(TOOLCHAIN "arm-linux-gnueabihf") - endif() -elseif(TARGET_ARCH_NAME STREQUAL "loongarch64") - set(CMAKE_SYSTEM_PROCESSOR "loongarch64") - if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/loongarch64-alpine-linux-musl) - set(TOOLCHAIN "loongarch64-alpine-linux-musl") - else() - set(TOOLCHAIN "loongarch64-linux-gnu") - endif() -elseif(TARGET_ARCH_NAME STREQUAL "ppc64le") - set(CMAKE_SYSTEM_PROCESSOR ppc64le) - if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/powerpc64le-alpine-linux-musl) - set(TOOLCHAIN "powerpc64le-alpine-linux-musl") - else() - set(TOOLCHAIN "powerpc64le-linux-gnu") - endif() -elseif(TARGET_ARCH_NAME STREQUAL "riscv64") - set(CMAKE_SYSTEM_PROCESSOR riscv64) - if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/riscv64-alpine-linux-musl) - set(TOOLCHAIN "riscv64-alpine-linux-musl") - else() - set(TOOLCHAIN "riscv64-linux-gnu") - if(TIZEN) - set(TIZEN_TOOLCHAIN "riscv64-tizen-linux-gnu") - endif() - endif() -elseif(TARGET_ARCH_NAME STREQUAL "s390x") - set(CMAKE_SYSTEM_PROCESSOR s390x) - if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/s390x-alpine-linux-musl) - set(TOOLCHAIN "s390x-alpine-linux-musl") - else() - set(TOOLCHAIN "s390x-linux-gnu") - endif() -elseif(TARGET_ARCH_NAME STREQUAL "x64") - set(CMAKE_SYSTEM_PROCESSOR x86_64) - if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/x86_64-alpine-linux-musl) - set(TOOLCHAIN "x86_64-alpine-linux-musl") - elseif(LINUX) - set(TOOLCHAIN "x86_64-linux-gnu") - if(TIZEN) - set(TIZEN_TOOLCHAIN "x86_64-tizen-linux-gnu") - endif() - elseif(FREEBSD) - set(triple "x86_64-unknown-freebsd12") - elseif(ILLUMOS) - set(TOOLCHAIN "x86_64-illumos") - elseif(HAIKU) - set(TOOLCHAIN "x86_64-unknown-haiku") - endif() -elseif(TARGET_ARCH_NAME STREQUAL "x86") - set(CMAKE_SYSTEM_PROCESSOR i686) - if(EXISTS 
${CROSS_ROOTFS}/usr/lib/gcc/i586-alpine-linux-musl) - set(TOOLCHAIN "i586-alpine-linux-musl") - else() - set(TOOLCHAIN "i686-linux-gnu") - endif() - if(TIZEN) - set(TIZEN_TOOLCHAIN "i586-tizen-linux-gnu") - endif() -else() - message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only arm, arm64, armel, armv6, loongarch64, ppc64le, riscv64, s390x, x64 and x86 are supported!") -endif() - -if(DEFINED ENV{TOOLCHAIN}) - set(TOOLCHAIN $ENV{TOOLCHAIN}) -endif() - -# Specify include paths -if(TIZEN) - function(find_toolchain_dir prefix) - # Dynamically find the version subdirectory - file(GLOB DIRECTORIES "${prefix}/*") - list(GET DIRECTORIES 0 FIRST_MATCH) - get_filename_component(TOOLCHAIN_VERSION ${FIRST_MATCH} NAME) - - set(TIZEN_TOOLCHAIN_PATH "${prefix}/${TOOLCHAIN_VERSION}" PARENT_SCOPE) - endfunction() - - if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$") - find_toolchain_dir("${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") - else() - find_toolchain_dir("${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}") - endif() - - message(STATUS "TIZEN_TOOLCHAIN_PATH set to: ${TIZEN_TOOLCHAIN_PATH}") - - include_directories(SYSTEM ${TIZEN_TOOLCHAIN_PATH}/include/c++) - include_directories(SYSTEM ${TIZEN_TOOLCHAIN_PATH}/include/c++/${TIZEN_TOOLCHAIN}) -endif() - -function(locate_toolchain_exec exec var) - set(TOOLSET_PREFIX ${TOOLCHAIN}-) - string(TOUPPER ${exec} EXEC_UPPERCASE) - if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "") - set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE) - return() - endif() - - find_program(EXEC_LOCATION_${exec} - NAMES - "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}" - "${TOOLSET_PREFIX}${exec}") - - if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND") - message(FATAL_ERROR "Unable to find toolchain executable. 
Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.") - endif() - set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE) -endfunction() - -if(ANDROID) - if(TARGET_ARCH_NAME STREQUAL "arm") - set(ANDROID_ABI armeabi-v7a) - elseif(TARGET_ARCH_NAME STREQUAL "arm64") - set(ANDROID_ABI arm64-v8a) - endif() - - # extract platform number required by the NDK's toolchain - file(READ "${CROSS_ROOTFS}/android_platform" RID_FILE_CONTENTS) - string(REPLACE "RID=" "" ANDROID_RID "${RID_FILE_CONTENTS}") - string(REGEX REPLACE ".*\\.([0-9]+)-.*" "\\1" ANDROID_PLATFORM "${ANDROID_RID}") - - set(ANDROID_TOOLCHAIN clang) - set(FEATURE_EVENT_TRACE 0) # disable event trace as there is no lttng-ust package in termux repository - set(CMAKE_SYSTEM_LIBRARY_PATH "${CROSS_ROOTFS}/usr/lib") - set(CMAKE_SYSTEM_INCLUDE_PATH "${CROSS_ROOTFS}/usr/include") - - # include official NDK toolchain script - include(${CROSS_ROOTFS}/../build/cmake/android.toolchain.cmake) -elseif(FREEBSD) - # we cross-compile by instructing clang - set(CMAKE_C_COMPILER_TARGET ${triple}) - set(CMAKE_CXX_COMPILER_TARGET ${triple}) - set(CMAKE_ASM_COMPILER_TARGET ${triple}) - set(CMAKE_SYSROOT "${CROSS_ROOTFS}") - set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fuse-ld=lld") - set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fuse-ld=lld") - set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -fuse-ld=lld") -elseif(ILLUMOS) - set(CMAKE_SYSROOT "${CROSS_ROOTFS}") - set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}") - set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp") - set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp") - - include_directories(SYSTEM ${CROSS_ROOTFS}/include) - - locate_toolchain_exec(gcc CMAKE_C_COMPILER) - locate_toolchain_exec(g++ CMAKE_CXX_COMPILER) -elseif(HAIKU) - set(CMAKE_SYSROOT "${CROSS_ROOTFS}") - set(CMAKE_PROGRAM_PATH "${CMAKE_PROGRAM_PATH};${CROSS_ROOTFS}/cross-tools-x86_64/bin") - set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}") - 
set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp") - set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp") - - locate_toolchain_exec(gcc CMAKE_C_COMPILER) - locate_toolchain_exec(g++ CMAKE_CXX_COMPILER) - - # let CMake set up the correct search paths - include(Platform/Haiku) -else() - set(CMAKE_SYSROOT "${CROSS_ROOTFS}") - - set(CMAKE_C_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr") - set(CMAKE_CXX_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr") - set(CMAKE_ASM_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr") -endif() - -# Specify link flags - -function(add_toolchain_linker_flag Flag) - set(Config "${ARGV1}") - set(CONFIG_SUFFIX "") - if (NOT Config STREQUAL "") - set(CONFIG_SUFFIX "_${Config}") - endif() - set("CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}_INIT" "${CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}_INIT} ${Flag}" PARENT_SCOPE) - set("CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}_INIT" "${CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}_INIT} ${Flag}" PARENT_SCOPE) -endfunction() - -if(LINUX) - add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/lib/${TOOLCHAIN}") - add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}") -endif() - -if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$") - if(TIZEN) - add_toolchain_linker_flag("-B${TIZEN_TOOLCHAIN_PATH}") - add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib") - add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib") - add_toolchain_linker_flag("-L${TIZEN_TOOLCHAIN_PATH}") - endif() -elseif(TARGET_ARCH_NAME MATCHES "^(arm64|x64|riscv64)$") - if(TIZEN) - add_toolchain_linker_flag("-B${TIZEN_TOOLCHAIN_PATH}") - add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib64") - add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib64") - add_toolchain_linker_flag("-L${TIZEN_TOOLCHAIN_PATH}") - - add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/lib64") - add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib64") - 
add_toolchain_linker_flag("-Wl,--rpath-link=${TIZEN_TOOLCHAIN_PATH}") - endif() -elseif(TARGET_ARCH_NAME STREQUAL "s390x") - add_toolchain_linker_flag("--target=${TOOLCHAIN}") -elseif(TARGET_ARCH_NAME STREQUAL "x86") - if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/i586-alpine-linux-musl) - add_toolchain_linker_flag("--target=${TOOLCHAIN}") - add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}") - endif() - add_toolchain_linker_flag(-m32) - if(TIZEN) - add_toolchain_linker_flag("-B${TIZEN_TOOLCHAIN_PATH}") - add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib") - add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib") - add_toolchain_linker_flag("-L${TIZEN_TOOLCHAIN_PATH}") - endif() -elseif(ILLUMOS) - add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib/amd64") - add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/amd64/lib") -elseif(HAIKU) - add_toolchain_linker_flag("-lnetwork") - add_toolchain_linker_flag("-lroot") -endif() - -# Specify compile options - -if((TARGET_ARCH_NAME MATCHES "^(arm|arm64|armel|armv6|loongarch64|ppc64le|riscv64|s390x|x64|x86)$" AND NOT ANDROID AND NOT FREEBSD) OR ILLUMOS OR HAIKU) - set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN}) - set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN}) - set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN}) -endif() - -if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$") - add_compile_options(-mthumb) - if (NOT DEFINED CLR_ARM_FPU_TYPE) - set (CLR_ARM_FPU_TYPE vfpv3) - endif (NOT DEFINED CLR_ARM_FPU_TYPE) - - add_compile_options (-mfpu=${CLR_ARM_FPU_TYPE}) - if (NOT DEFINED CLR_ARM_FPU_CAPABILITY) - set (CLR_ARM_FPU_CAPABILITY 0x7) - endif (NOT DEFINED CLR_ARM_FPU_CAPABILITY) - - add_definitions (-DCLR_ARM_FPU_CAPABILITY=${CLR_ARM_FPU_CAPABILITY}) - - # persist variables across multiple try_compile passes - list(APPEND CMAKE_TRY_COMPILE_PLATFORM_VARIABLES CLR_ARM_FPU_TYPE CLR_ARM_FPU_CAPABILITY) - - if(TARGET_ARCH_NAME STREQUAL "armel") - add_compile_options(-mfloat-abi=softfp) - endif() -elseif(TARGET_ARCH_NAME STREQUAL 
"s390x") - add_compile_options("--target=${TOOLCHAIN}") -elseif(TARGET_ARCH_NAME STREQUAL "x86") - if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/i586-alpine-linux-musl) - add_compile_options(--target=${TOOLCHAIN}) - endif() - add_compile_options(-m32) - add_compile_options(-Wno-error=unused-command-line-argument) -endif() - -if(TIZEN) - if(TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64|x86)$") - add_compile_options(-Wno-deprecated-declarations) # compile-time option - add_compile_options(-D__extern_always_inline=inline) # compile-time option - endif() -endif() - -# Set LLDB include and library paths for builds that need lldb. -if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$") - if(TARGET_ARCH_NAME STREQUAL "x86") - set(LLVM_CROSS_DIR "$ENV{LLVM_CROSS_HOME}") - else() # arm/armel case - set(LLVM_CROSS_DIR "$ENV{LLVM_ARM_HOME}") - endif() - if(LLVM_CROSS_DIR) - set(WITH_LLDB_LIBS "${LLVM_CROSS_DIR}/lib/" CACHE STRING "") - set(WITH_LLDB_INCLUDES "${LLVM_CROSS_DIR}/include" CACHE STRING "") - set(LLDB_H "${WITH_LLDB_INCLUDES}" CACHE STRING "") - set(LLDB "${LLVM_CROSS_DIR}/lib/liblldb.so" CACHE STRING "") - else() - if(TARGET_ARCH_NAME STREQUAL "x86") - set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/i386-linux-gnu" CACHE STRING "") - set(CHECK_LLVM_DIR "${CROSS_ROOTFS}/usr/lib/llvm-3.8/include") - if(EXISTS "${CHECK_LLVM_DIR}" AND IS_DIRECTORY "${CHECK_LLVM_DIR}") - set(WITH_LLDB_INCLUDES "${CHECK_LLVM_DIR}") - else() - set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include") - endif() - else() # arm/armel case - set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}" CACHE STRING "") - set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include" CACHE STRING "") - endif() - endif() -endif() - -# Set C++ standard library options if specified -set(CLR_CMAKE_CXX_STANDARD_LIBRARY "" CACHE STRING "Standard library flavor to link against. 
Only supported with the Clang compiler.") -if (CLR_CMAKE_CXX_STANDARD_LIBRARY) - add_compile_options($<$:--stdlib=${CLR_CMAKE_CXX_STANDARD_LIBRARY}>) - add_link_options($<$:--stdlib=${CLR_CMAKE_CXX_STANDARD_LIBRARY}>) -endif() - -option(CLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC "Statically link against the C++ standard library" OFF) -if(CLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC) - add_link_options($<$:-static-libstdc++>) -endif() - -set(CLR_CMAKE_CXX_ABI_LIBRARY "" CACHE STRING "C++ ABI implementation library to link against. Only supported with the Clang compiler.") -if (CLR_CMAKE_CXX_ABI_LIBRARY) - # The user may specify the ABI library with the 'lib' prefix, like 'libstdc++'. Strip the prefix here so the linker finds the right library. - string(REGEX REPLACE "^lib(.+)" "\\1" CLR_CMAKE_CXX_ABI_LIBRARY ${CLR_CMAKE_CXX_ABI_LIBRARY}) - # We need to specify this as a linker-backend option as Clang will filter this option out when linking to libc++. - add_link_options("LINKER:-l${CLR_CMAKE_CXX_ABI_LIBRARY}") -endif() - -set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) -set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) -set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) -set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) diff --git a/eng/common/cross/x64/tizen/tizen.patch b/eng/common/cross/x64/tizen/tizen.patch deleted file mode 100644 index 56fbc8810..000000000 --- a/eng/common/cross/x64/tizen/tizen.patch +++ /dev/null @@ -1,9 +0,0 @@ -diff -u -r a/usr/lib64/libc.so b/usr/lib64/libc.so ---- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900 -+++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900 -@@ -2,4 +2,4 @@ - Use the shared library, but some functions are only in - the static library, so try that secondarily. 
*/ - OUTPUT_FORMAT(elf64-x86-64) --GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib64/ld-linux-x86-64.so.2 ) ) -+GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-x86-64.so.2 ) ) diff --git a/eng/common/cross/x86/tizen/tizen.patch b/eng/common/cross/x86/tizen/tizen.patch deleted file mode 100644 index f4fe8838a..000000000 --- a/eng/common/cross/x86/tizen/tizen.patch +++ /dev/null @@ -1,9 +0,0 @@ -diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so ---- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900 -+++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900 -@@ -2,4 +2,4 @@ - Use the shared library, but some functions are only in - the static library, so try that secondarily. */ - OUTPUT_FORMAT(elf32-i386) --GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.2 ) ) -+GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.2 ) ) diff --git a/eng/common/darc-init.ps1 b/eng/common/darc-init.ps1 deleted file mode 100644 index e33743105..000000000 --- a/eng/common/darc-init.ps1 +++ /dev/null @@ -1,47 +0,0 @@ -param ( - $darcVersion = $null, - $versionEndpoint = 'https://maestro.dot.net/api/assets/darc-version?api-version=2020-02-20', - $verbosity = 'minimal', - $toolpath = $null -) - -. $PSScriptRoot\tools.ps1 - -function InstallDarcCli ($darcVersion, $toolpath) { - $darcCliPackageName = 'microsoft.dotnet.darc' - - $dotnetRoot = InitializeDotNetCli -install:$true - $dotnet = "$dotnetRoot\dotnet.exe" - $toolList = & "$dotnet" tool list -g - - if ($toolList -like "*$darcCliPackageName*") { - & "$dotnet" tool uninstall $darcCliPackageName -g - } - - # If the user didn't explicitly specify the darc version, - # query the Maestro API for the correct version of darc to install. 
- if (-not $darcVersion) { - $darcVersion = $(Invoke-WebRequest -Uri $versionEndpoint -UseBasicParsing).Content - } - - $arcadeServicesSource = 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json' - - Write-Host "Installing Darc CLI version $darcVersion..." - Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.' - if (-not $toolpath) { - Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity -g" - & "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g - }else { - Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity --tool-path '$toolpath'" - & "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath" - } -} - -try { - InstallDarcCli $darcVersion $toolpath -} -catch { - Write-Host $_.ScriptStackTrace - Write-PipelineTelemetryError -Category 'Darc' -Message $_ - ExitWithExitCode 1 -} \ No newline at end of file diff --git a/eng/common/darc-init.sh b/eng/common/darc-init.sh deleted file mode 100755 index e889f439b..000000000 --- a/eng/common/darc-init.sh +++ /dev/null @@ -1,82 +0,0 @@ -#!/usr/bin/env bash - -source="${BASH_SOURCE[0]}" -darcVersion='' -versionEndpoint='https://maestro.dot.net/api/assets/darc-version?api-version=2020-02-20' -verbosity='minimal' - -while [[ $# > 0 ]]; do - opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")" - case "$opt" in - --darcversion) - darcVersion=$2 - shift - ;; - --versionendpoint) - versionEndpoint=$2 - shift - ;; - --verbosity) - verbosity=$2 - shift - ;; - --toolpath) - toolpath=$2 - shift - ;; - *) - echo "Invalid argument: $1" - usage - exit 1 - ;; - esac - - shift -done - -# resolve $source until the file is no longer a symlink 
-while [[ -h "$source" ]]; do - scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - source="$(readlink "$source")" - # if $source was a relative symlink, we need to resolve it relative to the path where the - # symlink file was located - [[ $source != /* ]] && source="$scriptroot/$source" -done -scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - -. "$scriptroot/tools.sh" - -if [ -z "$darcVersion" ]; then - darcVersion=$(curl -X GET "$versionEndpoint" -H "accept: text/plain") -fi - -function InstallDarcCli { - local darc_cli_package_name="microsoft.dotnet.darc" - - InitializeDotNetCli true - local dotnet_root=$_InitializeDotNetCli - - if [ -z "$toolpath" ]; then - local tool_list=$($dotnet_root/dotnet tool list -g) - if [[ $tool_list = *$darc_cli_package_name* ]]; then - echo $($dotnet_root/dotnet tool uninstall $darc_cli_package_name -g) - fi - else - local tool_list=$($dotnet_root/dotnet tool list --tool-path "$toolpath") - if [[ $tool_list = *$darc_cli_package_name* ]]; then - echo $($dotnet_root/dotnet tool uninstall $darc_cli_package_name --tool-path "$toolpath") - fi - fi - - local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json" - - echo "Installing Darc CLI version $darcVersion..." - echo "You may need to restart your command shell if this is the first dotnet tool you have installed." 
- if [ -z "$toolpath" ]; then - echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g) - else - echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath") - fi -} - -InstallDarcCli diff --git a/eng/common/dotnet-install.cmd b/eng/common/dotnet-install.cmd deleted file mode 100644 index b1c2642e7..000000000 --- a/eng/common/dotnet-install.cmd +++ /dev/null @@ -1,2 +0,0 @@ -@echo off -powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0dotnet-install.ps1""" %*" \ No newline at end of file diff --git a/eng/common/dotnet-install.ps1 b/eng/common/dotnet-install.ps1 deleted file mode 100644 index 811f0f717..000000000 --- a/eng/common/dotnet-install.ps1 +++ /dev/null @@ -1,28 +0,0 @@ -[CmdletBinding(PositionalBinding=$false)] -Param( - [string] $verbosity = 'minimal', - [string] $architecture = '', - [string] $version = 'Latest', - [string] $runtime = 'dotnet', - [string] $RuntimeSourceFeed = '', - [string] $RuntimeSourceFeedKey = '' -) - -. 
$PSScriptRoot\tools.ps1 - -$dotnetRoot = Join-Path $RepoRoot '.dotnet' - -$installdir = $dotnetRoot -try { - if ($architecture -and $architecture.Trim() -eq 'x86') { - $installdir = Join-Path $installdir 'x86' - } - InstallDotNet $installdir $version $architecture $runtime $true -RuntimeSourceFeed $RuntimeSourceFeed -RuntimeSourceFeedKey $RuntimeSourceFeedKey -} -catch { - Write-Host $_.ScriptStackTrace - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_ - ExitWithExitCode 1 -} - -ExitWithExitCode 0 diff --git a/eng/common/dotnet-install.sh b/eng/common/dotnet-install.sh deleted file mode 100755 index 7b9d97e3b..000000000 --- a/eng/common/dotnet-install.sh +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env bash - -source="${BASH_SOURCE[0]}" -# resolve $source until the file is no longer a symlink -while [[ -h "$source" ]]; do - scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - source="$(readlink "$source")" - # if $source was a relative symlink, we need to resolve it relative to the path where the - # symlink file was located - [[ $source != /* ]] && source="$scriptroot/$source" -done -scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - -. 
"$scriptroot/tools.sh" - -version='Latest' -architecture='' -runtime='dotnet' -runtimeSourceFeed='' -runtimeSourceFeedKey='' -while [[ $# > 0 ]]; do - opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")" - case "$opt" in - -version|-v) - shift - version="$1" - ;; - -architecture|-a) - shift - architecture="$1" - ;; - -runtime|-r) - shift - runtime="$1" - ;; - -runtimesourcefeed) - shift - runtimeSourceFeed="$1" - ;; - -runtimesourcefeedkey) - shift - runtimeSourceFeedKey="$1" - ;; - *) - Write-PipelineTelemetryError -Category 'Build' -Message "Invalid argument: $1" - exit 1 - ;; - esac - shift -done - -# Use uname to determine what the CPU is, see https://en.wikipedia.org/wiki/Uname#Examples -cpuname=$(uname -m) -case $cpuname in - arm64|aarch64) - buildarch=arm64 - if [ "$(getconf LONG_BIT)" -lt 64 ]; then - # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS) - buildarch=arm - fi - ;; - loongarch64) - buildarch=loongarch64 - ;; - amd64|x86_64) - buildarch=x64 - ;; - armv*l) - buildarch=arm - ;; - i[3-6]86) - buildarch=x86 - ;; - riscv64) - buildarch=riscv64 - ;; - *) - echo "Unknown CPU $cpuname detected, treating it as x64" - buildarch=x64 - ;; -esac - -dotnetRoot="${repo_root}.dotnet" -if [[ $architecture != "" ]] && [[ $architecture != $buildarch ]]; then - dotnetRoot="$dotnetRoot/$architecture" -fi - -InstallDotNet "$dotnetRoot" $version "$architecture" $runtime true $runtimeSourceFeed $runtimeSourceFeedKey || { - local exit_code=$? - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "dotnet-install.sh failed (exit code '$exit_code')." >&2 - ExitWithExitCode $exit_code -} - -ExitWithExitCode 0 diff --git a/eng/common/dotnet.cmd b/eng/common/dotnet.cmd deleted file mode 100644 index 527fa4bb3..000000000 --- a/eng/common/dotnet.cmd +++ /dev/null @@ -1,7 +0,0 @@ -@echo off - -:: This script is used to install the .NET SDK. -:: It will also invoke the SDK with any provided arguments. 
- -powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0dotnet.ps1""" %*" -exit /b %ErrorLevel% diff --git a/eng/common/dotnet.ps1 b/eng/common/dotnet.ps1 deleted file mode 100644 index 45e5676c9..000000000 --- a/eng/common/dotnet.ps1 +++ /dev/null @@ -1,11 +0,0 @@ -# This script is used to install the .NET SDK. -# It will also invoke the SDK with any provided arguments. - -. $PSScriptRoot\tools.ps1 -$dotnetRoot = InitializeDotNetCli -install:$true - -# Invoke acquired SDK with args if they are provided -if ($args.count -gt 0) { - $env:DOTNET_NOLOGO=1 - & "$dotnetRoot\dotnet.exe" $args -} diff --git a/eng/common/dotnet.sh b/eng/common/dotnet.sh deleted file mode 100644 index 2ef682356..000000000 --- a/eng/common/dotnet.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env bash - -# This script is used to install the .NET SDK. -# It will also invoke the SDK with any provided arguments. - -source="${BASH_SOURCE[0]}" -# resolve $SOURCE until the file is no longer a symlink -while [[ -h $source ]]; do - scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - source="$(readlink "$source")" - - # if $source was a relative symlink, we need to resolve it relative to the path where the - # symlink file was located - [[ $source != /* ]] && source="$scriptroot/$source" -done -scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - -source $scriptroot/tools.sh -InitializeDotNetCli true # install - -# Invoke acquired SDK with args if they are provided -if [[ $# > 0 ]]; then - __dotnetDir=${_InitializeDotNetCli} - dotnetPath=${__dotnetDir}/dotnet - ${dotnetPath} "$@" -fi diff --git a/eng/common/enable-cross-org-publishing.ps1 b/eng/common/enable-cross-org-publishing.ps1 deleted file mode 100644 index da09da4f1..000000000 --- a/eng/common/enable-cross-org-publishing.ps1 +++ /dev/null @@ -1,13 +0,0 @@ -param( - [string] $token -) - - -. 
$PSScriptRoot\pipeline-logging-functions.ps1 - -# Write-PipelineSetVariable will no-op if a variable named $ci is not defined -# Since this script is only ever called in AzDO builds, just universally set it -$ci = $true - -Write-PipelineSetVariable -Name 'VSS_NUGET_ACCESSTOKEN' -Value $token -IsMultiJobVariable $false -Write-PipelineSetVariable -Name 'VSS_NUGET_URI_PREFIXES' -Value 'https://dnceng.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/dnceng/;https://devdiv.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/devdiv/' -IsMultiJobVariable $false diff --git a/eng/common/generate-locproject.ps1 b/eng/common/generate-locproject.ps1 deleted file mode 100644 index 524aaa57f..000000000 --- a/eng/common/generate-locproject.ps1 +++ /dev/null @@ -1,189 +0,0 @@ -Param( - [Parameter(Mandatory=$true)][string] $SourcesDirectory, # Directory where source files live; if using a Localize directory it should live in here - [string] $LanguageSet = 'VS_Main_Languages', # Language set to be used in the LocProject.json - [switch] $UseCheckedInLocProjectJson, # When set, generates a LocProject.json and compares it to one that already exists in the repo; otherwise just generates one - [switch] $CreateNeutralXlfs # Creates neutral xlf files. Only set to false when running locally -) - -# Generates LocProject.json files for the OneLocBuild task. OneLocBuildTask is described here: -# https://ceapex.visualstudio.com/CEINTL/_wiki/wikis/CEINTL.wiki/107/Localization-with-OneLocBuild-Task - -Set-StrictMode -Version 2.0 -$ErrorActionPreference = "Stop" -. 
$PSScriptRoot\pipeline-logging-functions.ps1 - -$exclusionsFilePath = "$SourcesDirectory\eng\Localize\LocExclusions.json" -$exclusions = @{ Exclusions = @() } -if (Test-Path -Path $exclusionsFilePath) -{ - $exclusions = Get-Content "$exclusionsFilePath" | ConvertFrom-Json -} - -Push-Location "$SourcesDirectory" # push location for Resolve-Path -Relative to work - -# Template files -$jsonFiles = @() -$jsonTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\.template\.config\\localize\\.+\.en\.json" } # .NET templating pattern -$jsonTemplateFiles | ForEach-Object { - $null = $_.Name -Match "(.+)\.[\w-]+\.json" # matches '[filename].[langcode].json - - $destinationFile = "$($_.Directory.FullName)\$($Matches.1).json" - $jsonFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru -} - -$jsonWinformsTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\\strings\.json" } # current winforms pattern - -$wxlFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\.+\.wxl" -And -Not( $_.Directory.Name -Match "\d{4}" ) } # localized files live in four digit lang ID directories; this excludes them -if (-not $wxlFiles) { - $wxlEnFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\1033\\.+\.wxl" } # pick up en files (1033 = en) specifically so we can copy them to use as the neutral xlf files - if ($wxlEnFiles) { - $wxlFiles = @() - $wxlEnFiles | ForEach-Object { - $destinationFile = "$($_.Directory.Parent.FullName)\$($_.Name)" - $wxlFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru - } - } -} - -$macosHtmlEnFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\.lproj\\.+\.html$" } # add installer HTML files -$macosHtmlFiles = @() -if ($macosHtmlEnFiles) { - $macosHtmlEnFiles | ForEach-Object { - 
$destinationFile = "$($_.Directory.Parent.FullName)\$($_.Name)" - $macosHtmlFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru - } -} - -$xlfFiles = @() - -$allXlfFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory\*\*.xlf" -$langXlfFiles = @() -if ($allXlfFiles) { - $null = $allXlfFiles[0].FullName -Match "\.([\w-]+)\.xlf" # matches '[langcode].xlf' - $firstLangCode = $Matches.1 - $langXlfFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory\*\*.$firstLangCode.xlf" -} -$langXlfFiles | ForEach-Object { - $null = $_.Name -Match "(.+)\.[\w-]+\.xlf" # matches '[filename].[langcode].xlf - - $destinationFile = "$($_.Directory.FullName)\$($Matches.1).xlf" - $xlfFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru -} - -$locFiles = $jsonFiles + $jsonWinformsTemplateFiles + $xlfFiles - -$locJson = @{ - Projects = @( - @{ - LanguageSet = $LanguageSet - LocItems = @( - $locFiles | ForEach-Object { - $outputPath = "$(($_.DirectoryName | Resolve-Path -Relative) + "\")" - $continue = $true - foreach ($exclusion in $exclusions.Exclusions) { - if ($_.FullName.Contains($exclusion)) - { - $continue = $false - } - } - $sourceFile = ($_.FullName | Resolve-Path -Relative) - if (!$CreateNeutralXlfs -and $_.Extension -eq '.xlf') { - Remove-Item -Path $sourceFile - } - if ($continue) - { - if ($_.Directory.Name -eq 'en' -and $_.Extension -eq '.json') { - return @{ - SourceFile = $sourceFile - CopyOption = "LangIDOnPath" - OutputPath = "$($_.Directory.Parent.FullName | Resolve-Path -Relative)\" - } - } else { - return @{ - SourceFile = $sourceFile - CopyOption = "LangIDOnName" - OutputPath = $outputPath - } - } - } - } - ) - }, - @{ - LanguageSet = $LanguageSet - CloneLanguageSet = "WiX_CloneLanguages" - LssFiles = @( "wxl_loc.lss" ) - LocItems = @( - $wxlFiles | ForEach-Object { - $outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\" - $continue = $true - foreach ($exclusion in $exclusions.Exclusions) { - if 
($_.FullName.Contains($exclusion)) { - $continue = $false - } - } - $sourceFile = ($_.FullName | Resolve-Path -Relative) - if ($continue) - { - return @{ - SourceFile = $sourceFile - CopyOption = "LangIDOnPath" - OutputPath = $outputPath - } - } - } - ) - }, - @{ - LanguageSet = $LanguageSet - CloneLanguageSet = "VS_macOS_CloneLanguages" - LssFiles = @( ".\eng\common\loc\P22DotNetHtmlLocalization.lss" ) - LocItems = @( - $macosHtmlFiles | ForEach-Object { - $outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\" - $continue = $true - foreach ($exclusion in $exclusions.Exclusions) { - if ($_.FullName.Contains($exclusion)) { - $continue = $false - } - } - $sourceFile = ($_.FullName | Resolve-Path -Relative) - $lciFile = $sourceFile + ".lci" - if ($continue) { - $result = @{ - SourceFile = $sourceFile - CopyOption = "LangIDOnPath" - OutputPath = $outputPath - } - if (Test-Path $lciFile -PathType Leaf) { - $result["LciFile"] = $lciFile - } - return $result - } - } - ) - } - ) -} - -$json = ConvertTo-Json $locJson -Depth 5 -Write-Host "LocProject.json generated:`n`n$json`n`n" -Pop-Location - -if (!$UseCheckedInLocProjectJson) { - New-Item "$SourcesDirectory\eng\Localize\LocProject.json" -Force # Need this to make sure the Localize directory is created - Set-Content "$SourcesDirectory\eng\Localize\LocProject.json" $json -} -else { - New-Item "$SourcesDirectory\eng\Localize\LocProject-generated.json" -Force # Need this to make sure the Localize directory is created - Set-Content "$SourcesDirectory\eng\Localize\LocProject-generated.json" $json - - if ((Get-FileHash "$SourcesDirectory\eng\Localize\LocProject-generated.json").Hash -ne (Get-FileHash "$SourcesDirectory\eng\Localize\LocProject.json").Hash) { - Write-PipelineTelemetryError -Category "OneLocBuild" -Message "Existing LocProject.json differs from generated LocProject.json. Download LocProject-generated.json and compare them." 
- - exit 1 - } - else { - Write-Host "Generated LocProject.json and current LocProject.json are identical." - } -} diff --git a/eng/common/generate-sbom-prep.ps1 b/eng/common/generate-sbom-prep.ps1 deleted file mode 100644 index a0c7d792a..000000000 --- a/eng/common/generate-sbom-prep.ps1 +++ /dev/null @@ -1,29 +0,0 @@ -Param( - [Parameter(Mandatory=$true)][string] $ManifestDirPath # Manifest directory where sbom will be placed -) - -. $PSScriptRoot\pipeline-logging-functions.ps1 - -# Normally - we'd listen to the manifest path given, but 1ES templates will overwrite if this level gets uploaded directly -# with their own overwriting ours. So we create it as a sub directory of the requested manifest path. -$ArtifactName = "${env:SYSTEM_STAGENAME}_${env:AGENT_JOBNAME}_SBOM" -$SafeArtifactName = $ArtifactName -replace '["/:<>\\|?@*"() ]', '_' -$SbomGenerationDir = Join-Path $ManifestDirPath $SafeArtifactName - -Write-Host "Artifact name before : $ArtifactName" -Write-Host "Artifact name after : $SafeArtifactName" - -Write-Host "Creating dir $ManifestDirPath" - -# create directory for sbom manifest to be placed -if (!(Test-Path -path $SbomGenerationDir)) -{ - New-Item -ItemType Directory -path $SbomGenerationDir - Write-Host "Successfully created directory $SbomGenerationDir" -} -else{ - Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder." 
-} - -Write-Host "Updating artifact name" -Write-Host "##vso[task.setvariable variable=ARTIFACT_NAME]$SafeArtifactName" diff --git a/eng/common/generate-sbom-prep.sh b/eng/common/generate-sbom-prep.sh deleted file mode 100755 index b8ecca72b..000000000 --- a/eng/common/generate-sbom-prep.sh +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env bash - -source="${BASH_SOURCE[0]}" - -# resolve $SOURCE until the file is no longer a symlink -while [[ -h $source ]]; do - scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - source="$(readlink "$source")" - - # if $source was a relative symlink, we need to resolve it relative to the path where the - # symlink file was located - [[ $source != /* ]] && source="$scriptroot/$source" -done -scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" -. $scriptroot/pipeline-logging-functions.sh - - -# replace all special characters with _, some builds use special characters like : in Agent.Jobname, that is not a permissible name while uploading artifacts. -artifact_name=$SYSTEM_STAGENAME"_"$AGENT_JOBNAME"_SBOM" -safe_artifact_name="${artifact_name//["/:<>\\|?@*$" ]/_}" -manifest_dir=$1 - -# Normally - we'd listen to the manifest path given, but 1ES templates will overwrite if this level gets uploaded directly -# with their own overwriting ours. So we create it as a sub directory of the requested manifest path. -sbom_generation_dir="$manifest_dir/$safe_artifact_name" - -if [ ! -d "$sbom_generation_dir" ] ; then - mkdir -p "$sbom_generation_dir" - echo "Sbom directory created." $sbom_generation_dir -else - Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder." 
-fi - -echo "Artifact name before : "$artifact_name -echo "Artifact name after : "$safe_artifact_name -export ARTIFACT_NAME=$safe_artifact_name -echo "##vso[task.setvariable variable=ARTIFACT_NAME]$safe_artifact_name" - -exit 0 diff --git a/eng/common/helixpublish.proj b/eng/common/helixpublish.proj deleted file mode 100644 index c1323bf41..000000000 --- a/eng/common/helixpublish.proj +++ /dev/null @@ -1,27 +0,0 @@ - - - - - msbuild - - - - - %(Identity) - - - - - - $(WorkItemDirectory) - $(WorkItemCommand) - $(WorkItemTimeout) - - - - - - - - - diff --git a/eng/common/init-tools-native.cmd b/eng/common/init-tools-native.cmd deleted file mode 100644 index 438cd548c..000000000 --- a/eng/common/init-tools-native.cmd +++ /dev/null @@ -1,3 +0,0 @@ -@echo off -powershell -NoProfile -NoLogo -ExecutionPolicy ByPass -command "& """%~dp0init-tools-native.ps1""" %*" -exit /b %ErrorLevel% \ No newline at end of file diff --git a/eng/common/init-tools-native.ps1 b/eng/common/init-tools-native.ps1 deleted file mode 100644 index 27ccdb9ec..000000000 --- a/eng/common/init-tools-native.ps1 +++ /dev/null @@ -1,203 +0,0 @@ -<# -.SYNOPSIS -Entry point script for installing native tools - -.DESCRIPTION -Reads $RepoRoot\global.json file to determine native assets to install -and executes installers for those tools - -.PARAMETER BaseUri -Base file directory or Url from which to acquire tool archives - -.PARAMETER InstallDirectory -Directory to install native toolset. 
This is a command-line override for the default -Install directory precedence order: -- InstallDirectory command-line override -- NETCOREENG_INSTALL_DIRECTORY environment variable -- (default) %USERPROFILE%/.netcoreeng/native - -.PARAMETER Clean -Switch specifying to not install anything, but cleanup native asset folders - -.PARAMETER Force -Clean and then install tools - -.PARAMETER DownloadRetries -Total number of retry attempts - -.PARAMETER RetryWaitTimeInSeconds -Wait time between retry attempts in seconds - -.PARAMETER GlobalJsonFile -File path to global.json file - -.PARAMETER PathPromotion -Optional switch to enable either promote native tools specified in the global.json to the path (in Azure Pipelines) -or break the build if a native tool is not found on the path (on a local dev machine) - -.NOTES -#> -[CmdletBinding(PositionalBinding=$false)] -Param ( - [string] $BaseUri = 'https://netcorenativeassets.blob.core.windows.net/resource-packages/external', - [string] $InstallDirectory, - [switch] $Clean = $False, - [switch] $Force = $False, - [int] $DownloadRetries = 5, - [int] $RetryWaitTimeInSeconds = 30, - [string] $GlobalJsonFile, - [switch] $PathPromotion -) - -if (!$GlobalJsonFile) { - $GlobalJsonFile = Join-Path (Get-Item $PSScriptRoot).Parent.Parent.FullName 'global.json' -} - -Set-StrictMode -version 2.0 -$ErrorActionPreference='Stop' - -. 
$PSScriptRoot\pipeline-logging-functions.ps1 -Import-Module -Name (Join-Path $PSScriptRoot 'native\CommonLibrary.psm1') - -try { - # Define verbose switch if undefined - $Verbose = $VerbosePreference -Eq 'Continue' - - $EngCommonBaseDir = Join-Path $PSScriptRoot 'native\' - $NativeBaseDir = $InstallDirectory - if (!$NativeBaseDir) { - $NativeBaseDir = CommonLibrary\Get-NativeInstallDirectory - } - $Env:CommonLibrary_NativeInstallDir = $NativeBaseDir - $InstallBin = Join-Path $NativeBaseDir 'bin' - $InstallerPath = Join-Path $EngCommonBaseDir 'install-tool.ps1' - - # Process tools list - Write-Host "Processing $GlobalJsonFile" - If (-Not (Test-Path $GlobalJsonFile)) { - Write-Host "Unable to find '$GlobalJsonFile'" - exit 0 - } - $NativeTools = Get-Content($GlobalJsonFile) -Raw | - ConvertFrom-Json | - Select-Object -Expand 'native-tools' -ErrorAction SilentlyContinue - if ($NativeTools) { - if ($PathPromotion -eq $True) { - $ArcadeToolsDirectory = "$env:SYSTEMDRIVE\arcade-tools" - if (Test-Path $ArcadeToolsDirectory) { # if this directory exists, we should use native tools on machine - $NativeTools.PSObject.Properties | ForEach-Object { - $ToolName = $_.Name - $ToolVersion = $_.Value - $InstalledTools = @{} - - if ((Get-Command "$ToolName" -ErrorAction SilentlyContinue) -eq $null) { - if ($ToolVersion -eq "latest") { - $ToolVersion = "" - } - $ToolDirectories = (Get-ChildItem -Path "$ArcadeToolsDirectory" -Filter "$ToolName-$ToolVersion*" | Sort-Object -Descending) - if ($ToolDirectories -eq $null) { - Write-Error "Unable to find directory for $ToolName $ToolVersion; please make sure the tool is installed on this image." - exit 1 - } - $ToolDirectory = $ToolDirectories[0] - $BinPathFile = "$($ToolDirectory.FullName)\binpath.txt" - if (-not (Test-Path -Path "$BinPathFile")) { - Write-Error "Unable to find binpath.txt in '$($ToolDirectory.FullName)' ($ToolName $ToolVersion); artifact is either installed incorrectly or is not a bootstrappable tool." 
- exit 1 - } - $BinPath = Get-Content "$BinPathFile" - $ToolPath = Convert-Path -Path $BinPath - Write-Host "Adding $ToolName to the path ($ToolPath)..." - Write-Host "##vso[task.prependpath]$ToolPath" - $env:PATH = "$ToolPath;$env:PATH" - $InstalledTools += @{ $ToolName = $ToolDirectory.FullName } - } - } - return $InstalledTools - } else { - $NativeTools.PSObject.Properties | ForEach-Object { - $ToolName = $_.Name - $ToolVersion = $_.Value - - if ((Get-Command "$ToolName" -ErrorAction SilentlyContinue) -eq $null) { - Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message "$ToolName not found on path. Please install $ToolName $ToolVersion before proceeding." - Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message "If this is running on a build machine, the arcade-tools directory was not found, which means there's an error with the image." - } - } - exit 0 - } - } else { - $NativeTools.PSObject.Properties | ForEach-Object { - $ToolName = $_.Name - $ToolVersion = $_.Value - $LocalInstallerArguments = @{ ToolName = "$ToolName" } - $LocalInstallerArguments += @{ InstallPath = "$InstallBin" } - $LocalInstallerArguments += @{ BaseUri = "$BaseUri" } - $LocalInstallerArguments += @{ CommonLibraryDirectory = "$EngCommonBaseDir" } - $LocalInstallerArguments += @{ Version = "$ToolVersion" } - - if ($Verbose) { - $LocalInstallerArguments += @{ Verbose = $True } - } - if (Get-Variable 'Force' -ErrorAction 'SilentlyContinue') { - if($Force) { - $LocalInstallerArguments += @{ Force = $True } - } - } - if ($Clean) { - $LocalInstallerArguments += @{ Clean = $True } - } - - Write-Verbose "Installing $ToolName version $ToolVersion" - Write-Verbose "Executing '$InstallerPath $($LocalInstallerArguments.Keys.ForEach({"-$_ '$($LocalInstallerArguments.$_)'"}) -join ' ')'" - & $InstallerPath @LocalInstallerArguments - if ($LASTEXITCODE -Ne "0") { - $errMsg = "$ToolName installation failed" - if ((Get-Variable 'DoNotAbortNativeToolsInstallationOnFailure' 
-ErrorAction 'SilentlyContinue') -and $DoNotAbortNativeToolsInstallationOnFailure) { - $showNativeToolsWarning = $true - if ((Get-Variable 'DoNotDisplayNativeToolsInstallationWarnings' -ErrorAction 'SilentlyContinue') -and $DoNotDisplayNativeToolsInstallationWarnings) { - $showNativeToolsWarning = $false - } - if ($showNativeToolsWarning) { - Write-Warning $errMsg - } - $toolInstallationFailure = $true - } else { - # We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482 - Write-Host $errMsg - exit 1 - } - } - } - - if ((Get-Variable 'toolInstallationFailure' -ErrorAction 'SilentlyContinue') -and $toolInstallationFailure) { - # We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482 - Write-Host 'Native tools bootstrap failed' - exit 1 - } - } - } - else { - Write-Host 'No native tools defined in global.json' - exit 0 - } - - if ($Clean) { - exit 0 - } - if (Test-Path $InstallBin) { - Write-Host 'Native tools are available from ' (Convert-Path -Path $InstallBin) - Write-Host "##vso[task.prependpath]$(Convert-Path -Path $InstallBin)" - return $InstallBin - } - elseif (-not ($PathPromotion)) { - Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message 'Native tools install directory does not exist, installation failed' - exit 1 - } - exit 0 -} -catch { - Write-Host $_.ScriptStackTrace - Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message $_ - ExitWithExitCode 1 -} diff --git a/eng/common/init-tools-native.sh b/eng/common/init-tools-native.sh deleted file mode 100755 index 3e6a8d6ac..000000000 --- a/eng/common/init-tools-native.sh +++ /dev/null @@ -1,238 +0,0 @@ -#!/usr/bin/env bash - -source="${BASH_SOURCE[0]}" -scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - -base_uri='https://netcorenativeassets.blob.core.windows.net/resource-packages/external' -install_directory='' -clean=false -force=false 
-download_retries=5 -retry_wait_time_seconds=30 -global_json_file="$(dirname "$(dirname "${scriptroot}")")/global.json" -declare -a native_assets - -. $scriptroot/pipeline-logging-functions.sh -. $scriptroot/native/common-library.sh - -while (($# > 0)); do - lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")" - case $lowerI in - --baseuri) - base_uri=$2 - shift 2 - ;; - --installdirectory) - install_directory=$2 - shift 2 - ;; - --clean) - clean=true - shift 1 - ;; - --force) - force=true - shift 1 - ;; - --donotabortonfailure) - donotabortonfailure=true - shift 1 - ;; - --donotdisplaywarnings) - donotdisplaywarnings=true - shift 1 - ;; - --downloadretries) - download_retries=$2 - shift 2 - ;; - --retrywaittimeseconds) - retry_wait_time_seconds=$2 - shift 2 - ;; - --help) - echo "Common settings:" - echo " --installdirectory Directory to install native toolset." - echo " This is a command-line override for the default" - echo " Install directory precedence order:" - echo " - InstallDirectory command-line override" - echo " - NETCOREENG_INSTALL_DIRECTORY environment variable" - echo " - (default) %USERPROFILE%/.netcoreeng/native" - echo "" - echo " --clean Switch specifying not to install anything, but cleanup native asset folders" - echo " --donotabortonfailure Switch specifiying whether to abort native tools installation on failure" - echo " --donotdisplaywarnings Switch specifiying whether to display warnings during native tools installation on failure" - echo " --force Clean and then install tools" - echo " --help Print help and exit" - echo "" - echo "Advanced settings:" - echo " --baseuri Base URI for where to download native tools from" - echo " --downloadretries Number of times a download should be attempted" - echo " --retrywaittimeseconds Wait time between download attempts" - echo "" - exit 0 - ;; - esac -done - -function ReadGlobalJsonNativeTools { - # happy path: we have a proper JSON parsing tool `jq(1)` in PATH! 
- if command -v jq &> /dev/null; then - - # jq: read each key/value pair under "native-tools" entry and emit: - # KEY="" VALUE="" - # followed by a null byte. - # - # bash: read line with null byte delimeter and push to array (for later `eval`uation). - - while IFS= read -rd '' line; do - native_assets+=("$line") - done < <(jq -r '. | - select(has("native-tools")) | - ."native-tools" | - keys[] as $k | - @sh "KEY=\($k) VALUE=\(.[$k])\u0000"' "$global_json_file") - - return - fi - - # Warning: falling back to manually parsing JSON, which is not recommended. - - # Following routine matches the output and escaping logic of jq(1)'s @sh formatter used above. - # It has been tested with several weird strings with escaped characters in entries (key and value) - # and results were compared with the output of jq(1) in binary representation using xxd(1); - # just before the assignment to 'native_assets' array (above and below). - - # try to capture the section under "native-tools". - if [[ ! "$(cat "$global_json_file")" =~ \"native-tools\"[[:space:]\:\{]*([^\}]+) ]]; then - return - fi - - section="${BASH_REMATCH[1]}" - - parseStarted=0 - possibleEnd=0 - escaping=0 - escaped=0 - isKey=1 - - for (( i=0; i<${#section}; i++ )); do - char="${section:$i:1}" - if ! ((parseStarted)) && [[ "$char" =~ [[:space:],:] ]]; then continue; fi - - if ! ((escaping)) && [[ "$char" == "\\" ]]; then - escaping=1 - elif ((escaping)) && ! ((escaped)); then - escaped=1 - fi - - if ! ((parseStarted)) && [[ "$char" == "\"" ]]; then - parseStarted=1 - possibleEnd=0 - elif [[ "$char" == "'" ]]; then - token="$token'\\\''" - possibleEnd=0 - elif ((escaping)) || [[ "$char" != "\"" ]]; then - token="$token$char" - possibleEnd=1 - fi - - if ((possibleEnd)) && ! ((escaping)) && [[ "$char" == "\"" ]]; then - # Use printf to unescape token to match jq(1)'s @sh formatting rules. - # do not use 'token="$(printf "$token")"' syntax, as $() eats the trailing linefeed. 
- printf -v token "'$token'" - - if ((isKey)); then - KEY="$token" - isKey=0 - else - line="KEY=$KEY VALUE=$token" - native_assets+=("$line") - isKey=1 - fi - - # reset for next token - parseStarted=0 - token= - elif ((escaping)) && ((escaped)); then - escaping=0 - escaped=0 - fi - done -} - -native_base_dir=$install_directory -if [[ -z $install_directory ]]; then - native_base_dir=$(GetNativeInstallDirectory) -fi - -install_bin="${native_base_dir}/bin" -installed_any=false - -ReadGlobalJsonNativeTools - -if [[ ${#native_assets[@]} -eq 0 ]]; then - echo "No native tools defined in global.json" - exit 0; -else - native_installer_dir="$scriptroot/native" - for index in "${!native_assets[@]}"; do - eval "${native_assets["$index"]}" - - installer_path="$native_installer_dir/install-$KEY.sh" - installer_command="$installer_path" - installer_command+=" --baseuri $base_uri" - installer_command+=" --installpath $install_bin" - installer_command+=" --version $VALUE" - echo $installer_command - - if [[ $force = true ]]; then - installer_command+=" --force" - fi - - if [[ $clean = true ]]; then - installer_command+=" --clean" - fi - - if [[ -a $installer_path ]]; then - $installer_command - if [[ $? 
!= 0 ]]; then - if [[ $donotabortonfailure = true ]]; then - if [[ $donotdisplaywarnings != true ]]; then - Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed" - fi - else - Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed" - exit 1 - fi - else - $installed_any = true - fi - else - if [[ $donotabortonfailure == true ]]; then - if [[ $donotdisplaywarnings != true ]]; then - Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed: no install script" - fi - else - Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed: no install script" - exit 1 - fi - fi - done -fi - -if [[ $clean = true ]]; then - exit 0 -fi - -if [[ -d $install_bin ]]; then - echo "Native tools are available from $install_bin" - echo "##vso[task.prependpath]$install_bin" -else - if [[ $installed_any = true ]]; then - Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Native tools install directory does not exist, installation failed" - exit 1 - fi -fi - -exit 0 diff --git a/eng/common/internal-feed-operations.ps1 b/eng/common/internal-feed-operations.ps1 deleted file mode 100644 index 92b77347d..000000000 --- a/eng/common/internal-feed-operations.ps1 +++ /dev/null @@ -1,132 +0,0 @@ -param( - [Parameter(Mandatory=$true)][string] $Operation, - [string] $AuthToken, - [string] $CommitSha, - [string] $RepoName, - [switch] $IsFeedPrivate -) - -$ErrorActionPreference = 'Stop' -Set-StrictMode -Version 2.0 -. $PSScriptRoot\tools.ps1 - -# Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed -# in build agents by CredProvider to authenticate the restore requests to internal feeds as specified in -# https://github.com/microsoft/artifacts-credprovider/blob/0f53327cd12fd893d8627d7b08a2171bf5852a41/README.md#environment-variables. 
This should ONLY be called from identified -# internal builds -function SetupCredProvider { - param( - [string] $AuthToken - ) - - # Install the Cred Provider NuGet plugin - Write-Host 'Setting up Cred Provider NuGet plugin in the agent...' - Write-Host "Getting 'installcredprovider.ps1' from 'https://github.com/microsoft/artifacts-credprovider'..." - - $url = 'https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.ps1' - - Write-Host "Writing the contents of 'installcredprovider.ps1' locally..." - Invoke-WebRequest $url -OutFile installcredprovider.ps1 - - Write-Host 'Installing plugin...' - .\installcredprovider.ps1 -Force - - Write-Host "Deleting local copy of 'installcredprovider.ps1'..." - Remove-Item .\installcredprovider.ps1 - - if (-Not("$env:USERPROFILE\.nuget\plugins\netcore")) { - Write-PipelineTelemetryError -Category 'Arcade' -Message 'CredProvider plugin was not installed correctly!' - ExitWithExitCode 1 - } - else { - Write-Host 'CredProvider plugin was installed correctly!' - } - - # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable - # feeds successfully - - $nugetConfigPath = Join-Path $RepoRoot "NuGet.config" - - if (-Not (Test-Path -Path $nugetConfigPath)) { - Write-PipelineTelemetryError -Category 'Build' -Message 'NuGet.config file not found in repo root!' 
- ExitWithExitCode 1 - } - - $endpoints = New-Object System.Collections.ArrayList - $nugetConfigPackageSources = Select-Xml -Path $nugetConfigPath -XPath "//packageSources/add[contains(@key, 'darc-int-')]/@value" | foreach{$_.Node.Value} - - if (($nugetConfigPackageSources | Measure-Object).Count -gt 0 ) { - foreach ($stableRestoreResource in $nugetConfigPackageSources) { - $trimmedResource = ([string]$stableRestoreResource).Trim() - [void]$endpoints.Add(@{endpoint="$trimmedResource"; password="$AuthToken"}) - } - } - - if (($endpoints | Measure-Object).Count -gt 0) { - $endpointCredentials = @{endpointCredentials=$endpoints} | ConvertTo-Json -Compress - - # Create the environment variables the AzDo way - Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $endpointCredentials -Properties @{ - 'variable' = 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' - 'issecret' = 'false' - } - - # We don't want sessions cached since we will be updating the endpoints quite frequently - Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data 'False' -Properties @{ - 'variable' = 'NUGET_CREDENTIALPROVIDER_SESSIONTOKENCACHE_ENABLED' - 'issecret' = 'false' - } - } - else - { - Write-Host 'No internal endpoints found in NuGet.config' - } -} - -#Workaround for https://github.com/microsoft/msbuild/issues/4430 -function InstallDotNetSdkAndRestoreArcade { - $dotnetTempDir = Join-Path $RepoRoot "dotnet" - $dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*) - $dotnet = "$dotnetTempDir\dotnet.exe" - $restoreProjPath = "$PSScriptRoot\restore.proj" - - Write-Host "Installing dotnet SDK version $dotnetSdkVersion to restore Arcade SDK..." - InstallDotNetSdk "$dotnetTempDir" "$dotnetSdkVersion" - - '' | Out-File "$restoreProjPath" - - & $dotnet restore $restoreProjPath - - Write-Host 'Arcade SDK restored!' 
- - if (Test-Path -Path $restoreProjPath) { - Remove-Item $restoreProjPath - } - - if (Test-Path -Path $dotnetTempDir) { - Remove-Item $dotnetTempDir -Recurse - } -} - -try { - Push-Location $PSScriptRoot - - if ($Operation -like 'setup') { - SetupCredProvider $AuthToken - } - elseif ($Operation -like 'install-restore') { - InstallDotNetSdkAndRestoreArcade - } - else { - Write-PipelineTelemetryError -Category 'Arcade' -Message "Unknown operation '$Operation'!" - ExitWithExitCode 1 - } -} -catch { - Write-Host $_.ScriptStackTrace - Write-PipelineTelemetryError -Category 'Arcade' -Message $_ - ExitWithExitCode 1 -} -finally { - Pop-Location -} diff --git a/eng/common/internal-feed-operations.sh b/eng/common/internal-feed-operations.sh deleted file mode 100755 index 9378223ba..000000000 --- a/eng/common/internal-feed-operations.sh +++ /dev/null @@ -1,141 +0,0 @@ -#!/usr/bin/env bash - -set -e - -# Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed -# in build agents by CredProvider to authenticate the restore requests to internal feeds as specified in -# https://github.com/microsoft/artifacts-credprovider/blob/0f53327cd12fd893d8627d7b08a2171bf5852a41/README.md#environment-variables. -# This should ONLY be called from identified internal builds -function SetupCredProvider { - local authToken=$1 - - # Install the Cred Provider NuGet plugin - echo "Setting up Cred Provider NuGet plugin in the agent..."... - echo "Getting 'installcredprovider.ps1' from 'https://github.com/microsoft/artifacts-credprovider'..." - - local url="https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.sh" - - echo "Writing the contents of 'installcredprovider.ps1' locally..." 
- local installcredproviderPath="installcredprovider.sh" - if command -v curl > /dev/null; then - curl $url > "$installcredproviderPath" - else - wget -q -O "$installcredproviderPath" "$url" - fi - - echo "Installing plugin..." - . "$installcredproviderPath" - - echo "Deleting local copy of 'installcredprovider.sh'..." - rm installcredprovider.sh - - if [ ! -d "$HOME/.nuget/plugins" ]; then - Write-PipelineTelemetryError -category 'Build' 'CredProvider plugin was not installed correctly!' - ExitWithExitCode 1 - else - echo "CredProvider plugin was installed correctly!" - fi - - # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable - # feeds successfully - - local nugetConfigPath="{$repo_root}NuGet.config" - - if [ ! "$nugetConfigPath" ]; then - Write-PipelineTelemetryError -category 'Build' "NuGet.config file not found in repo's root!" - ExitWithExitCode 1 - fi - - local endpoints='[' - local nugetConfigPackageValues=`cat "$nugetConfigPath" | grep "key=\"darc-int-"` - local pattern="value=\"(.*)\"" - - for value in $nugetConfigPackageValues - do - if [[ $value =~ $pattern ]]; then - local endpoint="${BASH_REMATCH[1]}" - endpoints+="{\"endpoint\": \"$endpoint\", \"password\": \"$authToken\"}," - fi - done - - endpoints=${endpoints%?} - endpoints+=']' - - if [ ${#endpoints} -gt 2 ]; then - local endpointCredentials="{\"endpointCredentials\": "$endpoints"}" - - echo "##vso[task.setvariable variable=VSS_NUGET_EXTERNAL_FEED_ENDPOINTS]$endpointCredentials" - echo "##vso[task.setvariable variable=NUGET_CREDENTIALPROVIDER_SESSIONTOKENCACHE_ENABLED]False" - else - echo "No internal endpoints found in NuGet.config" - fi -} - -# Workaround for https://github.com/microsoft/msbuild/issues/4430 -function InstallDotNetSdkAndRestoreArcade { - local dotnetTempDir="$repo_root/dotnet" - local dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*) - local 
restoreProjPath="$repo_root/eng/common/restore.proj" - - echo "Installing dotnet SDK version $dotnetSdkVersion to restore Arcade SDK..." - echo "" > "$restoreProjPath" - - InstallDotNetSdk "$dotnetTempDir" "$dotnetSdkVersion" - - local res=`$dotnetTempDir/dotnet restore $restoreProjPath` - echo "Arcade SDK restored!" - - # Cleanup - if [ "$restoreProjPath" ]; then - rm "$restoreProjPath" - fi - - if [ "$dotnetTempDir" ]; then - rm -r $dotnetTempDir - fi -} - -source="${BASH_SOURCE[0]}" -operation='' -authToken='' -repoName='' - -while [[ $# > 0 ]]; do - opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")" - case "$opt" in - --operation) - operation=$2 - shift - ;; - --authtoken) - authToken=$2 - shift - ;; - *) - echo "Invalid argument: $1" - usage - exit 1 - ;; - esac - - shift -done - -while [[ -h "$source" ]]; do - scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - source="$(readlink "$source")" - # if $source was a relative symlink, we need to resolve it relative to the path where the - # symlink file was located - [[ $source != /* ]] && source="$scriptroot/$source" -done -scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - -. "$scriptroot/tools.sh" - -if [ "$operation" = "setup" ]; then - SetupCredProvider $authToken -elif [ "$operation" = "install-restore" ]; then - InstallDotNetSdkAndRestoreArcade -else - echo "Unknown operation '$operation'!" 
-fi diff --git a/eng/common/internal/Directory.Build.props b/eng/common/internal/Directory.Build.props deleted file mode 100644 index f1d041c33..000000000 --- a/eng/common/internal/Directory.Build.props +++ /dev/null @@ -1,11 +0,0 @@ - - - - - false - false - - - - - diff --git a/eng/common/internal/NuGet.config b/eng/common/internal/NuGet.config deleted file mode 100644 index f70261ed6..000000000 --- a/eng/common/internal/NuGet.config +++ /dev/null @@ -1,10 +0,0 @@ - - - - - - - - - - diff --git a/eng/common/internal/Tools.csproj b/eng/common/internal/Tools.csproj deleted file mode 100644 index feaa6d208..000000000 --- a/eng/common/internal/Tools.csproj +++ /dev/null @@ -1,22 +0,0 @@ - - - - - net472 - false - false - - - - - - - - - - - - - - - diff --git a/eng/common/loc/P22DotNetHtmlLocalization.lss b/eng/common/loc/P22DotNetHtmlLocalization.lss deleted file mode 100644 index 5d892d619..000000000 --- a/eng/common/loc/P22DotNetHtmlLocalization.lss +++ /dev/null @@ -1,29 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/eng/common/msbuild.ps1 b/eng/common/msbuild.ps1 deleted file mode 100644 index f041e5ddd..000000000 --- a/eng/common/msbuild.ps1 +++ /dev/null @@ -1,28 +0,0 @@ -[CmdletBinding(PositionalBinding=$false)] -Param( - [string] $verbosity = 'minimal', - [bool] $warnAsError = $true, - [bool] $nodeReuse = $true, - [switch] $ci, - [switch] $prepareMachine, - [switch] $excludePrereleaseVS, - [string] $msbuildEngine = $null, - [Parameter(ValueFromRemainingArguments=$true)][String[]]$extraArgs -) - -. 
$PSScriptRoot\tools.ps1 - -try { - if ($ci) { - $nodeReuse = $false - } - - MSBuild @extraArgs -} -catch { - Write-Host $_.ScriptStackTrace - Write-PipelineTelemetryError -Category 'Build' -Message $_ - ExitWithExitCode 1 -} - -ExitWithExitCode 0 \ No newline at end of file diff --git a/eng/common/msbuild.sh b/eng/common/msbuild.sh deleted file mode 100755 index 20d3dad54..000000000 --- a/eng/common/msbuild.sh +++ /dev/null @@ -1,58 +0,0 @@ -#!/usr/bin/env bash - -source="${BASH_SOURCE[0]}" - -# resolve $source until the file is no longer a symlink -while [[ -h "$source" ]]; do - scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - source="$(readlink "$source")" - # if $source was a relative symlink, we need to resolve it relative to the path where the - # symlink file was located - [[ $source != /* ]] && source="$scriptroot/$source" -done -scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - -verbosity='minimal' -warn_as_error=true -node_reuse=true -prepare_machine=false -extra_args='' - -while (($# > 0)); do - lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")" - case $lowerI in - --verbosity) - verbosity=$2 - shift 2 - ;; - --warnaserror) - warn_as_error=$2 - shift 2 - ;; - --nodereuse) - node_reuse=$2 - shift 2 - ;; - --ci) - ci=true - shift 1 - ;; - --preparemachine) - prepare_machine=true - shift 1 - ;; - *) - extra_args="$extra_args $1" - shift 1 - ;; - esac -done - -. 
"$scriptroot/tools.sh" - -if [[ "$ci" == true ]]; then - node_reuse=false -fi - -MSBuild $extra_args -ExitWithExitCode 0 diff --git a/eng/common/native/CommonLibrary.psm1 b/eng/common/native/CommonLibrary.psm1 deleted file mode 100644 index f71f6af6c..000000000 --- a/eng/common/native/CommonLibrary.psm1 +++ /dev/null @@ -1,401 +0,0 @@ -<# -.SYNOPSIS -Helper module to install an archive to a directory - -.DESCRIPTION -Helper module to download and extract an archive to a specified directory - -.PARAMETER Uri -Uri of artifact to download - -.PARAMETER InstallDirectory -Directory to extract artifact contents to - -.PARAMETER Force -Force download / extraction if file or contents already exist. Default = False - -.PARAMETER DownloadRetries -Total number of retry attempts. Default = 5 - -.PARAMETER RetryWaitTimeInSeconds -Wait time between retry attempts in seconds. Default = 30 - -.NOTES -Returns False if download or extraction fail, True otherwise -#> -function DownloadAndExtract { - [CmdletBinding(PositionalBinding=$false)] - Param ( - [Parameter(Mandatory=$True)] - [string] $Uri, - [Parameter(Mandatory=$True)] - [string] $InstallDirectory, - [switch] $Force = $False, - [int] $DownloadRetries = 5, - [int] $RetryWaitTimeInSeconds = 30 - ) - # Define verbose switch if undefined - $Verbose = $VerbosePreference -Eq "Continue" - - $TempToolPath = CommonLibrary\Get-TempPathFilename -Path $Uri - - # Download native tool - $DownloadStatus = CommonLibrary\Get-File -Uri $Uri ` - -Path $TempToolPath ` - -DownloadRetries $DownloadRetries ` - -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds ` - -Force:$Force ` - -Verbose:$Verbose - - if ($DownloadStatus -Eq $False) { - Write-Error "Download failed from $Uri" - return $False - } - - # Extract native tool - $UnzipStatus = CommonLibrary\Expand-Zip -ZipPath $TempToolPath ` - -OutputDirectory $InstallDirectory ` - -Force:$Force ` - -Verbose:$Verbose - - if ($UnzipStatus -Eq $False) { - # Retry Download one more time with Force=true - 
$DownloadRetryStatus = CommonLibrary\Get-File -Uri $Uri ` - -Path $TempToolPath ` - -DownloadRetries 1 ` - -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds ` - -Force:$True ` - -Verbose:$Verbose - - if ($DownloadRetryStatus -Eq $False) { - Write-Error "Last attempt of download failed as well" - return $False - } - - # Retry unzip again one more time with Force=true - $UnzipRetryStatus = CommonLibrary\Expand-Zip -ZipPath $TempToolPath ` - -OutputDirectory $InstallDirectory ` - -Force:$True ` - -Verbose:$Verbose - if ($UnzipRetryStatus -Eq $False) - { - Write-Error "Last attempt of unzip failed as well" - # Clean up partial zips and extracts - if (Test-Path $TempToolPath) { - Remove-Item $TempToolPath -Force - } - if (Test-Path $InstallDirectory) { - Remove-Item $InstallDirectory -Force -Recurse - } - return $False - } - } - - return $True -} - -<# -.SYNOPSIS -Download a file, retry on failure - -.DESCRIPTION -Download specified file and retry if attempt fails - -.PARAMETER Uri -Uri of file to download. If Uri is a local path, the file will be copied instead of downloaded - -.PARAMETER Path -Path to download or copy uri file to - -.PARAMETER Force -Overwrite existing file if present. Default = False - -.PARAMETER DownloadRetries -Total number of retry attempts. 
Default = 5 - -.PARAMETER RetryWaitTimeInSeconds -Wait time between retry attempts in seconds Default = 30 - -#> -function Get-File { - [CmdletBinding(PositionalBinding=$false)] - Param ( - [Parameter(Mandatory=$True)] - [string] $Uri, - [Parameter(Mandatory=$True)] - [string] $Path, - [int] $DownloadRetries = 5, - [int] $RetryWaitTimeInSeconds = 30, - [switch] $Force = $False - ) - $Attempt = 0 - - if ($Force) { - if (Test-Path $Path) { - Remove-Item $Path -Force - } - } - if (Test-Path $Path) { - Write-Host "File '$Path' already exists, skipping download" - return $True - } - - $DownloadDirectory = Split-Path -ErrorAction Ignore -Path "$Path" -Parent - if (-Not (Test-Path $DownloadDirectory)) { - New-Item -path $DownloadDirectory -force -itemType "Directory" | Out-Null - } - - $TempPath = "$Path.tmp" - if (Test-Path -IsValid -Path $Uri) { - Write-Verbose "'$Uri' is a file path, copying temporarily to '$TempPath'" - Copy-Item -Path $Uri -Destination $TempPath - Write-Verbose "Moving temporary file to '$Path'" - Move-Item -Path $TempPath -Destination $Path - return $? 
- } - else { - Write-Verbose "Downloading $Uri" - # Don't display the console progress UI - it's a huge perf hit - $ProgressPreference = 'SilentlyContinue' - while($Attempt -Lt $DownloadRetries) - { - try { - Invoke-WebRequest -UseBasicParsing -Uri $Uri -OutFile $TempPath - Write-Verbose "Downloaded to temporary location '$TempPath'" - Move-Item -Path $TempPath -Destination $Path - Write-Verbose "Moved temporary file to '$Path'" - return $True - } - catch { - $Attempt++ - if ($Attempt -Lt $DownloadRetries) { - $AttemptsLeft = $DownloadRetries - $Attempt - Write-Warning "Download failed, $AttemptsLeft attempts remaining, will retry in $RetryWaitTimeInSeconds seconds" - Start-Sleep -Seconds $RetryWaitTimeInSeconds - } - else { - Write-Error $_ - Write-Error $_.Exception - } - } - } - } - - return $False -} - -<# -.SYNOPSIS -Generate a shim for a native tool - -.DESCRIPTION -Creates a wrapper script (shim) that passes arguments forward to native tool assembly - -.PARAMETER ShimName -The name of the shim - -.PARAMETER ShimDirectory -The directory where shims are stored - -.PARAMETER ToolFilePath -Path to file that shim forwards to - -.PARAMETER Force -Replace shim if already present. 
Default = False - -.NOTES -Returns $True if generating shim succeeds, $False otherwise -#> -function New-ScriptShim { - [CmdletBinding(PositionalBinding=$false)] - Param ( - [Parameter(Mandatory=$True)] - [string] $ShimName, - [Parameter(Mandatory=$True)] - [string] $ShimDirectory, - [Parameter(Mandatory=$True)] - [string] $ToolFilePath, - [Parameter(Mandatory=$True)] - [string] $BaseUri, - [switch] $Force - ) - try { - Write-Verbose "Generating '$ShimName' shim" - - if (-Not (Test-Path $ToolFilePath)){ - Write-Error "Specified tool file path '$ToolFilePath' does not exist" - return $False - } - - # WinShimmer is a small .NET Framework program that creates .exe shims to bootstrapped programs - # Many of the checks for installed programs expect a .exe extension for Windows tools, rather - # than a .bat or .cmd file. - # Source: https://github.com/dotnet/arcade/tree/master/src/WinShimmer - if (-Not (Test-Path "$ShimDirectory\WinShimmer\winshimmer.exe")) { - $InstallStatus = DownloadAndExtract -Uri "$BaseUri/windows/winshimmer/WinShimmer.zip" ` - -InstallDirectory $ShimDirectory\WinShimmer ` - -Force:$Force ` - -DownloadRetries 2 ` - -RetryWaitTimeInSeconds 5 ` - -Verbose:$Verbose - } - - if ((Test-Path (Join-Path $ShimDirectory "$ShimName.exe"))) { - Write-Host "$ShimName.exe already exists; replacing..." 
- Remove-Item (Join-Path $ShimDirectory "$ShimName.exe") - } - - & "$ShimDirectory\WinShimmer\winshimmer.exe" $ShimName $ToolFilePath $ShimDirectory - return $True - } - catch { - Write-Host $_ - Write-Host $_.Exception - return $False - } -} - -<# -.SYNOPSIS -Returns the machine architecture of the host machine - -.NOTES -Returns 'x64' on 64 bit machines - Returns 'x86' on 32 bit machines -#> -function Get-MachineArchitecture { - $ProcessorArchitecture = $Env:PROCESSOR_ARCHITECTURE - $ProcessorArchitectureW6432 = $Env:PROCESSOR_ARCHITEW6432 - if($ProcessorArchitecture -Eq "X86") - { - if(($ProcessorArchitectureW6432 -Eq "") -Or - ($ProcessorArchitectureW6432 -Eq "X86")) { - return "x86" - } - $ProcessorArchitecture = $ProcessorArchitectureW6432 - } - if (($ProcessorArchitecture -Eq "AMD64") -Or - ($ProcessorArchitecture -Eq "IA64") -Or - ($ProcessorArchitecture -Eq "ARM64") -Or - ($ProcessorArchitecture -Eq "LOONGARCH64") -Or - ($ProcessorArchitecture -Eq "RISCV64")) { - return "x64" - } - return "x86" -} - -<# -.SYNOPSIS -Get the name of a temporary folder under the native install directory -#> -function Get-TempDirectory { - return Join-Path (Get-NativeInstallDirectory) "temp/" -} - -function Get-TempPathFilename { - [CmdletBinding(PositionalBinding=$false)] - Param ( - [Parameter(Mandatory=$True)] - [string] $Path - ) - $TempDir = CommonLibrary\Get-TempDirectory - $TempFilename = Split-Path $Path -leaf - $TempPath = Join-Path $TempDir $TempFilename - return $TempPath -} - -<# -.SYNOPSIS -Returns the base directory to use for native tool installation - -.NOTES -Returns the value of the NETCOREENG_INSTALL_DIRECTORY if that environment variable -is set, or otherwise returns an install directory under the %USERPROFILE% -#> -function Get-NativeInstallDirectory { - $InstallDir = $Env:NETCOREENG_INSTALL_DIRECTORY - if (!$InstallDir) { - $InstallDir = Join-Path $Env:USERPROFILE ".netcoreeng/native/" - } - return $InstallDir -} - -<# -.SYNOPSIS -Unzip an archive - 
-.DESCRIPTION -Powershell module to unzip an archive to a specified directory - -.PARAMETER ZipPath (Required) -Path to archive to unzip - -.PARAMETER OutputDirectory (Required) -Output directory for archive contents - -.PARAMETER Force -Overwrite output directory contents if they already exist - -.NOTES -- Returns True and does not perform an extraction if output directory already exists but Overwrite is not True. -- Returns True if unzip operation is successful -- Returns False if Overwrite is True and it is unable to remove contents of OutputDirectory -- Returns False if unable to extract zip archive -#> -function Expand-Zip { - [CmdletBinding(PositionalBinding=$false)] - Param ( - [Parameter(Mandatory=$True)] - [string] $ZipPath, - [Parameter(Mandatory=$True)] - [string] $OutputDirectory, - [switch] $Force - ) - - Write-Verbose "Extracting '$ZipPath' to '$OutputDirectory'" - try { - if ((Test-Path $OutputDirectory) -And (-Not $Force)) { - Write-Host "Directory '$OutputDirectory' already exists, skipping extract" - return $True - } - if (Test-Path $OutputDirectory) { - Write-Verbose "'Force' is 'True', but '$OutputDirectory' exists, removing directory" - Remove-Item $OutputDirectory -Force -Recurse - if ($? -Eq $False) { - Write-Error "Unable to remove '$OutputDirectory'" - return $False - } - } - - $TempOutputDirectory = Join-Path "$(Split-Path -Parent $OutputDirectory)" "$(Split-Path -Leaf $OutputDirectory).tmp" - if (Test-Path $TempOutputDirectory) { - Remove-Item $TempOutputDirectory -Force -Recurse - } - New-Item -Path $TempOutputDirectory -Force -ItemType "Directory" | Out-Null - - Add-Type -assembly "system.io.compression.filesystem" - [io.compression.zipfile]::ExtractToDirectory("$ZipPath", "$TempOutputDirectory") - if ($? 
-Eq $False) { - Write-Error "Unable to extract '$ZipPath'" - return $False - } - - Move-Item -Path $TempOutputDirectory -Destination $OutputDirectory - } - catch { - Write-Host $_ - Write-Host $_.Exception - - return $False - } - return $True -} - -export-modulemember -function DownloadAndExtract -export-modulemember -function Expand-Zip -export-modulemember -function Get-File -export-modulemember -function Get-MachineArchitecture -export-modulemember -function Get-NativeInstallDirectory -export-modulemember -function Get-TempDirectory -export-modulemember -function Get-TempPathFilename -export-modulemember -function New-ScriptShim diff --git a/eng/common/native/common-library.sh b/eng/common/native/common-library.sh deleted file mode 100755 index 080c2c283..000000000 --- a/eng/common/native/common-library.sh +++ /dev/null @@ -1,172 +0,0 @@ -#!/usr/bin/env bash - -function GetNativeInstallDirectory { - local install_dir - - if [[ -z $NETCOREENG_INSTALL_DIRECTORY ]]; then - install_dir=$HOME/.netcoreeng/native/ - else - install_dir=$NETCOREENG_INSTALL_DIRECTORY - fi - - echo $install_dir - return 0 -} - -function GetTempDirectory { - - echo $(GetNativeInstallDirectory)temp/ - return 0 -} - -function ExpandZip { - local zip_path=$1 - local output_directory=$2 - local force=${3:-false} - - echo "Extracting $zip_path to $output_directory" - if [[ -d $output_directory ]] && [[ $force = false ]]; then - echo "Directory '$output_directory' already exists, skipping extract" - return 0 - fi - - if [[ -d $output_directory ]]; then - echo "'Force flag enabled, but '$output_directory' exists. Removing directory" - rm -rf $output_directory - if [[ $? != 0 ]]; then - Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to remove '$output_directory'" - return 1 - fi - fi - - echo "Creating directory: '$output_directory'" - mkdir -p $output_directory - - echo "Extracting archive" - tar -xf $zip_path -C $output_directory - if [[ $? 
!= 0 ]]; then - Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to extract '$zip_path'" - return 1 - fi - - return 0 -} - -function GetCurrentOS { - local unameOut="$(uname -s)" - case $unameOut in - Linux*) echo "Linux";; - Darwin*) echo "MacOS";; - esac - return 0 -} - -function GetFile { - local uri=$1 - local path=$2 - local force=${3:-false} - local download_retries=${4:-5} - local retry_wait_time_seconds=${5:-30} - - if [[ -f $path ]]; then - if [[ $force = false ]]; then - echo "File '$path' already exists. Skipping download" - return 0 - else - rm -rf $path - fi - fi - - if [[ -f $uri ]]; then - echo "'$uri' is a file path, copying file to '$path'" - cp $uri $path - return $? - fi - - echo "Downloading $uri" - # Use curl if available, otherwise use wget - if command -v curl > /dev/null; then - curl "$uri" -sSL --retry $download_retries --retry-delay $retry_wait_time_seconds --create-dirs -o "$path" --fail - else - wget -q -O "$path" "$uri" --tries="$download_retries" - fi - - return $? -} - -function GetTempPathFileName { - local path=$1 - - local temp_dir=$(GetTempDirectory) - local temp_file_name=$(basename $path) - echo $temp_dir$temp_file_name - return 0 -} - -function DownloadAndExtract { - local uri=$1 - local installDir=$2 - local force=${3:-false} - local download_retries=${4:-5} - local retry_wait_time_seconds=${5:-30} - - local temp_tool_path=$(GetTempPathFileName $uri) - - echo "downloading to: $temp_tool_path" - - # Download file - GetFile "$uri" "$temp_tool_path" $force $download_retries $retry_wait_time_seconds - if [[ $? != 0 ]]; then - Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to download '$uri' to '$temp_tool_path'." - return 1 - fi - - # Extract File - echo "extracting from $temp_tool_path to $installDir" - ExpandZip "$temp_tool_path" "$installDir" $force $download_retries $retry_wait_time_seconds - if [[ $? 
!= 0 ]]; then - Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to extract '$temp_tool_path' to '$installDir'." - return 1 - fi - - return 0 -} - -function NewScriptShim { - local shimpath=$1 - local tool_file_path=$2 - local force=${3:-false} - - echo "Generating '$shimpath' shim" - if [[ -f $shimpath ]]; then - if [[ $force = false ]]; then - echo "File '$shimpath' already exists." >&2 - return 1 - else - rm -rf $shimpath - fi - fi - - if [[ ! -f $tool_file_path ]]; then - # try to see if the path is lower cased - tool_file_path="$(echo $tool_file_path | tr "[:upper:]" "[:lower:]")" - if [[ ! -f $tool_file_path ]]; then - Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Specified tool file path:'$tool_file_path' does not exist" - return 1 - fi - fi - - local shim_contents=$'#!/usr/bin/env bash\n' - shim_contents+="SHIMARGS="$'$1\n' - shim_contents+="$tool_file_path"$' $SHIMARGS\n' - - # Write shim file - echo "$shim_contents" > $shimpath - - chmod +x $shimpath - - echo "Finished generating shim '$shimpath'" - - return $? -} - diff --git a/eng/common/native/init-compiler.sh b/eng/common/native/init-compiler.sh deleted file mode 100755 index 9a0e1f2b4..000000000 --- a/eng/common/native/init-compiler.sh +++ /dev/null @@ -1,146 +0,0 @@ -#!/bin/sh -# -# This file detects the C/C++ compiler and exports it to the CC/CXX environment variables -# -# NOTE: some scripts source this file and rely on stdout being empty, make sure -# to not output *anything* here, unless it is an error message that fails the -# build. - -if [ -z "$build_arch" ] || [ -z "$compiler" ]; then - echo "Usage..." - echo "build_arch= compiler= init-compiler.sh" - echo "Specify the target architecture." - echo "Specify the name of compiler (clang or gcc)." 
- exit 1 -fi - -case "$compiler" in - clang*|-clang*|--clang*) - # clangx.y or clang-x.y - version="$(echo "$compiler" | tr -d '[:alpha:]-=')" - majorVersion="${version%%.*}" - - # LLVM based on v18 released in early 2024, with two releases per year - maxVersion="$((18 + ((($(date +%Y) - 2024) * 12 + $(date +%-m) - 3) / 6)))" - compiler=clang - ;; - - gcc*|-gcc*|--gcc*) - # gccx.y or gcc-x.y - version="$(echo "$compiler" | tr -d '[:alpha:]-=')" - majorVersion="${version%%.*}" - - # GCC based on v14 released in early 2024, with one release per year - maxVersion="$((14 + ((($(date +%Y) - 2024) * 12 + $(date +%-m) - 3) / 12)))" - compiler=gcc - ;; -esac - -cxxCompiler="$compiler++" - -# clear the existing CC and CXX from environment -CC= -CXX= -LDFLAGS= - -if [ "$compiler" = "gcc" ]; then cxxCompiler="g++"; fi - -check_version_exists() { - desired_version=-1 - - # Set up the environment to be used for building with the desired compiler. - if command -v "$compiler-$1" > /dev/null; then - desired_version="-$1" - elif command -v "$compiler$1" > /dev/null; then - desired_version="$1" - fi - - echo "$desired_version" -} - -__baseOS="$(uname)" -set_compiler_version_from_CC() { - if [ "$__baseOS" = "Darwin" ]; then - # On Darwin, the versions from -version/-dumpversion refer to Xcode - # versions, not llvm versions, so we can't rely on them. - return - fi - - version="$("$CC" -dumpversion)" - if [ -z "$version" ]; then - echo "Error: $CC -dumpversion didn't provide a version" - exit 1 - fi - - # gcc and clang often display 3 part versions. However, gcc can show only 1 part in some environments. - IFS=. read -r majorVersion _ < /dev/null; then - echo "Error: No compatible version of $compiler was found within the range of $minVersion to $maxVersion. Please upgrade your toolchain or specify the compiler explicitly using CLR_CC and CLR_CXX environment variables." 
- exit 1 - fi - - CC="$(command -v "$compiler" 2> /dev/null)" - CXX="$(command -v "$cxxCompiler" 2> /dev/null)" - set_compiler_version_from_CC - fi - else - desired_version="$(check_version_exists "$majorVersion")" - if [ "$desired_version" = "-1" ]; then - echo "Error: Could not find specific version of $compiler: $majorVersion." - exit 1 - fi - fi - - if [ -z "$CC" ]; then - CC="$(command -v "$compiler$desired_version" 2> /dev/null)" - CXX="$(command -v "$cxxCompiler$desired_version" 2> /dev/null)" - if [ -z "$CXX" ]; then CXX="$(command -v "$cxxCompiler" 2> /dev/null)"; fi - set_compiler_version_from_CC - fi -else - if [ ! -f "$CLR_CC" ]; then - echo "Error: CLR_CC is set but path '$CLR_CC' does not exist" - exit 1 - fi - CC="$CLR_CC" - CXX="$CLR_CXX" - set_compiler_version_from_CC -fi - -if [ -z "$CC" ]; then - echo "Error: Unable to find $compiler." - exit 1 -fi - -if [ "$__baseOS" != "Darwin" ]; then - # On Darwin, we always want to use the Apple linker. - - # Only lld version >= 9 can be considered stable. lld supports s390x starting from 18.0. 
- if [ "$compiler" = "clang" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -ge 9 ] && { [ "$build_arch" != "s390x" ] || [ "$majorVersion" -ge 18 ]; }; then - if "$CC" -fuse-ld=lld -Wl,--version >/dev/null 2>&1; then - LDFLAGS="-fuse-ld=lld" - fi - fi -fi - -SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version" 2> /dev/null)" - -export CC CXX LDFLAGS SCAN_BUILD_COMMAND diff --git a/eng/common/native/init-distro-rid.sh b/eng/common/native/init-distro-rid.sh deleted file mode 100755 index 83ea7aab0..000000000 --- a/eng/common/native/init-distro-rid.sh +++ /dev/null @@ -1,110 +0,0 @@ -#!/bin/sh - -# getNonPortableDistroRid -# -# Input: -# targetOs: (str) -# targetArch: (str) -# rootfsDir: (str) -# -# Return: -# non-portable rid -getNonPortableDistroRid() -{ - targetOs="$1" - targetArch="$2" - rootfsDir="$3" - nonPortableRid="" - - if [ "$targetOs" = "linux" ]; then - # shellcheck disable=SC1091 - if [ -e "${rootfsDir}/etc/os-release" ]; then - . "${rootfsDir}/etc/os-release" - if echo "${VERSION_ID:-}" | grep -qE '^([[:digit:]]|\.)+$'; then - nonPortableRid="${ID}.${VERSION_ID}-${targetArch}" - else - # Rolling release distros either do not set VERSION_ID, set it as blank or - # set it to non-version looking string (such as TEMPLATE_VERSION_ID on ArchLinux); - # so omit it here to be consistent with everything else. - nonPortableRid="${ID}-${targetArch}" - fi - elif [ -e "${rootfsDir}/android_platform" ]; then - # shellcheck disable=SC1091 - . "${rootfsDir}/android_platform" - nonPortableRid="$RID" - fi - fi - - if [ "$targetOs" = "freebsd" ]; then - # $rootfsDir can be empty. freebsd-version is a shell script and should always work. - __freebsd_major_version=$("$rootfsDir"/bin/freebsd-version | cut -d'.' 
-f1) - nonPortableRid="freebsd.$__freebsd_major_version-${targetArch}" - elif command -v getprop >/dev/null && getprop ro.product.system.model | grep -qi android; then - __android_sdk_version=$(getprop ro.build.version.sdk) - nonPortableRid="android.$__android_sdk_version-${targetArch}" - elif [ "$targetOs" = "illumos" ]; then - __uname_version=$(uname -v) - nonPortableRid="illumos-${targetArch}" - elif [ "$targetOs" = "solaris" ]; then - __uname_version=$(uname -v) - __solaris_major_version=$(echo "$__uname_version" | cut -d'.' -f1) - nonPortableRid="solaris.$__solaris_major_version-${targetArch}" - elif [ "$targetOs" = "haiku" ]; then - __uname_release="$(uname -r)" - nonPortableRid=haiku.r"$__uname_release"-"$targetArch" - fi - - echo "$nonPortableRid" | tr '[:upper:]' '[:lower:]' -} - -# initDistroRidGlobal -# -# Input: -# os: (str) -# arch: (str) -# rootfsDir?: (nullable:string) -# -# Return: -# None -# -# Notes: -# It is important to note that the function does not return anything, but it -# exports the following variables on success: -# __DistroRid : Non-portable rid of the target platform. -# __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform. -initDistroRidGlobal() -{ - targetOs="$1" - targetArch="$2" - rootfsDir="" - if [ $# -ge 3 ]; then - rootfsDir="$3" - fi - - if [ -n "${rootfsDir}" ]; then - # We may have a cross build. Check for the existence of the rootfsDir - if [ ! -e "${rootfsDir}" ]; then - echo "Error: rootfsDir has been passed, but the location is not valid." - exit 1 - fi - fi - - __DistroRid=$(getNonPortableDistroRid "${targetOs}" "${targetArch}" "${rootfsDir}") - - if [ -z "${__PortableTargetOS:-}" ]; then - __PortableTargetOS="$targetOs" - - STRINGS="$(command -v strings || true)" - if [ -z "$STRINGS" ]; then - STRINGS="$(command -v llvm-strings || true)" - fi - - # Check for musl-based distros (e.g. Alpine Linux, Void Linux). 
- if "${rootfsDir}/usr/bin/ldd" --version 2>&1 | grep -q musl || - ( [ -n "$STRINGS" ] && "$STRINGS" "${rootfsDir}/usr/bin/ldd" 2>&1 | grep -q musl ); then - __PortableTargetOS="linux-musl" - fi - fi - - export __DistroRid __PortableTargetOS -} diff --git a/eng/common/native/init-os-and-arch.sh b/eng/common/native/init-os-and-arch.sh deleted file mode 100755 index 38921d433..000000000 --- a/eng/common/native/init-os-and-arch.sh +++ /dev/null @@ -1,85 +0,0 @@ -#!/bin/sh - -# Use uname to determine what the OS is. -OSName=$(uname -s | tr '[:upper:]' '[:lower:]') - -if command -v getprop && getprop ro.product.system.model 2>&1 | grep -qi android; then - OSName="android" -fi - -case "$OSName" in -freebsd|linux|netbsd|openbsd|sunos|android|haiku) - os="$OSName" ;; -darwin) - os=osx ;; -*) - echo "Unsupported OS $OSName detected!" - exit 1 ;; -esac - -# On Solaris, `uname -m` is discouraged, see https://docs.oracle.com/cd/E36784_01/html/E36870/uname-1.html -# and `uname -p` returns processor type (e.g. i386 on amd64). -# The appropriate tool to determine CPU is isainfo(1) https://docs.oracle.com/cd/E36784_01/html/E36870/isainfo-1.html. -if [ "$os" = "sunos" ]; then - if uname -o 2>&1 | grep -q illumos; then - os="illumos" - else - os="solaris" - fi - CPUName=$(isainfo -n) -else - # For the rest of the operating systems, use uname(1) to determine what the CPU is. - CPUName=$(uname -m) -fi - -case "$CPUName" in - arm64|aarch64) - arch=arm64 - if [ "$(getconf LONG_BIT)" -lt 64 ]; then - # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS) - arch=arm - fi - ;; - - loongarch64) - arch=loongarch64 - ;; - - riscv64) - arch=riscv64 - ;; - - amd64|x86_64) - arch=x64 - ;; - - armv7l|armv8l) - # shellcheck disable=SC1091 - if (NAME=""; . /etc/os-release; test "$NAME" = "Tizen"); then - arch=armel - else - arch=arm - fi - ;; - - armv6l) - arch=armv6 - ;; - - i[3-6]86) - echo "Unsupported CPU $CPUName detected, build might not succeed!" 
- arch=x86 - ;; - - s390x) - arch=s390x - ;; - - ppc64le) - arch=ppc64le - ;; - *) - echo "Unknown CPU $CPUName detected!" - exit 1 - ;; -esac diff --git a/eng/common/native/install-cmake-test.sh b/eng/common/native/install-cmake-test.sh deleted file mode 100755 index 8a5e7cf0d..000000000 --- a/eng/common/native/install-cmake-test.sh +++ /dev/null @@ -1,117 +0,0 @@ -#!/usr/bin/env bash - -source="${BASH_SOURCE[0]}" -scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - -. $scriptroot/common-library.sh - -base_uri= -install_path= -version= -clean=false -force=false -download_retries=5 -retry_wait_time_seconds=30 - -while (($# > 0)); do - lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")" - case $lowerI in - --baseuri) - base_uri=$2 - shift 2 - ;; - --installpath) - install_path=$2 - shift 2 - ;; - --version) - version=$2 - shift 2 - ;; - --clean) - clean=true - shift 1 - ;; - --force) - force=true - shift 1 - ;; - --downloadretries) - download_retries=$2 - shift 2 - ;; - --retrywaittimeseconds) - retry_wait_time_seconds=$2 - shift 2 - ;; - --help) - echo "Common settings:" - echo " --baseuri Base file directory or Url wrom which to acquire tool archives" - echo " --installpath Base directory to install native tool to" - echo " --clean Don't install the tool, just clean up the current install of the tool" - echo " --force Force install of tools even if they previously exist" - echo " --help Print help and exit" - echo "" - echo "Advanced settings:" - echo " --downloadretries Total number of retry attempts" - echo " --retrywaittimeseconds Wait time between retry attempts in seconds" - echo "" - exit 0 - ;; - esac -done - -tool_name="cmake-test" -tool_os=$(GetCurrentOS) -tool_folder="$(echo $tool_os | tr "[:upper:]" "[:lower:]")" -tool_arch="x86_64" -tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch" -tool_install_directory="$install_path/$tool_name/$version" -tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name" 
-shim_path="$install_path/$tool_name.sh" -uri="${base_uri}/$tool_folder/$tool_name/$tool_name_moniker.tar.gz" - -# Clean up tool and installers -if [[ $clean = true ]]; then - echo "Cleaning $tool_install_directory" - if [[ -d $tool_install_directory ]]; then - rm -rf $tool_install_directory - fi - - echo "Cleaning $shim_path" - if [[ -f $shim_path ]]; then - rm -rf $shim_path - fi - - tool_temp_path=$(GetTempPathFileName $uri) - echo "Cleaning $tool_temp_path" - if [[ -f $tool_temp_path ]]; then - rm -rf $tool_temp_path - fi - - exit 0 -fi - -# Install tool -if [[ -f $tool_file_path ]] && [[ $force = false ]]; then - echo "$tool_name ($version) already exists, skipping install" - exit 0 -fi - -DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds - -if [[ $? != 0 ]]; then - Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed' - exit 1 -fi - -# Generate Shim -# Always rewrite shims so that we are referencing the expected version -NewScriptShim $shim_path $tool_file_path true - -if [[ $? != 0 ]]; then - Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed' - exit 1 -fi - -exit 0 diff --git a/eng/common/native/install-cmake.sh b/eng/common/native/install-cmake.sh deleted file mode 100755 index de496beeb..000000000 --- a/eng/common/native/install-cmake.sh +++ /dev/null @@ -1,117 +0,0 @@ -#!/usr/bin/env bash - -source="${BASH_SOURCE[0]}" -scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - -. 
$scriptroot/common-library.sh - -base_uri= -install_path= -version= -clean=false -force=false -download_retries=5 -retry_wait_time_seconds=30 - -while (($# > 0)); do - lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")" - case $lowerI in - --baseuri) - base_uri=$2 - shift 2 - ;; - --installpath) - install_path=$2 - shift 2 - ;; - --version) - version=$2 - shift 2 - ;; - --clean) - clean=true - shift 1 - ;; - --force) - force=true - shift 1 - ;; - --downloadretries) - download_retries=$2 - shift 2 - ;; - --retrywaittimeseconds) - retry_wait_time_seconds=$2 - shift 2 - ;; - --help) - echo "Common settings:" - echo " --baseuri Base file directory or Url wrom which to acquire tool archives" - echo " --installpath Base directory to install native tool to" - echo " --clean Don't install the tool, just clean up the current install of the tool" - echo " --force Force install of tools even if they previously exist" - echo " --help Print help and exit" - echo "" - echo "Advanced settings:" - echo " --downloadretries Total number of retry attempts" - echo " --retrywaittimeseconds Wait time between retry attempts in seconds" - echo "" - exit 0 - ;; - esac -done - -tool_name="cmake" -tool_os=$(GetCurrentOS) -tool_folder="$(echo $tool_os | tr "[:upper:]" "[:lower:]")" -tool_arch="x86_64" -tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch" -tool_install_directory="$install_path/$tool_name/$version" -tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name" -shim_path="$install_path/$tool_name.sh" -uri="${base_uri}/$tool_folder/$tool_name/$tool_name_moniker.tar.gz" - -# Clean up tool and installers -if [[ $clean = true ]]; then - echo "Cleaning $tool_install_directory" - if [[ -d $tool_install_directory ]]; then - rm -rf $tool_install_directory - fi - - echo "Cleaning $shim_path" - if [[ -f $shim_path ]]; then - rm -rf $shim_path - fi - - tool_temp_path=$(GetTempPathFileName $uri) - echo "Cleaning $tool_temp_path" - if [[ -f $tool_temp_path ]]; then - rm -rf 
$tool_temp_path - fi - - exit 0 -fi - -# Install tool -if [[ -f $tool_file_path ]] && [[ $force = false ]]; then - echo "$tool_name ($version) already exists, skipping install" - exit 0 -fi - -DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds - -if [[ $? != 0 ]]; then - Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed' - exit 1 -fi - -# Generate Shim -# Always rewrite shims so that we are referencing the expected version -NewScriptShim $shim_path $tool_file_path true - -if [[ $? != 0 ]]; then - Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed' - exit 1 -fi - -exit 0 diff --git a/eng/common/native/install-dependencies.sh b/eng/common/native/install-dependencies.sh deleted file mode 100644 index 477a44f33..000000000 --- a/eng/common/native/install-dependencies.sh +++ /dev/null @@ -1,62 +0,0 @@ -#!/bin/sh - -set -e - -# This is a simple script primarily used for CI to install necessary dependencies -# -# Usage: -# -# ./install-dependencies.sh - -os="$(echo "$1" | tr "[:upper:]" "[:lower:]")" - -if [ -z "$os" ]; then - . "$(dirname "$0")"/init-os-and-arch.sh -fi - -case "$os" in - linux) - if [ -e /etc/os-release ]; then - . 
/etc/os-release - fi - - if [ "$ID" = "debian" ] || [ "$ID_LIKE" = "debian" ]; then - apt update - - apt install -y build-essential gettext locales cmake llvm clang lld lldb liblldb-dev libunwind8-dev libicu-dev liblttng-ust-dev \ - libssl-dev libkrb5-dev pigz cpio - - localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8 - elif [ "$ID" = "fedora" ] || [ "$ID" = "rhel" ] || [ "$ID" = "azurelinux" ]; then - pkg_mgr="$(command -v tdnf 2>/dev/null || command -v dnf)" - $pkg_mgr install -y cmake llvm lld lldb clang python curl libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio - elif [ "$ID" = "alpine" ]; then - apk add build-base cmake bash curl clang llvm-dev lld lldb krb5-dev lttng-ust-dev icu-dev openssl-dev pigz cpio - else - echo "Unsupported distro. distro: $ID" - exit 1 - fi - ;; - - osx|maccatalyst|ios|iossimulator|tvos|tvossimulator) - echo "Installed xcode version: $(xcode-select -p)" - - export HOMEBREW_NO_INSTALL_CLEANUP=1 - export HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1 - # Skip brew update for now, see https://github.com/actions/setup-python/issues/577 - # brew update --preinstall - brew bundle --no-upgrade --file=- < -[CmdletBinding(PositionalBinding=$false)] -Param ( - [Parameter(Mandatory=$True)] - [string] $ToolName, - [Parameter(Mandatory=$True)] - [string] $InstallPath, - [Parameter(Mandatory=$True)] - [string] $BaseUri, - [Parameter(Mandatory=$True)] - [string] $Version, - [string] $CommonLibraryDirectory = $PSScriptRoot, - [switch] $Force = $False, - [switch] $Clean = $False, - [int] $DownloadRetries = 5, - [int] $RetryWaitTimeInSeconds = 30 -) - -. 
$PSScriptRoot\..\pipeline-logging-functions.ps1 - -# Import common library modules -Import-Module -Name (Join-Path $CommonLibraryDirectory "CommonLibrary.psm1") - -try { - # Define verbose switch if undefined - $Verbose = $VerbosePreference -Eq "Continue" - - $Arch = CommonLibrary\Get-MachineArchitecture - $ToolOs = "win64" - if($Arch -Eq "x32") { - $ToolOs = "win32" - } - $ToolNameMoniker = "$ToolName-$Version-$ToolOs-$Arch" - $ToolInstallDirectory = Join-Path $InstallPath "$ToolName\$Version\" - $Uri = "$BaseUri/windows/$ToolName/$ToolNameMoniker.zip" - $ShimPath = Join-Path $InstallPath "$ToolName.exe" - - if ($Clean) { - Write-Host "Cleaning $ToolInstallDirectory" - if (Test-Path $ToolInstallDirectory) { - Remove-Item $ToolInstallDirectory -Force -Recurse - } - Write-Host "Cleaning $ShimPath" - if (Test-Path $ShimPath) { - Remove-Item $ShimPath -Force - } - $ToolTempPath = CommonLibrary\Get-TempPathFilename -Path $Uri - Write-Host "Cleaning $ToolTempPath" - if (Test-Path $ToolTempPath) { - Remove-Item $ToolTempPath -Force - } - exit 0 - } - - # Install tool - if ((Test-Path $ToolInstallDirectory) -And (-Not $Force)) { - Write-Verbose "$ToolName ($Version) already exists, skipping install" - } - else { - $InstallStatus = CommonLibrary\DownloadAndExtract -Uri $Uri ` - -InstallDirectory $ToolInstallDirectory ` - -Force:$Force ` - -DownloadRetries $DownloadRetries ` - -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds ` - -Verbose:$Verbose - - if ($InstallStatus -Eq $False) { - Write-PipelineTelemetryError "Installation failed" -Category "NativeToolsetBootstrapping" - exit 1 - } - } - - $ToolFilePath = Get-ChildItem $ToolInstallDirectory -Recurse -Filter "$ToolName.exe" | % { $_.FullName } - if (@($ToolFilePath).Length -Gt 1) { - Write-Error "There are multiple copies of $ToolName in $($ToolInstallDirectory): `n$(@($ToolFilePath | out-string))" - exit 1 - } elseif (@($ToolFilePath).Length -Lt 1) { - Write-Host "$ToolName was not found in $ToolInstallDirectory." 
- exit 1 - } - - # Generate shim - # Always rewrite shims so that we are referencing the expected version - $GenerateShimStatus = CommonLibrary\New-ScriptShim -ShimName $ToolName ` - -ShimDirectory $InstallPath ` - -ToolFilePath "$ToolFilePath" ` - -BaseUri $BaseUri ` - -Force:$Force ` - -Verbose:$Verbose - - if ($GenerateShimStatus -Eq $False) { - Write-PipelineTelemetryError "Generate shim failed" -Category "NativeToolsetBootstrapping" - return 1 - } - - exit 0 -} -catch { - Write-Host $_.ScriptStackTrace - Write-PipelineTelemetryError -Category "NativeToolsetBootstrapping" -Message $_ - exit 1 -} diff --git a/eng/common/pipeline-logging-functions.ps1 b/eng/common/pipeline-logging-functions.ps1 deleted file mode 100644 index 8e422c561..000000000 --- a/eng/common/pipeline-logging-functions.ps1 +++ /dev/null @@ -1,260 +0,0 @@ -# Source for this file was taken from https://github.com/microsoft/azure-pipelines-task-lib/blob/11c9439d4af17e6475d9fe058e6b2e03914d17e6/powershell/VstsTaskSdk/LoggingCommandFunctions.ps1 and modified. - -# NOTE: You should not be calling these method directly as they are likely to change. Instead you should be calling the Write-Pipeline* functions defined in tools.ps1 - -$script:loggingCommandPrefix = '##vso[' -$script:loggingCommandEscapeMappings = @( # TODO: WHAT ABOUT "="? WHAT ABOUT "%"? - New-Object psobject -Property @{ Token = ';' ; Replacement = '%3B' } - New-Object psobject -Property @{ Token = "`r" ; Replacement = '%0D' } - New-Object psobject -Property @{ Token = "`n" ; Replacement = '%0A' } - New-Object psobject -Property @{ Token = "]" ; Replacement = '%5D' } -) -# TODO: BUG: Escape % ??? -# TODO: Add test to verify don't need to escape "=". 
- -# Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set -function Write-PipelineTelemetryError { - [CmdletBinding()] - param( - [Parameter(Mandatory = $true)] - [string]$Category, - [Parameter(Mandatory = $true)] - [string]$Message, - [Parameter(Mandatory = $false)] - [string]$Type = 'error', - [string]$ErrCode, - [string]$SourcePath, - [string]$LineNumber, - [string]$ColumnNumber, - [switch]$AsOutput, - [switch]$Force) - - $PSBoundParameters.Remove('Category') | Out-Null - - if ($Force -Or ((Test-Path variable:ci) -And $ci)) { - $Message = "(NETCORE_ENGINEERING_TELEMETRY=$Category) $Message" - } - $PSBoundParameters.Remove('Message') | Out-Null - $PSBoundParameters.Add('Message', $Message) - Write-PipelineTaskError @PSBoundParameters -} - -# Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set -function Write-PipelineTaskError { - [CmdletBinding()] - param( - [Parameter(Mandatory = $true)] - [string]$Message, - [Parameter(Mandatory = $false)] - [string]$Type = 'error', - [string]$ErrCode, - [string]$SourcePath, - [string]$LineNumber, - [string]$ColumnNumber, - [switch]$AsOutput, - [switch]$Force - ) - - if (!$Force -And (-Not (Test-Path variable:ci) -Or !$ci)) { - if ($Type -eq 'error') { - Write-Host $Message -ForegroundColor Red - return - } - elseif ($Type -eq 'warning') { - Write-Host $Message -ForegroundColor Yellow - return - } - } - - if (($Type -ne 'error') -and ($Type -ne 'warning')) { - Write-Host $Message - return - } - $PSBoundParameters.Remove('Force') | Out-Null - if (-not $PSBoundParameters.ContainsKey('Type')) { - $PSBoundParameters.Add('Type', 'error') - } - Write-LogIssue @PSBoundParameters -} - -function Write-PipelineSetVariable { - [CmdletBinding()] - param( - [Parameter(Mandatory = $true)] - [string]$Name, - [string]$Value, - [switch]$Secret, - [switch]$AsOutput, - [bool]$IsMultiJobVariable = $true) - - if ((Test-Path variable:ci) -And $ci) { - Write-LoggingCommand -Area 
'task' -Event 'setvariable' -Data $Value -Properties @{ - 'variable' = $Name - 'isSecret' = $Secret - 'isOutput' = $IsMultiJobVariable - } -AsOutput:$AsOutput - } -} - -function Write-PipelinePrependPath { - [CmdletBinding()] - param( - [Parameter(Mandatory = $true)] - [string]$Path, - [switch]$AsOutput) - - if ((Test-Path variable:ci) -And $ci) { - Write-LoggingCommand -Area 'task' -Event 'prependpath' -Data $Path -AsOutput:$AsOutput - } -} - -function Write-PipelineSetResult { - [CmdletBinding()] - param( - [ValidateSet("Succeeded", "SucceededWithIssues", "Failed", "Cancelled", "Skipped")] - [Parameter(Mandatory = $true)] - [string]$Result, - [string]$Message) - if ((Test-Path variable:ci) -And $ci) { - Write-LoggingCommand -Area 'task' -Event 'complete' -Data $Message -Properties @{ - 'result' = $Result - } - } -} - -<######################################## -# Private functions. -########################################> -function Format-LoggingCommandData { - [CmdletBinding()] - param([string]$Value, [switch]$Reverse) - - if (!$Value) { - return '' - } - - if (!$Reverse) { - foreach ($mapping in $script:loggingCommandEscapeMappings) { - $Value = $Value.Replace($mapping.Token, $mapping.Replacement) - } - } - else { - for ($i = $script:loggingCommandEscapeMappings.Length - 1 ; $i -ge 0 ; $i--) { - $mapping = $script:loggingCommandEscapeMappings[$i] - $Value = $Value.Replace($mapping.Replacement, $mapping.Token) - } - } - - return $Value -} - -function Format-LoggingCommand { - [CmdletBinding()] - param( - [Parameter(Mandatory = $true)] - [string]$Area, - [Parameter(Mandatory = $true)] - [string]$Event, - [string]$Data, - [hashtable]$Properties) - - # Append the preamble. - [System.Text.StringBuilder]$sb = New-Object -TypeName System.Text.StringBuilder - $null = $sb.Append($script:loggingCommandPrefix).Append($Area).Append('.').Append($Event) - - # Append the properties. 
- if ($Properties) { - $first = $true - foreach ($key in $Properties.Keys) { - [string]$value = Format-LoggingCommandData $Properties[$key] - if ($value) { - if ($first) { - $null = $sb.Append(' ') - $first = $false - } - else { - $null = $sb.Append(';') - } - - $null = $sb.Append("$key=$value") - } - } - } - - # Append the tail and output the value. - $Data = Format-LoggingCommandData $Data - $sb.Append(']').Append($Data).ToString() -} - -function Write-LoggingCommand { - [CmdletBinding(DefaultParameterSetName = 'Parameters')] - param( - [Parameter(Mandatory = $true, ParameterSetName = 'Parameters')] - [string]$Area, - [Parameter(Mandatory = $true, ParameterSetName = 'Parameters')] - [string]$Event, - [Parameter(ParameterSetName = 'Parameters')] - [string]$Data, - [Parameter(ParameterSetName = 'Parameters')] - [hashtable]$Properties, - [Parameter(Mandatory = $true, ParameterSetName = 'Object')] - $Command, - [switch]$AsOutput) - - if ($PSCmdlet.ParameterSetName -eq 'Object') { - Write-LoggingCommand -Area $Command.Area -Event $Command.Event -Data $Command.Data -Properties $Command.Properties -AsOutput:$AsOutput - return - } - - $command = Format-LoggingCommand -Area $Area -Event $Event -Data $Data -Properties $Properties - if ($AsOutput) { - $command - } - else { - Write-Host $command - } -} - -function Write-LogIssue { - [CmdletBinding()] - param( - [ValidateSet('warning', 'error')] - [Parameter(Mandatory = $true)] - [string]$Type, - [string]$Message, - [string]$ErrCode, - [string]$SourcePath, - [string]$LineNumber, - [string]$ColumnNumber, - [switch]$AsOutput) - - $command = Format-LoggingCommand -Area 'task' -Event 'logissue' -Data $Message -Properties @{ - 'type' = $Type - 'code' = $ErrCode - 'sourcepath' = $SourcePath - 'linenumber' = $LineNumber - 'columnnumber' = $ColumnNumber - } - if ($AsOutput) { - return $command - } - - if ($Type -eq 'error') { - $foregroundColor = $host.PrivateData.ErrorForegroundColor - $backgroundColor = 
$host.PrivateData.ErrorBackgroundColor - if ($foregroundColor -isnot [System.ConsoleColor] -or $backgroundColor -isnot [System.ConsoleColor]) { - $foregroundColor = [System.ConsoleColor]::Red - $backgroundColor = [System.ConsoleColor]::Black - } - } - else { - $foregroundColor = $host.PrivateData.WarningForegroundColor - $backgroundColor = $host.PrivateData.WarningBackgroundColor - if ($foregroundColor -isnot [System.ConsoleColor] -or $backgroundColor -isnot [System.ConsoleColor]) { - $foregroundColor = [System.ConsoleColor]::Yellow - $backgroundColor = [System.ConsoleColor]::Black - } - } - - Write-Host $command -ForegroundColor $foregroundColor -BackgroundColor $backgroundColor -} diff --git a/eng/common/pipeline-logging-functions.sh b/eng/common/pipeline-logging-functions.sh deleted file mode 100755 index 6a0b2255e..000000000 --- a/eng/common/pipeline-logging-functions.sh +++ /dev/null @@ -1,206 +0,0 @@ -#!/usr/bin/env bash - -function Write-PipelineTelemetryError { - local telemetry_category='' - local force=false - local function_args=() - local message='' - while [[ $# -gt 0 ]]; do - opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" - case "$opt" in - -category|-c) - telemetry_category=$2 - shift - ;; - -force|-f) - force=true - ;; - -*) - function_args+=("$1 $2") - shift - ;; - *) - message=$* - ;; - esac - shift - done - - if [[ $force != true ]] && [[ "$ci" != true ]]; then - echo "$message" >&2 - return - fi - - if [[ $force == true ]]; then - function_args+=("-force") - fi - message="(NETCORE_ENGINEERING_TELEMETRY=$telemetry_category) $message" - function_args+=("$message") - Write-PipelineTaskError ${function_args[@]} -} - -function Write-PipelineTaskError { - local message_type="error" - local sourcepath='' - local linenumber='' - local columnnumber='' - local error_code='' - local force=false - - while [[ $# -gt 0 ]]; do - opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" - case "$opt" in - -type|-t) - message_type=$2 - shift - ;; - 
-sourcepath|-s) - sourcepath=$2 - shift - ;; - -linenumber|-ln) - linenumber=$2 - shift - ;; - -columnnumber|-cn) - columnnumber=$2 - shift - ;; - -errcode|-e) - error_code=$2 - shift - ;; - -force|-f) - force=true - ;; - *) - break - ;; - esac - - shift - done - - if [[ $force != true ]] && [[ "$ci" != true ]]; then - echo "$@" >&2 - return - fi - - local message="##vso[task.logissue" - - message="$message type=$message_type" - - if [ -n "$sourcepath" ]; then - message="$message;sourcepath=$sourcepath" - fi - - if [ -n "$linenumber" ]; then - message="$message;linenumber=$linenumber" - fi - - if [ -n "$columnnumber" ]; then - message="$message;columnnumber=$columnnumber" - fi - - if [ -n "$error_code" ]; then - message="$message;code=$error_code" - fi - - message="$message]$*" - echo "$message" -} - -function Write-PipelineSetVariable { - if [[ "$ci" != true ]]; then - return - fi - - local name='' - local value='' - local secret=false - local as_output=false - local is_multi_job_variable=true - - while [[ $# -gt 0 ]]; do - opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" - case "$opt" in - -name|-n) - name=$2 - shift - ;; - -value|-v) - value=$2 - shift - ;; - -secret|-s) - secret=true - ;; - -as_output|-a) - as_output=true - ;; - -is_multi_job_variable|-i) - is_multi_job_variable=$2 - shift - ;; - esac - shift - done - - value=${value/;/%3B} - value=${value/\\r/%0D} - value=${value/\\n/%0A} - value=${value/]/%5D} - - local message="##vso[task.setvariable variable=$name;isSecret=$secret;isOutput=$is_multi_job_variable]$value" - - if [[ "$as_output" == true ]]; then - $message - else - echo "$message" - fi -} - -function Write-PipelinePrependPath { - local prepend_path='' - - while [[ $# -gt 0 ]]; do - opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" - case "$opt" in - -path|-p) - prepend_path=$2 - shift - ;; - esac - shift - done - - export PATH="$prepend_path:$PATH" - - if [[ "$ci" == true ]]; then - echo "##vso[task.prependpath]$prepend_path" - fi 
-} - -function Write-PipelineSetResult { - local result='' - local message='' - - while [[ $# -gt 0 ]]; do - opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" - case "$opt" in - -result|-r) - result=$2 - shift - ;; - -message|-m) - message=$2 - shift - ;; - esac - shift - done - - if [[ "$ci" == true ]]; then - echo "##vso[task.complete result=$result;]$message" - fi -} diff --git a/eng/common/post-build/check-channel-consistency.ps1 b/eng/common/post-build/check-channel-consistency.ps1 deleted file mode 100644 index 61208d2d1..000000000 --- a/eng/common/post-build/check-channel-consistency.ps1 +++ /dev/null @@ -1,48 +0,0 @@ -param( - [Parameter(Mandatory=$true)][string] $PromoteToChannels, # List of channels that the build should be promoted to - [Parameter(Mandatory=$true)][array] $AvailableChannelIds # List of channel IDs available in the YAML implementation -) - -try { - $ErrorActionPreference = 'Stop' - Set-StrictMode -Version 2.0 - - # `tools.ps1` checks $ci to perform some actions. Since the post-build - # scripts don't necessarily execute in the same agent that run the - # build.ps1/sh script this variable isn't automatically set. - $ci = $true - $disableConfigureToolsetImport = $true - . $PSScriptRoot\..\tools.ps1 - - if ($PromoteToChannels -eq "") { - Write-PipelineTaskError -Type 'warning' -Message "This build won't publish assets as it's not configured to any Maestro channel. If that wasn't intended use Darc to configure a default channel using add-default-channel for this branch or to promote it to a channel using add-build-to-channel. See https://github.com/dotnet/arcade/blob/main/Documentation/Darc.md#assigning-an-individual-build-to-a-channel for more info." 
- ExitWithExitCode 0 - } - - # Check that every channel that Maestro told to promote the build to - # is available in YAML - $PromoteToChannelsIds = $PromoteToChannels -split "\D" | Where-Object { $_ } - - $hasErrors = $false - - foreach ($id in $PromoteToChannelsIds) { - if (($id -ne 0) -and ($id -notin $AvailableChannelIds)) { - Write-PipelineTaskError -Message "Channel $id is not present in the post-build YAML configuration! This is an error scenario. Please contact @dnceng." - $hasErrors = $true - } - } - - # The `Write-PipelineTaskError` doesn't error the script and we might report several errors - # in the previous lines. The check below makes sure that we return an error state from the - # script if we reported any validation error - if ($hasErrors) { - ExitWithExitCode 1 - } - - Write-Host 'done.' -} -catch { - Write-Host $_ - Write-PipelineTelemetryError -Category 'CheckChannelConsistency' -Message "There was an error while trying to check consistency of Maestro default channels for the build and post-build YAML configuration." - ExitWithExitCode 1 -} diff --git a/eng/common/post-build/nuget-validation.ps1 b/eng/common/post-build/nuget-validation.ps1 deleted file mode 100644 index e5de00c89..000000000 --- a/eng/common/post-build/nuget-validation.ps1 +++ /dev/null @@ -1,22 +0,0 @@ -# This script validates NuGet package metadata information using this -# tool: https://github.com/NuGet/NuGetGallery/tree/jver-verify/src/VerifyMicrosoftPackage - -param( - [Parameter(Mandatory=$true)][string] $PackagesPath # Path to where the packages to be validated are -) - -# `tools.ps1` checks $ci to perform some actions. Since the post-build -# scripts don't necessarily execute in the same agent that run the -# build.ps1/sh script this variable isn't automatically set. -$ci = $true -$disableConfigureToolsetImport = $true -. 
$PSScriptRoot\..\tools.ps1 - -try { - & $PSScriptRoot\nuget-verification.ps1 ${PackagesPath}\*.nupkg -} -catch { - Write-Host $_.ScriptStackTrace - Write-PipelineTelemetryError -Category 'NuGetValidation' -Message $_ - ExitWithExitCode 1 -} diff --git a/eng/common/post-build/nuget-verification.ps1 b/eng/common/post-build/nuget-verification.ps1 deleted file mode 100644 index a365194a9..000000000 --- a/eng/common/post-build/nuget-verification.ps1 +++ /dev/null @@ -1,121 +0,0 @@ -<# -.SYNOPSIS - Verifies that Microsoft NuGet packages have proper metadata. -.DESCRIPTION - Downloads a verification tool and runs metadata validation on the provided NuGet packages. This script writes an - error if any of the provided packages fail validation. All arguments provided to this PowerShell script that do not - match PowerShell parameters are passed on to the verification tool downloaded during the execution of this script. -.PARAMETER NuGetExePath - The path to the nuget.exe binary to use. If not provided, nuget.exe will be downloaded into the -DownloadPath - directory. -.PARAMETER PackageSource - The package source to use to download the verification tool. If not provided, nuget.org will be used. -.PARAMETER DownloadPath - The directory path to download the verification tool and nuget.exe to. If not provided, - %TEMP%\NuGet.VerifyNuGetPackage will be used. -.PARAMETER args - Arguments that will be passed to the verification tool. -.EXAMPLE - PS> .\verify.ps1 *.nupkg - Verifies the metadata of all .nupkg files in the currect working directory. -.EXAMPLE - PS> .\verify.ps1 --help - Displays the help text of the downloaded verifiction tool. 
-.LINK - https://github.com/NuGet/NuGetGallery/blob/master/src/VerifyMicrosoftPackage/README.md -#> - -# This script was copied from https://github.com/NuGet/NuGetGallery/blob/3e25ad135146676bcab0050a516939d9958bfa5d/src/VerifyMicrosoftPackage/verify.ps1 - -[CmdletBinding(PositionalBinding = $false)] -param( - [string]$NuGetExePath, - [string]$PackageSource = "https://api.nuget.org/v3/index.json", - [string]$DownloadPath, - [Parameter(ValueFromRemainingArguments = $true)] - [string[]]$args -) - -# The URL to download nuget.exe. -$nugetExeUrl = "https://dist.nuget.org/win-x86-commandline/v4.9.4/nuget.exe" - -# The package ID of the verification tool. -$packageId = "NuGet.VerifyMicrosoftPackage" - -# The location that nuget.exe and the verification tool will be downloaded to. -if (!$DownloadPath) { - $DownloadPath = (Join-Path $env:TEMP "NuGet.VerifyMicrosoftPackage") -} - -$fence = New-Object -TypeName string -ArgumentList '=', 80 - -# Create the download directory, if it doesn't already exist. -if (!(Test-Path $DownloadPath)) { - New-Item -ItemType Directory $DownloadPath | Out-Null -} -Write-Host "Using download path: $DownloadPath" - -if ($NuGetExePath) { - $nuget = $NuGetExePath -} else { - $downloadedNuGetExe = Join-Path $DownloadPath "nuget.exe" - - # Download nuget.exe, if it doesn't already exist. - if (!(Test-Path $downloadedNuGetExe)) { - Write-Host "Downloading nuget.exe from $nugetExeUrl..." - $ProgressPreference = 'SilentlyContinue' - try { - Invoke-WebRequest $nugetExeUrl -OutFile $downloadedNuGetExe - $ProgressPreference = 'Continue' - } catch { - $ProgressPreference = 'Continue' - Write-Error $_ - Write-Error "nuget.exe failed to download." - exit - } - } - - $nuget = $downloadedNuGetExe -} - -Write-Host "Using nuget.exe path: $nuget" -Write-Host " " - -# Download the latest version of the verification tool. -Write-Host "Downloading the latest version of $packageId from $packageSource..." 
-Write-Host $fence -& $nuget install $packageId ` - -Prerelease ` - -OutputDirectory $DownloadPath ` - -Source $PackageSource -Write-Host $fence -Write-Host " " - -if ($LASTEXITCODE -ne 0) { - Write-Error "nuget.exe failed to fetch the verify tool." - exit -} - -# Find the most recently downloaded tool -Write-Host "Finding the most recently downloaded verification tool." -$verifyProbePath = Join-Path $DownloadPath "$packageId.*" -$verifyPath = Get-ChildItem -Path $verifyProbePath -Directory ` - | Sort-Object -Property LastWriteTime -Descending ` - | Select-Object -First 1 -$verify = Join-Path $verifyPath "tools\NuGet.VerifyMicrosoftPackage.exe" -Write-Host "Using verification tool: $verify" -Write-Host " " - -# Execute the verification tool. -Write-Host "Executing the verify tool..." -Write-Host $fence -& $verify $args -Write-Host $fence -Write-Host " " - -# Respond to the exit code. -if ($LASTEXITCODE -ne 0) { - Write-Error "The verify tool found some problems." -} else { - Write-Output "The verify tool succeeded." -} diff --git a/eng/common/post-build/publish-using-darc.ps1 b/eng/common/post-build/publish-using-darc.ps1 deleted file mode 100644 index 1eda208a3..000000000 --- a/eng/common/post-build/publish-using-darc.ps1 +++ /dev/null @@ -1,69 +0,0 @@ -param( - [Parameter(Mandatory=$true)][int] $BuildId, - [Parameter(Mandatory=$true)][int] $PublishingInfraVersion, - [Parameter(Mandatory=$true)][string] $AzdoToken, - [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net/', - [Parameter(Mandatory=$true)][string] $WaitPublishingFinish, - [Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters, - [Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters, - [Parameter(Mandatory=$false)][string] $RequireDefaultChannels, - [Parameter(Mandatory=$false)][string] $SkipAssetsPublishing -) - -try { - # `tools.ps1` checks $ci to perform some actions. 
Since the post-build - # scripts don't necessarily execute in the same agent that run the - # build.ps1/sh script this variable isn't automatically set. - $ci = $true - $disableConfigureToolsetImport = $true - . $PSScriptRoot\..\tools.ps1 - - $darc = Get-Darc - - $optionalParams = [System.Collections.ArrayList]::new() - - if ("" -ne $ArtifactsPublishingAdditionalParameters) { - $optionalParams.Add("--artifact-publishing-parameters") | Out-Null - $optionalParams.Add($ArtifactsPublishingAdditionalParameters) | Out-Null - } - - if ("" -ne $SymbolPublishingAdditionalParameters) { - $optionalParams.Add("--symbol-publishing-parameters") | Out-Null - $optionalParams.Add($SymbolPublishingAdditionalParameters) | Out-Null - } - - if ("false" -eq $WaitPublishingFinish) { - $optionalParams.Add("--no-wait") | Out-Null - } - - if ("true" -eq $RequireDefaultChannels) { - $optionalParams.Add("--default-channels-required") | Out-Null - } - - if ("true" -eq $SkipAssetsPublishing) { - $optionalParams.Add("--skip-assets-publishing") | Out-Null - } - - & $darc add-build-to-channel ` - --id $buildId ` - --publishing-infra-version $PublishingInfraVersion ` - --default-channels ` - --source-branch main ` - --azdev-pat "$AzdoToken" ` - --bar-uri "$MaestroApiEndPoint" ` - --ci ` - --verbose ` - @optionalParams - - if ($LastExitCode -ne 0) { - Write-Host "Problems using Darc to promote build ${buildId} to default channels. Stopping execution..." - exit 1 - } - - Write-Host 'done.' -} -catch { - Write-Host $_ - Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to publish build '$BuildId' to default channels." 
- ExitWithExitCode 1 -} diff --git a/eng/common/post-build/redact-logs.ps1 b/eng/common/post-build/redact-logs.ps1 deleted file mode 100644 index b7fc19591..000000000 --- a/eng/common/post-build/redact-logs.ps1 +++ /dev/null @@ -1,89 +0,0 @@ -[CmdletBinding(PositionalBinding=$False)] -param( - [Parameter(Mandatory=$true, Position=0)][string] $InputPath, - [Parameter(Mandatory=$true)][string] $BinlogToolVersion, - [Parameter(Mandatory=$false)][string] $DotnetPath, - [Parameter(Mandatory=$false)][string] $PackageFeed = 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json', - # File with strings to redact - separated by newlines. - # For comments start the line with '# ' - such lines are ignored - [Parameter(Mandatory=$false)][string] $TokensFilePath, - [Parameter(ValueFromRemainingArguments=$true)][String[]]$TokensToRedact -) - -try { - $ErrorActionPreference = 'Stop' - Set-StrictMode -Version 2.0 - - # `tools.ps1` checks $ci to perform some actions. Since the post-build - # scripts don't necessarily execute in the same agent that run the - # build.ps1/sh script this variable isn't automatically set. - $ci = $true - $disableConfigureToolsetImport = $true - . $PSScriptRoot\..\tools.ps1 - - $packageName = 'binlogtool' - - $dotnet = $DotnetPath - - if (!$dotnet) { - $dotnetRoot = InitializeDotNetCli -install:$true - $dotnet = "$dotnetRoot\dotnet.exe" - } - - $toolList = & "$dotnet" tool list -g - - if ($toolList -like "*$packageName*") { - & "$dotnet" tool uninstall $packageName -g - } - - $toolPath = "$PSScriptRoot\..\..\..\.tools" - $verbosity = 'minimal' - - New-Item -ItemType Directory -Force -Path $toolPath - - Push-Location -Path $toolPath - - try { - Write-Host "Installing Binlog redactor CLI..." 
- Write-Host "'$dotnet' new tool-manifest" - & "$dotnet" new tool-manifest - Write-Host "'$dotnet' tool install $packageName --local --add-source '$PackageFeed' -v $verbosity --version $BinlogToolVersion" - & "$dotnet" tool install $packageName --local --add-source "$PackageFeed" -v $verbosity --version $BinlogToolVersion - - if (Test-Path $TokensFilePath) { - Write-Host "Adding additional sensitive data for redaction from file: " $TokensFilePath - $TokensToRedact += Get-Content -Path $TokensFilePath | Foreach {$_.Trim()} | Where { $_ -notmatch "^# " } - } - - $optionalParams = [System.Collections.ArrayList]::new() - - Foreach ($p in $TokensToRedact) - { - if($p -match '^\$\(.*\)$') - { - Write-Host ("Ignoring token {0} as it is probably unexpanded AzDO variable" -f $p) - } - elseif($p) - { - $optionalParams.Add("-p:" + $p) | Out-Null - } - } - - & $dotnet binlogtool redact --input:$InputPath --recurse --in-place ` - @optionalParams - - if ($LastExitCode -ne 0) { - Write-PipelineTelemetryError -Category 'Redactor' -Type 'warning' -Message "Problems using Redactor tool (exit code: $LastExitCode). But ignoring them now." - } - } - finally { - Pop-Location - } - - Write-Host 'done.' -} -catch { - Write-Host $_ - Write-PipelineTelemetryError -Category 'Redactor' -Message "There was an error while trying to redact logs. Error: $_" - ExitWithExitCode 1 -} diff --git a/eng/common/post-build/sourcelink-validation.ps1 b/eng/common/post-build/sourcelink-validation.ps1 deleted file mode 100644 index 1976ef70f..000000000 --- a/eng/common/post-build/sourcelink-validation.ps1 +++ /dev/null @@ -1,327 +0,0 @@ -param( - [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where Symbols.NuGet packages to be checked are stored - [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation - [Parameter(Mandatory=$false)][string] $GHRepoName, # GitHub name of the repo including the Org. 
E.g., dotnet/arcade - [Parameter(Mandatory=$false)][string] $GHCommit, # GitHub commit SHA used to build the packages - [Parameter(Mandatory=$true)][string] $SourcelinkCliVersion # Version of SourceLink CLI to use -) - -$ErrorActionPreference = 'Stop' -Set-StrictMode -Version 2.0 - -# `tools.ps1` checks $ci to perform some actions. Since the post-build -# scripts don't necessarily execute in the same agent that run the -# build.ps1/sh script this variable isn't automatically set. -$ci = $true -$disableConfigureToolsetImport = $true -. $PSScriptRoot\..\tools.ps1 - -# Cache/HashMap (File -> Exist flag) used to consult whether a file exist -# in the repository at a specific commit point. This is populated by inserting -# all files present in the repo at a specific commit point. -$global:RepoFiles = @{} - -# Maximum number of jobs to run in parallel -$MaxParallelJobs = 16 - -$MaxRetries = 5 -$RetryWaitTimeInSeconds = 30 - -# Wait time between check for system load -$SecondsBetweenLoadChecks = 10 - -if (!$InputPath -or !(Test-Path $InputPath)){ - Write-Host "No files to validate." - ExitWithExitCode 0 -} - -$ValidatePackage = { - param( - [string] $PackagePath # Full path to a Symbols.NuGet package - ) - - . $using:PSScriptRoot\..\tools.ps1 - - # Ensure input file exist - if (!(Test-Path $PackagePath)) { - Write-Host "Input file does not exist: $PackagePath" - return [pscustomobject]@{ - result = 1 - packagePath = $PackagePath - } - } - - # Extensions for which we'll look for SourceLink information - # For now we'll only care about Portable & Embedded PDBs - $RelevantExtensions = @('.dll', '.exe', '.pdb') - - Write-Host -NoNewLine 'Validating ' ([System.IO.Path]::GetFileName($PackagePath)) '...' 
- - $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath) - $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId - $FailedFiles = 0 - - Add-Type -AssemblyName System.IO.Compression.FileSystem - - [System.IO.Directory]::CreateDirectory($ExtractPath) | Out-Null - - try { - $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath) - - $zip.Entries | - Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} | - ForEach-Object { - $FileName = $_.FullName - $Extension = [System.IO.Path]::GetExtension($_.Name) - $FakeName = -Join((New-Guid), $Extension) - $TargetFile = Join-Path -Path $ExtractPath -ChildPath $FakeName - - # We ignore resource DLLs - if ($FileName.EndsWith('.resources.dll')) { - return [pscustomobject]@{ - result = 0 - packagePath = $PackagePath - } - } - - [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true) - - $ValidateFile = { - param( - [string] $FullPath, # Full path to the module that has to be checked - [string] $RealPath, - [ref] $FailedFiles - ) - - $sourcelinkExe = "$env:USERPROFILE\.dotnet\tools" - $sourcelinkExe = Resolve-Path "$sourcelinkExe\sourcelink.exe" - $SourceLinkInfos = & $sourcelinkExe print-urls $FullPath | Out-String - - if ($LASTEXITCODE -eq 0 -and -not ([string]::IsNullOrEmpty($SourceLinkInfos))) { - $NumFailedLinks = 0 - - # We only care about Http addresses - $Matches = (Select-String '(http[s]?)(:\/\/)([^\s,]+)' -Input $SourceLinkInfos -AllMatches).Matches - - if ($Matches.Count -ne 0) { - $Matches.Value | - ForEach-Object { - $Link = $_ - $CommitUrl = "https://raw.githubusercontent.com/$%7Busing:GHRepoName%7D/$%7Busing:GHCommit%7D/" - - $FilePath = $Link.Replace($CommitUrl, "") - $Status = 200 - $Cache = $using:RepoFiles - - $attempts = 0 - - while ($attempts -lt $using:MaxRetries) { - if ( !($Cache.ContainsKey($FilePath)) ) { - try { - $Uri = $Link -as [System.URI] - - if ($Link -match "submodules") { - # Skip 
submodule links until sourcelink properly handles submodules - $Status = 200 - } - elseif ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) { - # Only GitHub links are valid - $Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode - } - else { - # If it's not a github link, we want to break out of the loop and not retry. - $Status = 0 - $attempts = $using:MaxRetries - } - } - catch { - Write-Host $_ - $Status = 0 - } - } - - if ($Status -ne 200) { - $attempts++ - - if ($attempts -lt $using:MaxRetries) - { - $attemptsLeft = $using:MaxRetries - $attempts - Write-Warning "Download failed, $attemptsLeft attempts remaining, will retry in $using:RetryWaitTimeInSeconds seconds" - Start-Sleep -Seconds $using:RetryWaitTimeInSeconds - } - else { - if ($NumFailedLinks -eq 0) { - if ($FailedFiles.Value -eq 0) { - Write-Host - } - - Write-Host "`tFile $RealPath has broken links:" - } - - Write-Host "`t`tFailed to retrieve $Link" - - $NumFailedLinks++ - } - } - else { - break - } - } - } - } - - if ($NumFailedLinks -ne 0) { - $FailedFiles.value++ - $global:LASTEXITCODE = 1 - } - } - } - - &$ValidateFile $TargetFile $FileName ([ref]$FailedFiles) - } - } - catch { - Write-Host $_ - } - finally { - $zip.Dispose() - } - - if ($FailedFiles -eq 0) { - Write-Host 'Passed.' - return [pscustomobject]@{ - result = 0 - packagePath = $PackagePath - } - } - else { - Write-PipelineTelemetryError -Category 'SourceLink' -Message "$PackagePath has broken SourceLink links." - return [pscustomobject]@{ - result = 1 - packagePath = $PackagePath - } - } -} - -function CheckJobResult( - $result, - $packagePath, - [ref]$ValidationFailures, - [switch]$logErrors) { - if ($result -ne '0') { - if ($logErrors) { - Write-PipelineTelemetryError -Category 'SourceLink' -Message "$packagePath has broken SourceLink links." 
- } - $ValidationFailures.Value++ - } -} - -function ValidateSourceLinkLinks { - if ($GHRepoName -ne '' -and !($GHRepoName -Match '^[^\s\/]+/[^\s\/]+$')) { - if (!($GHRepoName -Match '^[^\s-]+-[^\s]+$')) { - Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHRepoName should be in the format / or -. '$GHRepoName'" - ExitWithExitCode 1 - } - else { - $GHRepoName = $GHRepoName -replace '^([^\s-]+)-([^\s]+)$', '$1/$2'; - } - } - - if ($GHCommit -ne '' -and !($GHCommit -Match '^[0-9a-fA-F]{40}$')) { - Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHCommit should be a 40 chars hexadecimal string. '$GHCommit'" - ExitWithExitCode 1 - } - - if ($GHRepoName -ne '' -and $GHCommit -ne '') { - $RepoTreeURL = -Join('http://api.github.com/repos/', $GHRepoName, '/git/trees/', $GHCommit, '?recursive=1') - $CodeExtensions = @('.cs', '.vb', '.fs', '.fsi', '.fsx', '.fsscript') - - try { - # Retrieve the list of files in the repo at that particular commit point and store them in the RepoFiles hash - $Data = Invoke-WebRequest $RepoTreeURL -UseBasicParsing | ConvertFrom-Json | Select-Object -ExpandProperty tree - - foreach ($file in $Data) { - $Extension = [System.IO.Path]::GetExtension($file.path) - - if ($CodeExtensions.Contains($Extension)) { - $RepoFiles[$file.path] = 1 - } - } - } - catch { - Write-Host "Problems downloading the list of files from the repo. Url used: $RepoTreeURL . Execution will proceed without caching." - } - } - elseif ($GHRepoName -ne '' -or $GHCommit -ne '') { - Write-Host 'For using the http caching mechanism both GHRepoName and GHCommit should be informed.' 
- } - - if (Test-Path $ExtractPath) { - Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue - } - - $ValidationFailures = 0 - - # Process each NuGet package in parallel - Get-ChildItem "$InputPath\*.symbols.nupkg" | - ForEach-Object { - Write-Host "Starting $($_.FullName)" - Start-Job -ScriptBlock $ValidatePackage -ArgumentList $_.FullName | Out-Null - $NumJobs = @(Get-Job -State 'Running').Count - - while ($NumJobs -ge $MaxParallelJobs) { - Write-Host "There are $NumJobs validation jobs running right now. Waiting $SecondsBetweenLoadChecks seconds to check again." - sleep $SecondsBetweenLoadChecks - $NumJobs = @(Get-Job -State 'Running').Count - } - - foreach ($Job in @(Get-Job -State 'Completed')) { - $jobResult = Wait-Job -Id $Job.Id | Receive-Job - CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures) -LogErrors - Remove-Job -Id $Job.Id - } - } - - foreach ($Job in @(Get-Job)) { - $jobResult = Wait-Job -Id $Job.Id | Receive-Job - CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures) - Remove-Job -Id $Job.Id - } - if ($ValidationFailures -gt 0) { - Write-PipelineTelemetryError -Category 'SourceLink' -Message "$ValidationFailures package(s) failed validation." - ExitWithExitCode 1 - } -} - -function InstallSourcelinkCli { - $sourcelinkCliPackageName = 'sourcelink' - - $dotnetRoot = InitializeDotNetCli -install:$true - $dotnet = "$dotnetRoot\dotnet.exe" - $toolList = & "$dotnet" tool list --global - - if (($toolList -like "*$sourcelinkCliPackageName*") -and ($toolList -like "*$sourcelinkCliVersion*")) { - Write-Host "SourceLink CLI version $sourcelinkCliVersion is already installed." - } - else { - Write-Host "Installing SourceLink CLI version $sourcelinkCliVersion..." - Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.' 
- & "$dotnet" tool install $sourcelinkCliPackageName --version $sourcelinkCliVersion --verbosity "minimal" --global - } -} - -try { - InstallSourcelinkCli - - foreach ($Job in @(Get-Job)) { - Remove-Job -Id $Job.Id - } - - ValidateSourceLinkLinks -} -catch { - Write-Host $_.Exception - Write-Host $_.ScriptStackTrace - Write-PipelineTelemetryError -Category 'SourceLink' -Message $_ - ExitWithExitCode 1 -} diff --git a/eng/common/post-build/symbols-validation.ps1 b/eng/common/post-build/symbols-validation.ps1 deleted file mode 100644 index 7146e593f..000000000 --- a/eng/common/post-build/symbols-validation.ps1 +++ /dev/null @@ -1,337 +0,0 @@ -param( - [Parameter(Mandatory = $true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored - [Parameter(Mandatory = $true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation - [Parameter(Mandatory = $true)][string] $DotnetSymbolVersion, # Version of dotnet symbol to use - [Parameter(Mandatory = $false)][switch] $CheckForWindowsPdbs, # If we should check for the existence of windows pdbs in addition to portable PDBs - [Parameter(Mandatory = $false)][switch] $ContinueOnError, # If we should keep checking symbols after an error - [Parameter(Mandatory = $false)][switch] $Clean, # Clean extracted symbols directory after checking symbols - [Parameter(Mandatory = $false)][string] $SymbolExclusionFile # Exclude the symbols in the file from publishing to symbol server -) - -. 
$PSScriptRoot\..\tools.ps1 -# Maximum number of jobs to run in parallel -$MaxParallelJobs = 16 - -# Max number of retries -$MaxRetry = 5 - -# Wait time between check for system load -$SecondsBetweenLoadChecks = 10 - -# Set error codes -Set-Variable -Name "ERROR_BADEXTRACT" -Option Constant -Value -1 -Set-Variable -Name "ERROR_FILEDOESNOTEXIST" -Option Constant -Value -2 - -$WindowsPdbVerificationParam = "" -if ($CheckForWindowsPdbs) { - $WindowsPdbVerificationParam = "--windows-pdbs" -} - -$ExclusionSet = New-Object System.Collections.Generic.HashSet[string]; - -if (!$InputPath -or !(Test-Path $InputPath)){ - Write-Host "No symbols to validate." - ExitWithExitCode 0 -} - -#Check if the path exists -if ($SymbolExclusionFile -and (Test-Path $SymbolExclusionFile)){ - [string[]]$Exclusions = Get-Content "$SymbolExclusionFile" - $Exclusions | foreach { if($_ -and $_.Trim()){$ExclusionSet.Add($_)} } -} -else{ - Write-Host "Symbol Exclusion file does not exists. No symbols to exclude." -} - -$CountMissingSymbols = { - param( - [string] $PackagePath, # Path to a NuGet package - [string] $WindowsPdbVerificationParam # If we should check for the existence of windows pdbs in addition to portable PDBs - ) - - Add-Type -AssemblyName System.IO.Compression.FileSystem - - Write-Host "Validating $PackagePath " - - # Ensure input file exist - if (!(Test-Path $PackagePath)) { - Write-PipelineTaskError "Input file does not exist: $PackagePath" - return [pscustomobject]@{ - result = $using:ERROR_FILEDOESNOTEXIST - packagePath = $PackagePath - } - } - - # Extensions for which we'll look for symbols - $RelevantExtensions = @('.dll', '.exe', '.so', '.dylib') - - # How many files are missing symbol information - $MissingSymbols = 0 - - $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath) - $PackageGuid = New-Guid - $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageGuid - $SymbolsPath = Join-Path -Path $ExtractPath -ChildPath 'Symbols' - - try { - 
[System.IO.Compression.ZipFile]::ExtractToDirectory($PackagePath, $ExtractPath) - } - catch { - Write-Host "Something went wrong extracting $PackagePath" - Write-Host $_ - return [pscustomobject]@{ - result = $using:ERROR_BADEXTRACT - packagePath = $PackagePath - } - } - - Get-ChildItem -Recurse $ExtractPath | - Where-Object { $RelevantExtensions -contains $_.Extension } | - ForEach-Object { - $FileName = $_.FullName - if ($FileName -Match '\\ref\\') { - Write-Host "`t Ignoring reference assembly file " $FileName - return - } - - $FirstMatchingSymbolDescriptionOrDefault = { - param( - [string] $FullPath, # Full path to the module that has to be checked - [string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols - [string] $WindowsPdbVerificationParam, # Parameter to pass to potential check for windows-pdbs. - [string] $SymbolsPath - ) - - $FileName = [System.IO.Path]::GetFileName($FullPath) - $Extension = [System.IO.Path]::GetExtension($FullPath) - - # Those below are potential symbol files that the `dotnet symbol` might - # return. Which one will be returned depend on the type of file we are - # checking and which type of file was uploaded. 
- - # The file itself is returned - $SymbolPath = $SymbolsPath + '\' + $FileName - - # PDB file for the module - $PdbPath = $SymbolPath.Replace($Extension, '.pdb') - - # PDB file for R2R module (created by crossgen) - $NGenPdb = $SymbolPath.Replace($Extension, '.ni.pdb') - - # DBG file for a .so library - $SODbg = $SymbolPath.Replace($Extension, '.so.dbg') - - # DWARF file for a .dylib - $DylibDwarf = $SymbolPath.Replace($Extension, '.dylib.dwarf') - - $dotnetSymbolExe = "$env:USERPROFILE\.dotnet\tools" - $dotnetSymbolExe = Resolve-Path "$dotnetSymbolExe\dotnet-symbol.exe" - - $totalRetries = 0 - - while ($totalRetries -lt $using:MaxRetry) { - - # Save the output and get diagnostic output - $output = & $dotnetSymbolExe --symbols --modules $WindowsPdbVerificationParam $TargetServerParam $FullPath -o $SymbolsPath --diagnostics | Out-String - - if ((Test-Path $PdbPath) -and (Test-path $SymbolPath)) { - return 'Module and PDB for Module' - } - elseif ((Test-Path $NGenPdb) -and (Test-Path $PdbPath) -and (Test-Path $SymbolPath)) { - return 'Dll, PDB and NGen PDB' - } - elseif ((Test-Path $SODbg) -and (Test-Path $SymbolPath)) { - return 'So and DBG for SO' - } - elseif ((Test-Path $DylibDwarf) -and (Test-Path $SymbolPath)) { - return 'Dylib and Dwarf for Dylib' - } - elseif (Test-Path $SymbolPath) { - return 'Module' - } - else - { - $totalRetries++ - } - } - - return $null - } - - $FileRelativePath = $FileName.Replace("$ExtractPath\", "") - if (($($using:ExclusionSet) -ne $null) -and ($($using:ExclusionSet).Contains($FileRelativePath) -or ($($using:ExclusionSet).Contains($FileRelativePath.Replace("\", "/"))))){ - Write-Host "Skipping $FileName from symbol validation" - } - - else { - $FileGuid = New-Guid - $ExpandedSymbolsPath = Join-Path -Path $SymbolsPath -ChildPath $FileGuid - - $SymbolsOnMSDL = & $FirstMatchingSymbolDescriptionOrDefault ` - -FullPath $FileName ` - -TargetServerParam '--microsoft-symbol-server' ` - -SymbolsPath "$ExpandedSymbolsPath-msdl" ` - 
-WindowsPdbVerificationParam $WindowsPdbVerificationParam - $SymbolsOnSymWeb = & $FirstMatchingSymbolDescriptionOrDefault ` - -FullPath $FileName ` - -TargetServerParam '--internal-server' ` - -SymbolsPath "$ExpandedSymbolsPath-symweb" ` - -WindowsPdbVerificationParam $WindowsPdbVerificationParam - - Write-Host -NoNewLine "`t Checking file " $FileName "... " - - if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) { - Write-Host "Symbols found on MSDL ($SymbolsOnMSDL) and SymWeb ($SymbolsOnSymWeb)" - } - else { - $MissingSymbols++ - - if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) { - Write-Host 'No symbols found on MSDL or SymWeb!' - } - else { - if ($SymbolsOnMSDL -eq $null) { - Write-Host 'No symbols found on MSDL!' - } - else { - Write-Host 'No symbols found on SymWeb!' - } - } - } - } - } - - if ($using:Clean) { - Remove-Item $ExtractPath -Recurse -Force - } - - Pop-Location - - return [pscustomobject]@{ - result = $MissingSymbols - packagePath = $PackagePath - } -} - -function CheckJobResult( - $result, - $packagePath, - [ref]$DupedSymbols, - [ref]$TotalFailures) { - if ($result -eq $ERROR_BADEXTRACT) { - Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$packagePath has duplicated symbol files" - $DupedSymbols.Value++ - } - elseif ($result -eq $ERROR_FILEDOESNOTEXIST) { - Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$packagePath does not exist" - $TotalFailures.Value++ - } - elseif ($result -gt '0') { - Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Missing symbols for $result modules in the package $packagePath" - $TotalFailures.Value++ - } - else { - Write-Host "All symbols verified for package $packagePath" - } -} - -function CheckSymbolsAvailable { - if (Test-Path $ExtractPath) { - Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue - } - - $TotalPackages = 0 - $TotalFailures = 0 - $DupedSymbols = 0 - - Get-ChildItem "$InputPath\*.nupkg" | - ForEach-Object { - 
$FileName = $_.Name - $FullName = $_.FullName - - # These packages from Arcade-Services include some native libraries that - # our current symbol uploader can't handle. Below is a workaround until - # we get issue: https://github.com/dotnet/arcade/issues/2457 sorted. - if ($FileName -Match 'Microsoft\.DotNet\.Darc\.') { - Write-Host "Ignoring Arcade-services file: $FileName" - Write-Host - return - } - elseif ($FileName -Match 'Microsoft\.DotNet\.Maestro\.Tasks\.') { - Write-Host "Ignoring Arcade-services file: $FileName" - Write-Host - return - } - - $TotalPackages++ - - Start-Job -ScriptBlock $CountMissingSymbols -ArgumentList @($FullName,$WindowsPdbVerificationParam) | Out-Null - - $NumJobs = @(Get-Job -State 'Running').Count - - while ($NumJobs -ge $MaxParallelJobs) { - Write-Host "There are $NumJobs validation jobs running right now. Waiting $SecondsBetweenLoadChecks seconds to check again." - sleep $SecondsBetweenLoadChecks - $NumJobs = @(Get-Job -State 'Running').Count - } - - foreach ($Job in @(Get-Job -State 'Completed')) { - $jobResult = Wait-Job -Id $Job.Id | Receive-Job - CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$DupedSymbols) ([ref]$TotalFailures) - Remove-Job -Id $Job.Id - } - Write-Host - } - - foreach ($Job in @(Get-Job)) { - $jobResult = Wait-Job -Id $Job.Id | Receive-Job - CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$DupedSymbols) ([ref]$TotalFailures) - } - - if ($TotalFailures -gt 0 -or $DupedSymbols -gt 0) { - if ($TotalFailures -gt 0) { - Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Symbols missing for $TotalFailures/$TotalPackages packages" - } - - if ($DupedSymbols -gt 0) { - Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$DupedSymbols/$TotalPackages packages had duplicated symbol files and could not be extracted" - } - - ExitWithExitCode 1 - } - else { - Write-Host "All symbols validated!" 
- } -} - -function InstallDotnetSymbol { - $dotnetSymbolPackageName = 'dotnet-symbol' - - $dotnetRoot = InitializeDotNetCli -install:$true - $dotnet = "$dotnetRoot\dotnet.exe" - $toolList = & "$dotnet" tool list --global - - if (($toolList -like "*$dotnetSymbolPackageName*") -and ($toolList -like "*$dotnetSymbolVersion*")) { - Write-Host "dotnet-symbol version $dotnetSymbolVersion is already installed." - } - else { - Write-Host "Installing dotnet-symbol version $dotnetSymbolVersion..." - Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.' - & "$dotnet" tool install $dotnetSymbolPackageName --version $dotnetSymbolVersion --verbosity "minimal" --global - } -} - -try { - InstallDotnetSymbol - - foreach ($Job in @(Get-Job)) { - Remove-Job -Id $Job.Id - } - - CheckSymbolsAvailable -} -catch { - Write-Host $_.ScriptStackTrace - Write-PipelineTelemetryError -Category 'CheckSymbols' -Message $_ - ExitWithExitCode 1 -} diff --git a/eng/common/retain-build.ps1 b/eng/common/retain-build.ps1 deleted file mode 100644 index e7ba975ad..000000000 --- a/eng/common/retain-build.ps1 +++ /dev/null @@ -1,45 +0,0 @@ - -Param( -[Parameter(Mandatory=$true)][int] $buildId, -[Parameter(Mandatory=$true)][string] $azdoOrgUri, -[Parameter(Mandatory=$true)][string] $azdoProject, -[Parameter(Mandatory=$true)][string] $token -) - -$ErrorActionPreference = 'Stop' -Set-StrictMode -Version 2.0 - -function Get-AzDOHeaders( - [string] $token) -{ - $base64AuthInfo = [Convert]::ToBase64String([Text.Encoding]::ASCII.GetBytes(":${token}")) - $headers = @{"Authorization"="Basic $base64AuthInfo"} - return $headers -} - -function Update-BuildRetention( - [string] $azdoOrgUri, - [string] $azdoProject, - [int] $buildId, - [string] $token) -{ - $headers = Get-AzDOHeaders -token $token - $requestBody = "{ - `"keepForever`": `"true`" - }" - - $requestUri = "${azdoOrgUri}/${azdoProject}/_apis/build/builds/${buildId}?api-version=6.0" - write-Host 
"Attempting to retain build using the following URI: ${requestUri} ..." - - try { - Invoke-RestMethod -Uri $requestUri -Method Patch -Body $requestBody -Header $headers -contentType "application/json" - Write-Host "Updated retention settings for build ${buildId}." - } - catch { - Write-Error "Failed to update retention settings for build: $_.Exception.Response.StatusDescription" - exit 1 - } -} - -Update-BuildRetention -azdoOrgUri $azdoOrgUri -azdoProject $azdoProject -buildId $buildId -token $token -exit 0 diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1 deleted file mode 100644 index a9d2a2d26..000000000 --- a/eng/common/sdk-task.ps1 +++ /dev/null @@ -1,100 +0,0 @@ -[CmdletBinding(PositionalBinding=$false)] -Param( - [string] $configuration = 'Debug', - [string] $task, - [string] $verbosity = 'minimal', - [string] $msbuildEngine = $null, - [switch] $restore, - [switch] $prepareMachine, - [switch][Alias('nobl')]$excludeCIBinaryLog, - [switch] $help, - [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties -) - -$ci = $true -$binaryLog = if ($excludeCIBinaryLog) { $false } else { $true } -$warnAsError = $true - -. $PSScriptRoot\tools.ps1 - -function Print-Usage() { - Write-Host "Common settings:" - Write-Host " -task Name of Arcade task (name of a project in SdkTasks directory of the Arcade SDK package)" - Write-Host " -restore Restore dependencies" - Write-Host " -verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]" - Write-Host " -help Print help and exit" - Write-Host "" - - Write-Host "Advanced settings:" - Write-Host " -prepareMachine Prepare machine for CI run" - Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)." - Write-Host " -excludeCIBinaryLog When running on CI, allow no binary log (short: -nobl)" - Write-Host "" - Write-Host "Command line arguments not listed above are passed thru to msbuild." 
-} - -function Build([string]$target) { - $logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" } - $log = Join-Path $LogDir "$task$logSuffix.binlog" - $binaryLogArg = if ($binaryLog) { "/bl:$log" } else { "" } - $outputPath = Join-Path $ToolsetDir "$task\" - - MSBuild $taskProject ` - $binaryLogArg ` - /t:$target ` - /p:Configuration=$configuration ` - /p:RepoRoot=$RepoRoot ` - /p:BaseIntermediateOutputPath=$outputPath ` - /v:$verbosity ` - @properties -} - -try { - if ($help -or (($null -ne $properties) -and ($properties.Contains('/help') -or $properties.Contains('/?')))) { - Print-Usage - exit 0 - } - - if ($task -eq "") { - Write-PipelineTelemetryError -Category 'Build' -Message "Missing required parameter '-task '" - Print-Usage - ExitWithExitCode 1 - } - - if( $msbuildEngine -eq "vs") { - # Ensure desktop MSBuild is available for sdk tasks. - if( -not ($GlobalJson.tools.PSObject.Properties.Name -contains "vs" )) { - $GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty - } - if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) { - $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.13.0" -MemberType NoteProperty - } - if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") { - $xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true - } - if ($xcopyMSBuildToolsFolder -eq $null) { - throw 'Unable to get xcopy downloadable version of msbuild' - } - - $global:_MSBuildExe = "$($xcopyMSBuildToolsFolder)\MSBuild\Current\Bin\MSBuild.exe" - } - - $taskProject = GetSdkTaskProject $task - if (!(Test-Path $taskProject)) { - Write-PipelineTelemetryError -Category 'Build' -Message "Unknown task: $task" - ExitWithExitCode 1 - } - - if ($restore) { - Build 'Restore' - } - - Build 'Execute' -} -catch { - Write-Host $_.ScriptStackTrace - Write-PipelineTelemetryError -Category 'Build' -Message $_ - ExitWithExitCode 1 -} 
- -ExitWithExitCode 0 diff --git a/eng/common/sdk-task.sh b/eng/common/sdk-task.sh deleted file mode 100644 index 2f83adc02..000000000 --- a/eng/common/sdk-task.sh +++ /dev/null @@ -1,116 +0,0 @@ -#!/usr/bin/env bash - -show_usage() { - echo "Common settings:" - echo " --task Name of Arcade task (name of a project in SdkTasks directory of the Arcade SDK package)" - echo " --restore Restore dependencies" - echo " --verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]" - echo " --help Print help and exit" - echo "" - - echo "Advanced settings:" - echo " --excludeCIBinarylog Don't output binary log (short: -nobl)" - echo "" - echo "Command line arguments not listed above are passed thru to msbuild." -} - -source="${BASH_SOURCE[0]}" - -# resolve $source until the file is no longer a symlink -while [[ -h "$source" ]]; do - scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - source="$(readlink "$source")" - # if $source was a relative symlink, we need to resolve it relative to the path where the - # symlink file was located - [[ $source != /* ]] && source="$scriptroot/$source" -done -scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - -Build() { - local target=$1 - local log_suffix="" - [[ "$target" != "Execute" ]] && log_suffix=".$target" - local log="$log_dir/$task$log_suffix.binlog" - local binaryLogArg="" - [[ $binary_log == true ]] && binaryLogArg="/bl:$log" - local output_path="$toolset_dir/$task/" - - MSBuild "$taskProject" \ - $binaryLogArg \ - /t:"$target" \ - /p:Configuration="$configuration" \ - /p:RepoRoot="$repo_root" \ - /p:BaseIntermediateOutputPath="$output_path" \ - /v:"$verbosity" \ - $properties -} - -binary_log=true -configuration="Debug" -verbosity="minimal" -exclude_ci_binary_log=false -restore=false -help=false -properties='' - -while (($# > 0)); do - lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")" - case $lowerI in - --task) - task=$2 - shift 2 - ;; - --restore) - restore=true - shift 1 - ;; - 
--verbosity) - verbosity=$2 - shift 2 - ;; - --excludecibinarylog|--nobl) - binary_log=false - exclude_ci_binary_log=true - shift 1 - ;; - --help) - help=true - shift 1 - ;; - *) - properties="$properties $1" - shift 1 - ;; - esac -done - -ci=true -warnAsError=true - -if $help; then - show_usage - exit 0 -fi - -. "$scriptroot/tools.sh" -InitializeToolset - -if [[ -z "$task" ]]; then - Write-PipelineTelemetryError -Category 'Task' -Name 'MissingTask' -Message "Missing required parameter '-task '" - ExitWithExitCode 1 -fi - -taskProject=$(GetSdkTaskProject "$task") -if [[ ! -e "$taskProject" ]]; then - Write-PipelineTelemetryError -Category 'Task' -Name 'UnknownTask' -Message "Unknown task: $task" - ExitWithExitCode 1 -fi - -if $restore; then - Build "Restore" -fi - -Build "Execute" - - -ExitWithExitCode 0 diff --git a/eng/common/sdl/NuGet.config b/eng/common/sdl/NuGet.config deleted file mode 100644 index 3849bdb3c..000000000 --- a/eng/common/sdl/NuGet.config +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - - - - - - - - - diff --git a/eng/common/sdl/configure-sdl-tool.ps1 b/eng/common/sdl/configure-sdl-tool.ps1 deleted file mode 100644 index 27f5a4115..000000000 --- a/eng/common/sdl/configure-sdl-tool.ps1 +++ /dev/null @@ -1,130 +0,0 @@ -Param( - [string] $GuardianCliLocation, - [string] $WorkingDirectory, - [string] $TargetDirectory, - [string] $GdnFolder, - # The list of Guardian tools to configure. For each object in the array: - # - If the item is a [hashtable], it must contain these entries: - # - Name = The tool name as Guardian knows it. - # - Scenario = (Optional) Scenario-specific name for this configuration entry. It must be unique - # among all tool entries with the same Name. 
- # - Args = (Optional) Array of Guardian tool configuration args, like '@("Target > C:\temp")' - # - If the item is a [string] $v, it is treated as '@{ Name="$v" }' - [object[]] $ToolsList, - [string] $GuardianLoggerLevel='Standard', - # Optional: Additional params to add to any tool using CredScan. - [string[]] $CrScanAdditionalRunConfigParams, - # Optional: Additional params to add to any tool using PoliCheck. - [string[]] $PoliCheckAdditionalRunConfigParams, - # Optional: Additional params to add to any tool using CodeQL/Semmle. - [string[]] $CodeQLAdditionalRunConfigParams, - # Optional: Additional params to add to any tool using Binskim. - [string[]] $BinskimAdditionalRunConfigParams -) - -$ErrorActionPreference = 'Stop' -Set-StrictMode -Version 2.0 -$disableConfigureToolsetImport = $true -$global:LASTEXITCODE = 0 - -try { - # `tools.ps1` checks $ci to perform some actions. Since the SDL - # scripts don't necessarily execute in the same agent that run the - # build.ps1/sh script this variable isn't automatically set. - $ci = $true - . $PSScriptRoot\..\tools.ps1 - - # Normalize tools list: all in [hashtable] form with defined values for each key. - $ToolsList = $ToolsList | - ForEach-Object { - if ($_ -is [string]) { - $_ = @{ Name = $_ } - } - - if (-not ($_['Scenario'])) { $_.Scenario = "" } - if (-not ($_['Args'])) { $_.Args = @() } - $_ - } - - Write-Host "List of tools to configure:" - $ToolsList | ForEach-Object { $_ | Out-String | Write-Host } - - # We store config files in the r directory of .gdn - $gdnConfigPath = Join-Path $GdnFolder 'r' - $ValidPath = Test-Path $GuardianCliLocation - - if ($ValidPath -eq $False) - { - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location." - ExitWithExitCode 1 - } - - foreach ($tool in $ToolsList) { - # Put together the name and scenario to make a unique key. 
- $toolConfigName = $tool.Name - if ($tool.Scenario) { - $toolConfigName += "_" + $tool.Scenario - } - - Write-Host "=== Configuring $toolConfigName..." - - $gdnConfigFile = Join-Path $gdnConfigPath "$toolConfigName-configure.gdnconfig" - - # For some tools, add default and automatic args. - switch -Exact ($tool.Name) { - 'credscan' { - if ($targetDirectory) { - $tool.Args += "`"TargetDirectory < $TargetDirectory`"" - } - $tool.Args += "`"OutputType < pre`"" - $tool.Args += $CrScanAdditionalRunConfigParams - } - 'policheck' { - if ($targetDirectory) { - $tool.Args += "`"Target < $TargetDirectory`"" - } - $tool.Args += $PoliCheckAdditionalRunConfigParams - } - {$_ -in 'semmle', 'codeql'} { - if ($targetDirectory) { - $tool.Args += "`"SourceCodeDirectory < $TargetDirectory`"" - } - $tool.Args += $CodeQLAdditionalRunConfigParams - } - 'binskim' { - if ($targetDirectory) { - # Binskim crashes due to specific PDBs. GitHub issue: https://github.com/microsoft/binskim/issues/924. - # We are excluding all `_.pdb` files from the scan. - $tool.Args += "`"Target < $TargetDirectory\**;-:file|$TargetDirectory\**\_.pdb`"" - } - $tool.Args += $BinskimAdditionalRunConfigParams - } - } - - # Create variable pointing to the args array directly so we can use splat syntax later. - $toolArgs = $tool.Args - - # Configure the tool. If args array is provided or the current tool has some default arguments - # defined, add "--args" and splat each element on the end. Arg format is "{Arg id} < {Value}", - # one per parameter. 
Doc page for "guardian configure": - # https://dev.azure.com/securitytools/SecurityIntegration/_wiki/wikis/Guardian/1395/configure - Exec-BlockVerbosely { - & $GuardianCliLocation configure ` - --working-directory $WorkingDirectory ` - --tool $tool.Name ` - --output-path $gdnConfigFile ` - --logger-level $GuardianLoggerLevel ` - --noninteractive ` - --force ` - $(if ($toolArgs) { "--args" }) @toolArgs - Exit-IfNZEC "Sdl" - } - - Write-Host "Created '$toolConfigName' configuration file: $gdnConfigFile" - } -} -catch { - Write-Host $_.ScriptStackTrace - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ - ExitWithExitCode 1 -} diff --git a/eng/common/sdl/execute-all-sdl-tools.ps1 b/eng/common/sdl/execute-all-sdl-tools.ps1 deleted file mode 100644 index 4715d75e9..000000000 --- a/eng/common/sdl/execute-all-sdl-tools.ps1 +++ /dev/null @@ -1,167 +0,0 @@ -Param( - [string] $GuardianPackageName, # Required: the name of guardian CLI package (not needed if GuardianCliLocation is specified) - [string] $NugetPackageDirectory, # Required: directory where NuGet packages are installed (not needed if GuardianCliLocation is specified) - [string] $GuardianCliLocation, # Optional: Direct location of Guardian CLI executable if GuardianPackageName & NugetPackageDirectory are not specified - [string] $Repository=$env:BUILD_REPOSITORY_NAME, # Required: the name of the repository (e.g. dotnet/arcade) - [string] $BranchName=$env:BUILD_SOURCEBRANCH, # Optional: name of branch or version of gdn settings; defaults to master - [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, # Required: the directory where source files are located - [string] $ArtifactsDirectory = (Join-Path $env:BUILD_ARTIFACTSTAGINGDIRECTORY ('artifacts')), # Required: the directory where build artifacts are located - [string] $AzureDevOpsAccessToken, # Required: access token for dnceng; should be provided via KeyVault - - # Optional: list of SDL tools to run on source code. 
See 'configure-sdl-tool.ps1' for tools list - # format. - [object[]] $SourceToolsList, - # Optional: list of SDL tools to run on built artifacts. See 'configure-sdl-tool.ps1' for tools - # list format. - [object[]] $ArtifactToolsList, - # Optional: list of SDL tools to run without automatically specifying a target directory. See - # 'configure-sdl-tool.ps1' for tools list format. - [object[]] $CustomToolsList, - - [bool] $TsaPublish=$False, # Optional: true will publish results to TSA; only set to true after onboarding to TSA; TSA is the automated framework used to upload test results as bugs. - [string] $TsaBranchName=$env:BUILD_SOURCEBRANCH, # Optional: required for TSA publish; defaults to $(Build.SourceBranchName); TSA is the automated framework used to upload test results as bugs. - [string] $TsaRepositoryName=$env:BUILD_REPOSITORY_NAME, # Optional: TSA repository name; will be generated automatically if not submitted; TSA is the automated framework used to upload test results as bugs. - [string] $BuildNumber=$env:BUILD_BUILDNUMBER, # Optional: required for TSA publish; defaults to $(Build.BuildNumber) - [bool] $UpdateBaseline=$False, # Optional: if true, will update the baseline in the repository; should only be run after fixing any issues which need to be fixed - [bool] $TsaOnboard=$False, # Optional: if true, will onboard the repository to TSA; should only be run once; TSA is the automated framework used to upload test results as bugs. - [string] $TsaInstanceUrl, # Optional: only needed if TsaOnboard or TsaPublish is true; the instance-url registered with TSA; TSA is the automated framework used to upload test results as bugs. - [string] $TsaCodebaseName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the codebase registered with TSA; TSA is the automated framework used to upload test results as bugs. 
- [string] $TsaProjectName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the project registered with TSA; TSA is the automated framework used to upload test results as bugs. - [string] $TsaNotificationEmail, # Optional: only needed if TsaOnboard is true; the email(s) which will receive notifications of TSA bug filings (e.g. alias@microsoft.com); TSA is the automated framework used to upload test results as bugs. - [string] $TsaCodebaseAdmin, # Optional: only needed if TsaOnboard is true; the aliases which are admins of the TSA codebase (e.g. DOMAIN\alias); TSA is the automated framework used to upload test results as bugs. - [string] $TsaBugAreaPath, # Optional: only needed if TsaOnboard is true; the area path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs. - [string] $TsaIterationPath, # Optional: only needed if TsaOnboard is true; the iteration path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs. 
- [string] $GuardianLoggerLevel='Standard', # Optional: the logger level for the Guardian CLI; options are Trace, Verbose, Standard, Warning, and Error - [string[]] $CrScanAdditionalRunConfigParams, # Optional: Additional Params to custom build a CredScan run config in the format @("xyz:abc","sdf:1") - [string[]] $PoliCheckAdditionalRunConfigParams, # Optional: Additional Params to custom build a Policheck run config in the format @("xyz:abc","sdf:1") - [string[]] $CodeQLAdditionalRunConfigParams, # Optional: Additional Params to custom build a Semmle/CodeQL run config in the format @("xyz < abc","sdf < 1") - [string[]] $BinskimAdditionalRunConfigParams, # Optional: Additional Params to custom build a Binskim run config in the format @("xyz < abc","sdf < 1") - [bool] $BreakOnFailure=$False # Optional: Fail the build if there were errors during the run -) - -try { - $ErrorActionPreference = 'Stop' - Set-StrictMode -Version 2.0 - $disableConfigureToolsetImport = $true - $global:LASTEXITCODE = 0 - - # `tools.ps1` checks $ci to perform some actions. Since the SDL - # scripts don't necessarily execute in the same agent that run the - # build.ps1/sh script this variable isn't automatically set. - $ci = $true - . $PSScriptRoot\..\tools.ps1 - - #Replace repo names to the format of org/repo - if (!($Repository.contains('/'))) { - $RepoName = $Repository -replace '(.*?)-(.*)', '$1/$2'; - } - else{ - $RepoName = $Repository; - } - - if ($GuardianPackageName) { - $guardianCliLocation = Join-Path $NugetPackageDirectory (Join-Path $GuardianPackageName (Join-Path 'tools' 'guardian.cmd')) - } else { - $guardianCliLocation = $GuardianCliLocation - } - - $workingDirectory = (Split-Path $SourceDirectory -Parent) - $ValidPath = Test-Path $guardianCliLocation - - if ($ValidPath -eq $False) - { - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Invalid Guardian CLI Location.' 
- ExitWithExitCode 1 - } - - Exec-BlockVerbosely { - & $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel - } - $gdnFolder = Join-Path $workingDirectory '.gdn' - - if ($TsaOnboard) { - if ($TsaCodebaseName -and $TsaNotificationEmail -and $TsaCodebaseAdmin -and $TsaBugAreaPath) { - Exec-BlockVerbosely { - & $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel - } - if ($LASTEXITCODE -ne 0) { - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-onboard failed with exit code $LASTEXITCODE." - ExitWithExitCode $LASTEXITCODE - } - } else { - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not onboard to TSA -- not all required values ($TsaCodebaseName, $TsaNotificationEmail, $TsaCodebaseAdmin, $TsaBugAreaPath) were specified.' - ExitWithExitCode 1 - } - } - - # Configure a list of tools with a default target directory. Populates the ".gdn/r" directory. 
- function Configure-ToolsList([object[]] $tools, [string] $targetDirectory) { - if ($tools -and $tools.Count -gt 0) { - Exec-BlockVerbosely { - & $(Join-Path $PSScriptRoot 'configure-sdl-tool.ps1') ` - -GuardianCliLocation $guardianCliLocation ` - -WorkingDirectory $workingDirectory ` - -TargetDirectory $targetDirectory ` - -GdnFolder $gdnFolder ` - -ToolsList $tools ` - -AzureDevOpsAccessToken $AzureDevOpsAccessToken ` - -GuardianLoggerLevel $GuardianLoggerLevel ` - -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams ` - -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams ` - -CodeQLAdditionalRunConfigParams $CodeQLAdditionalRunConfigParams ` - -BinskimAdditionalRunConfigParams $BinskimAdditionalRunConfigParams - if ($BreakOnFailure) { - Exit-IfNZEC "Sdl" - } - } - } - } - - # Configure Artifact and Source tools with default Target directories. - Configure-ToolsList $ArtifactToolsList $ArtifactsDirectory - Configure-ToolsList $SourceToolsList $SourceDirectory - # Configure custom tools with no default Target directory. - Configure-ToolsList $CustomToolsList $null - - # At this point, all tools are configured in the ".gdn" directory. Run them all in a single call. - # (If we used "run" multiple times, each run would overwrite data from earlier runs.) 
- Exec-BlockVerbosely { - & $(Join-Path $PSScriptRoot 'run-sdl.ps1') ` - -GuardianCliLocation $guardianCliLocation ` - -WorkingDirectory $SourceDirectory ` - -UpdateBaseline $UpdateBaseline ` - -GdnFolder $gdnFolder - } - - if ($TsaPublish) { - if ($TsaBranchName -and $BuildNumber) { - if (-not $TsaRepositoryName) { - $TsaRepositoryName = "$($Repository)-$($BranchName)" - } - Exec-BlockVerbosely { - & $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel - } - if ($LASTEXITCODE -ne 0) { - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-publish failed with exit code $LASTEXITCODE." - ExitWithExitCode $LASTEXITCODE - } - } else { - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not publish to TSA -- not all required values ($TsaBranchName, $BuildNumber) were specified.' - ExitWithExitCode 1 - } - } - - if ($BreakOnFailure) { - Write-Host "Failing the build in case of breaking results..." - Exec-BlockVerbosely { - & $guardianCliLocation break --working-directory $workingDirectory --logger-level $GuardianLoggerLevel - } - } else { - Write-Host "Letting the build pass even if there were breaking results..." 
- } -} -catch { - Write-Host $_.ScriptStackTrace - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ - exit 1 -} diff --git a/eng/common/sdl/extract-artifact-archives.ps1 b/eng/common/sdl/extract-artifact-archives.ps1 deleted file mode 100644 index 68da4fbf2..000000000 --- a/eng/common/sdl/extract-artifact-archives.ps1 +++ /dev/null @@ -1,63 +0,0 @@ -# This script looks for each archive file in a directory and extracts it into the target directory. -# For example, the file "$InputPath/bin.tar.gz" extracts to "$ExtractPath/bin.tar.gz.extracted/**". -# Uses the "tar" utility added to Windows 10 / Windows 2019 that supports tar.gz and zip. -param( - # Full path to directory where archives are stored. - [Parameter(Mandatory=$true)][string] $InputPath, - # Full path to directory to extract archives into. May be the same as $InputPath. - [Parameter(Mandatory=$true)][string] $ExtractPath -) - -$ErrorActionPreference = 'Stop' -Set-StrictMode -Version 2.0 - -$disableConfigureToolsetImport = $true - -try { - # `tools.ps1` checks $ci to perform some actions. Since the SDL - # scripts don't necessarily execute in the same agent that run the - # build.ps1/sh script this variable isn't automatically set. - $ci = $true - . $PSScriptRoot\..\tools.ps1 - - Measure-Command { - $jobs = @() - - # Find archive files for non-Windows and Windows builds. - $archiveFiles = @( - Get-ChildItem (Join-Path $InputPath "*.tar.gz") - Get-ChildItem (Join-Path $InputPath "*.zip") - ) - - foreach ($targzFile in $archiveFiles) { - $jobs += Start-Job -ScriptBlock { - $file = $using:targzFile - $fileName = [System.IO.Path]::GetFileName($file) - $extractDir = Join-Path $using:ExtractPath "$fileName.extracted" - - New-Item $extractDir -ItemType Directory -Force | Out-Null - - Write-Host "Extracting '$file' to '$extractDir'..." - - # Pipe errors to stdout to prevent PowerShell detecting them and quitting the job early. 
- # This type of quit skips the catch, so we wouldn't be able to tell which file triggered the - # error. Save output so it can be stored in the exception string along with context. - $output = tar -xf $file -C $extractDir 2>&1 - # Handle NZEC manually rather than using Exit-IfNZEC: we are in a background job, so we - # don't have access to the outer scope. - if ($LASTEXITCODE -ne 0) { - throw "Error extracting '$file': non-zero exit code ($LASTEXITCODE). Output: '$output'" - } - - Write-Host "Extracted to $extractDir" - } - } - - Receive-Job $jobs -Wait - } -} -catch { - Write-Host $_ - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ - ExitWithExitCode 1 -} diff --git a/eng/common/sdl/extract-artifact-packages.ps1 b/eng/common/sdl/extract-artifact-packages.ps1 deleted file mode 100644 index f031ed5b2..000000000 --- a/eng/common/sdl/extract-artifact-packages.ps1 +++ /dev/null @@ -1,82 +0,0 @@ -param( - [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where artifact packages are stored - [Parameter(Mandatory=$true)][string] $ExtractPath # Full path to directory where the packages will be extracted -) - -$ErrorActionPreference = 'Stop' -Set-StrictMode -Version 2.0 - -$disableConfigureToolsetImport = $true - -function ExtractArtifacts { - if (!(Test-Path $InputPath)) { - Write-Host "Input Path does not exist: $InputPath" - ExitWithExitCode 0 - } - $Jobs = @() - Get-ChildItem "$InputPath\*.nupkg" | - ForEach-Object { - $Jobs += Start-Job -ScriptBlock $ExtractPackage -ArgumentList $_.FullName - } - - foreach ($Job in $Jobs) { - Wait-Job -Id $Job.Id | Receive-Job - } -} - -try { - # `tools.ps1` checks $ci to perform some actions. Since the SDL - # scripts don't necessarily execute in the same agent that run the - # build.ps1/sh script this variable isn't automatically set. - $ci = $true - . 
$PSScriptRoot\..\tools.ps1 - - $ExtractPackage = { - param( - [string] $PackagePath # Full path to a NuGet package - ) - - if (!(Test-Path $PackagePath)) { - Write-PipelineTelemetryError -Category 'Build' -Message "Input file does not exist: $PackagePath" - ExitWithExitCode 1 - } - - $RelevantExtensions = @('.dll', '.exe', '.pdb') - Write-Host -NoNewLine 'Extracting ' ([System.IO.Path]::GetFileName($PackagePath)) '...' - - $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath) - $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId - - Add-Type -AssemblyName System.IO.Compression.FileSystem - - [System.IO.Directory]::CreateDirectory($ExtractPath); - - try { - $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath) - - $zip.Entries | - Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} | - ForEach-Object { - $TargetPath = Join-Path -Path $ExtractPath -ChildPath (Split-Path -Path $_.FullName) - [System.IO.Directory]::CreateDirectory($TargetPath); - - $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.FullName - [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile) - } - } - catch { - Write-Host $_ - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ - ExitWithExitCode 1 - } - finally { - $zip.Dispose() - } - } - Measure-Command { ExtractArtifacts } -} -catch { - Write-Host $_ - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ - ExitWithExitCode 1 -} diff --git a/eng/common/sdl/init-sdl.ps1 b/eng/common/sdl/init-sdl.ps1 deleted file mode 100644 index 3ac1d92b3..000000000 --- a/eng/common/sdl/init-sdl.ps1 +++ /dev/null @@ -1,55 +0,0 @@ -Param( - [string] $GuardianCliLocation, - [string] $Repository, - [string] $BranchName='master', - [string] $WorkingDirectory, - [string] $AzureDevOpsAccessToken, - [string] $GuardianLoggerLevel='Standard' -) - -$ErrorActionPreference = 'Stop' -Set-StrictMode -Version 2.0 -$disableConfigureToolsetImport = 
$true -$global:LASTEXITCODE = 0 - -# `tools.ps1` checks $ci to perform some actions. Since the SDL -# scripts don't necessarily execute in the same agent that run the -# build.ps1/sh script this variable isn't automatically set. -$ci = $true -. $PSScriptRoot\..\tools.ps1 - -# Don't display the console progress UI - it's a huge perf hit -$ProgressPreference = 'SilentlyContinue' - -# Construct basic auth from AzDO access token; construct URI to the repository's gdn folder stored in that repository; construct location of zip file -$encodedPat = [Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes(":$AzureDevOpsAccessToken")) -$escapedRepository = [Uri]::EscapeDataString("/$Repository/$BranchName/.gdn") -$uri = "https://dev.azure.com/dnceng/internal/_apis/git/repositories/sdl-tool-cfg/Items?path=$escapedRepository&versionDescriptor[versionOptions]=0&`$format=zip&api-version=5.0" -$zipFile = "$WorkingDirectory/gdn.zip" - -Add-Type -AssemblyName System.IO.Compression.FileSystem -$gdnFolder = (Join-Path $WorkingDirectory '.gdn') - -try { - # if the folder does not exist, we'll do a guardian init and push it to the remote repository - Write-Host 'Initializing Guardian...' - Write-Host "$GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel" - & $GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel - if ($LASTEXITCODE -ne 0) { - Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian init failed with exit code $LASTEXITCODE." 
- ExitWithExitCode $LASTEXITCODE - } - # We create the mainbaseline so it can be edited later - Write-Host "$GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline" - & $GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline - if ($LASTEXITCODE -ne 0) { - Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian baseline failed with exit code $LASTEXITCODE." - ExitWithExitCode $LASTEXITCODE - } - ExitWithExitCode 0 -} -catch { - Write-Host $_.ScriptStackTrace - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ - ExitWithExitCode 1 -} diff --git a/eng/common/sdl/packages.config b/eng/common/sdl/packages.config deleted file mode 100644 index e5f543ea6..000000000 --- a/eng/common/sdl/packages.config +++ /dev/null @@ -1,4 +0,0 @@ - - - - diff --git a/eng/common/sdl/run-sdl.ps1 b/eng/common/sdl/run-sdl.ps1 deleted file mode 100644 index 2eac8c78f..000000000 --- a/eng/common/sdl/run-sdl.ps1 +++ /dev/null @@ -1,49 +0,0 @@ -Param( - [string] $GuardianCliLocation, - [string] $WorkingDirectory, - [string] $GdnFolder, - [string] $UpdateBaseline, - [string] $GuardianLoggerLevel='Standard' -) - -$ErrorActionPreference = 'Stop' -Set-StrictMode -Version 2.0 -$disableConfigureToolsetImport = $true -$global:LASTEXITCODE = 0 - -try { - # `tools.ps1` checks $ci to perform some actions. Since the SDL - # scripts don't necessarily execute in the same agent that run the - # build.ps1/sh script this variable isn't automatically set. - $ci = $true - . $PSScriptRoot\..\tools.ps1 - - # We store config files in the r directory of .gdn - $gdnConfigPath = Join-Path $GdnFolder 'r' - $ValidPath = Test-Path $GuardianCliLocation - - if ($ValidPath -eq $False) - { - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location." 
- ExitWithExitCode 1 - } - - $gdnConfigFiles = Get-ChildItem $gdnConfigPath -Recurse -Include '*.gdnconfig' - Write-Host "Discovered Guardian config files:" - $gdnConfigFiles | Out-String | Write-Host - - Exec-BlockVerbosely { - & $GuardianCliLocation run ` - --working-directory $WorkingDirectory ` - --baseline mainbaseline ` - --update-baseline $UpdateBaseline ` - --logger-level $GuardianLoggerLevel ` - --config @gdnConfigFiles - Exit-IfNZEC "Sdl" - } -} -catch { - Write-Host $_.ScriptStackTrace - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ - ExitWithExitCode 1 -} diff --git a/eng/common/sdl/sdl.ps1 b/eng/common/sdl/sdl.ps1 deleted file mode 100644 index 648c5068d..000000000 --- a/eng/common/sdl/sdl.ps1 +++ /dev/null @@ -1,38 +0,0 @@ - -function Install-Gdn { - param( - [Parameter(Mandatory=$true)] - [string]$Path, - - # If omitted, install the latest version of Guardian, otherwise install that specific version. - [string]$Version - ) - - $ErrorActionPreference = 'Stop' - Set-StrictMode -Version 2.0 - $disableConfigureToolsetImport = $true - $global:LASTEXITCODE = 0 - - # `tools.ps1` checks $ci to perform some actions. Since the SDL - # scripts don't necessarily execute in the same agent that run the - # build.ps1/sh script this variable isn't automatically set. - $ci = $true - . 
$PSScriptRoot\..\tools.ps1 - - $argumentList = @("install", "Microsoft.Guardian.Cli", "-Source https://securitytools.pkgs.visualstudio.com/_packaging/Guardian/nuget/v3/index.json", "-OutputDirectory $Path", "-NonInteractive", "-NoCache") - - if ($Version) { - $argumentList += "-Version $Version" - } - - Start-Process nuget -Verbose -ArgumentList $argumentList -NoNewWindow -Wait - - $gdnCliPath = Get-ChildItem -Filter guardian.cmd -Recurse -Path $Path - - if (!$gdnCliPath) - { - Write-PipelineTelemetryError -Category 'Sdl' -Message 'Failure installing Guardian' - } - - return $gdnCliPath.FullName -} \ No newline at end of file diff --git a/eng/common/sdl/trim-assets-version.ps1 b/eng/common/sdl/trim-assets-version.ps1 deleted file mode 100644 index 0daa2a9e9..000000000 --- a/eng/common/sdl/trim-assets-version.ps1 +++ /dev/null @@ -1,75 +0,0 @@ -<# -.SYNOPSIS -Install and run the 'Microsoft.DotNet.VersionTools.Cli' tool with the 'trim-artifacts-version' command to trim the version from the NuGet assets file name. - -.PARAMETER InputPath -Full path to directory where artifact packages are stored - -.PARAMETER Recursive -Search for NuGet packages recursively - -#> - -Param( - [string] $InputPath, - [bool] $Recursive = $true -) - -$CliToolName = "Microsoft.DotNet.VersionTools.Cli" - -function Install-VersionTools-Cli { - param( - [Parameter(Mandatory=$true)][string]$Version - ) - - Write-Host "Installing the package '$CliToolName' with a version of '$version' ..." 
- $feed = "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json" - - $argumentList = @("tool", "install", "--local", "$CliToolName", "--add-source $feed", "--no-cache", "--version $Version", "--create-manifest-if-needed") - Start-Process "$dotnet" -Verbose -ArgumentList $argumentList -NoNewWindow -Wait -} - -# ------------------------------------------------------------------- - -if (!(Test-Path $InputPath)) { - Write-Host "Input Path '$InputPath' does not exist" - ExitWithExitCode 1 -} - -$ErrorActionPreference = 'Stop' -Set-StrictMode -Version 2.0 - -$disableConfigureToolsetImport = $true -$global:LASTEXITCODE = 0 - -# `tools.ps1` checks $ci to perform some actions. Since the SDL -# scripts don't necessarily execute in the same agent that run the -# build.ps1/sh script this variable isn't automatically set. -$ci = $true -. $PSScriptRoot\..\tools.ps1 - -try { - $dotnetRoot = InitializeDotNetCli -install:$true - $dotnet = "$dotnetRoot\dotnet.exe" - - $toolsetVersion = Read-ArcadeSdkVersion - Install-VersionTools-Cli -Version $toolsetVersion - - $cliToolFound = (& "$dotnet" tool list --local | Where-Object {$_.Split(' ')[0] -eq $CliToolName}) - if ($null -eq $cliToolFound) { - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "The '$CliToolName' tool is not installed." - ExitWithExitCode 1 - } - - Exec-BlockVerbosely { - & "$dotnet" $CliToolName trim-assets-version ` - --assets-path $InputPath ` - --recursive $Recursive - Exit-IfNZEC "Sdl" - } -} -catch { - Write-Host $_ - Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ - ExitWithExitCode 1 -} diff --git a/eng/common/template-guidance.md b/eng/common/template-guidance.md deleted file mode 100644 index 98bbc1ded..000000000 --- a/eng/common/template-guidance.md +++ /dev/null @@ -1,133 +0,0 @@ -# Overview - -Arcade provides templates for public (`/templates`) and 1ES pipeline templates (`/templates-official`) scenarios. 
Pipelines which are required to be managed by 1ES pipeline templates should reference `/templates-offical`, all other pipelines may reference `/templates`. - -## How to use - -Basic guidance is: - -- 1ES Pipeline Template or 1ES Microbuild template runs should reference `eng/common/templates-official`. Any internal production-graded pipeline should use these templates. - -- All other runs should reference `eng/common/templates`. - -See [azure-pipelines.yml](../../azure-pipelines.yml) (templates-official example) or [azure-pipelines-pr.yml](../../azure-pipelines-pr.yml) (templates example) for examples. - -#### The `templateIs1ESManaged` parameter - -The `templateIs1ESManaged` is available on most templates and affects which of the variants is used for nested templates. See [Development Notes](#development-notes) below for more information on the `templateIs1ESManaged1 parameter. - -- For templates under `job/`, `jobs/`, `steps`, or `post-build/`, this parameter must be explicitly set. - -## Multiple outputs - -1ES pipeline templates impose a policy where every publish artifact execution results in additional security scans being injected into your pipeline. When using `templates-official/jobs/jobs.yml`, Arcade reduces the number of additional security injections by gathering all publishing outputs into the [Build.ArtifactStagingDirectory](https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#build-variables-devops-services), and utilizing the [outputParentDirectory](https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/outputs#multiple-outputs) feature of 1ES pipeline templates. When implementing your pipeline, if you ensure publish artifacts are located in the `$(Build.ArtifactStagingDirectory)`, and utilize the 1ES provided template context, then you can reduce the number of security scans for your pipeline. 
- -Example: -``` yaml -# azure-pipelines.yml -extends: - template: azure-pipelines/MicroBuild.1ES.Official.yml@MicroBuildTemplate - parameters: - stages: - - stage: build - jobs: - - template: /eng/common/templates-official/jobs/jobs.yml@self - parameters: - # 1ES makes use of outputs to reduce security task injection overhead - templateContext: - outputs: - - output: pipelineArtifact - displayName: 'Publish logs from source' - continueOnError: true - condition: always() - targetPath: $(Build.ArtifactStagingDirectory)/artifacts/log - artifactName: Logs - jobs: - - job: Windows - steps: - - script: echo "friendly neighborhood" > artifacts/marvel/spiderman.txt - # copy build outputs to artifact staging directory for publishing - - task: CopyFiles@2 - displayName: Gather build output - inputs: - SourceFolder: '$(Build.SourcesDirectory)/artifacts/marvel' - Contents: '**' - TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/marvel' -``` - -Note: Multiple outputs are ONLY applicable to 1ES PT publishing (only usable when referencing `templates-official`). 
- -## Development notes - -**Folder / file structure** - -``` text -eng\common\ - [templates || templates-official]\ - job\ - job.yml (shim + artifact publishing logic) - onelocbuild.yml (shim) - publish-build-assets.yml (shim) - source-build.yml (shim) - source-index-stage1.yml (shim) - jobs\ - codeql-build.yml (shim) - jobs.yml (shim) - source-build.yml (shim) - post-build\ - post-build.yml (shim) - common-variabls.yml (shim) - setup-maestro-vars.yml (shim) - steps\ - publish-build-artifacts.yml (logic) - publish-pipeline-artifacts.yml (logic) - component-governance.yml (shim) - generate-sbom.yml (shim) - publish-logs.yml (shim) - retain-build.yml (shim) - send-to-helix.yml (shim) - source-build.yml (shim) - variables\ - pool-providers.yml (logic + redirect) # templates/variables/pool-providers.yml will redirect to templates-official/variables/pool-providers.yml if you are running in the internal project - sdl-variables.yml (logic) - core-templates\ - job\ - job.yml (logic) - onelocbuild.yml (logic) - publish-build-assets.yml (logic) - source-build.yml (logic) - source-index-stage1.yml (logic) - jobs\ - codeql-build.yml (logic) - jobs.yml (logic) - source-build.yml (logic) - post-build\ - common-variabls.yml (logic) - post-build.yml (logic) - setup-maestro-vars.yml (logic) - steps\ - component-governance.yml (logic) - generate-sbom.yml (logic) - publish-build-artifacts.yml (redirect) - publish-logs.yml (logic) - publish-pipeline-artifacts.yml (redirect) - retain-build.yml (logic) - send-to-helix.yml (logic) - source-build.yml (logic) - variables\ - pool-providers.yml (redirect) -``` - -In the table above, a file is designated as "shim", "logic", or "redirect". - -- shim - represents a yaml file which is an intermediate step between pipeline logic and .Net Core Engineering's templates (`core-templates`) and defines the `is1ESPipeline` parameter value. - -- logic - represents actual base template logic. 
- -- redirect- represents a file in `core-templates` which redirects to the "logic" file in either `templates` or `templates-official`. - -Logic for Arcade's templates live **primarily** in the `core-templates` folder. The exceptions to the location of the logic files are around artifact publishing, which is handled differently between 1es pipeline templates and standard templates. `templates` and `templates-official` provide shim entry points which redirect to `core-templates` while also defining the `is1ESPipeline` parameter. If a shim is referenced in `templates`, then `is1ESPipeline` is set to `false`. If a shim is referenced in `templates-official`, then `is1ESPipeline` is set to `true`. - -Within `templates` and `templates-official`, the templates at the "stages", and "jobs" / "job" level have been replaced with shims. Templates at the "steps" and "variables" level are typically too granular to be replaced with shims and instead persist logic which is directly applicable to either scenario. - -Within `core-templates`, there are a handful of places where logic is dependent on which shim entry point was used. In those places, we redirect back to the respective logic file in `templates` or `templates-official`. 
diff --git a/eng/common/templates-official/job/job.yml b/eng/common/templates-official/job/job.yml deleted file mode 100644 index a8a943287..000000000 --- a/eng/common/templates-official/job/job.yml +++ /dev/null @@ -1,83 +0,0 @@ -parameters: -# Sbom related params - enableSbom: true - runAsPublic: false - PackageVersion: 9.0.0 - BuildDropPath: '$(Build.SourcesDirectory)/artifacts' - -jobs: -- template: /eng/common/core-templates/job/job.yml - parameters: - is1ESPipeline: true - - componentGovernanceSteps: - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}: - - template: /eng/common/templates/steps/generate-sbom.yml - parameters: - PackageVersion: ${{ parameters.packageVersion }} - BuildDropPath: ${{ parameters.buildDropPath }} - ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom - publishArtifacts: false - - # publish artifacts - # for 1ES managed templates, use the templateContext.output to handle multiple outputs. 
- templateContext: - outputParentDirectory: $(Build.ArtifactStagingDirectory) - outputs: - - ${{ if ne(parameters.artifacts.publish, '') }}: - - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}: - - output: buildArtifacts - displayName: Publish pipeline artifacts - PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts' - ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }} - condition: always() - retryCountOnTaskFailure: 10 # for any logs being locked - continueOnError: true - - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: - - output: pipelineArtifact - targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log' - artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)_Attempt$(System.JobAttempt)') }} - displayName: 'Publish logs' - continueOnError: true - condition: always() - retryCountOnTaskFailure: 10 # for any logs being locked - sbomEnabled: false # we don't need SBOM for logs - - - ${{ if eq(parameters.enablePublishBuildArtifacts, true) }}: - - output: buildArtifacts - displayName: Publish Logs - PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)' - publishLocation: Container - ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)_Attempt$(System.JobAttempt)' ) }} - continueOnError: true - condition: always() - sbomEnabled: false # we don't need SBOM for logs - - - ${{ if eq(parameters.enableBuildRetry, 'true') }}: - - output: pipelineArtifact - targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/eng/common/BuildConfiguration' - artifactName: 'BuildConfiguration' - displayName: 'Publish build retry configuration' - continueOnError: true - sbomEnabled: false # we don't need SBOM for BuildConfiguration - - - ${{ if 
and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}: - - output: pipelineArtifact - displayName: Publish SBOM manifest - continueOnError: true - targetPath: $(Build.ArtifactStagingDirectory)/sbom - artifactName: $(ARTIFACT_NAME) - - # add any outputs provided via root yaml - - ${{ if ne(parameters.templateContext.outputs, '') }}: - - ${{ each output in parameters.templateContext.outputs }}: - - ${{ output }} - - # add any remaining templateContext properties - ${{ each context in parameters.templateContext }}: - ${{ if and(ne(context.key, 'outputParentDirectory'), ne(context.key, 'outputs')) }}: - ${{ context.key }}: ${{ context.value }} - - ${{ each parameter in parameters }}: - ${{ if and(ne(parameter.key, 'templateContext'), ne(parameter.key, 'is1ESPipeline')) }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/job/onelocbuild.yml b/eng/common/templates-official/job/onelocbuild.yml deleted file mode 100644 index 0f0c514b9..000000000 --- a/eng/common/templates-official/job/onelocbuild.yml +++ /dev/null @@ -1,7 +0,0 @@ -jobs: -- template: /eng/common/core-templates/job/onelocbuild.yml - parameters: - is1ESPipeline: true - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/job/publish-build-assets.yml b/eng/common/templates-official/job/publish-build-assets.yml deleted file mode 100644 index d667a70e8..000000000 --- a/eng/common/templates-official/job/publish-build-assets.yml +++ /dev/null @@ -1,7 +0,0 @@ -jobs: -- template: /eng/common/core-templates/job/publish-build-assets.yml - parameters: - is1ESPipeline: true - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/job/source-build.yml b/eng/common/templates-official/job/source-build.yml deleted 
file mode 100644 index 1a480034b..000000000 --- a/eng/common/templates-official/job/source-build.yml +++ /dev/null @@ -1,7 +0,0 @@ -jobs: -- template: /eng/common/core-templates/job/source-build.yml - parameters: - is1ESPipeline: true - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/job/source-index-stage1.yml b/eng/common/templates-official/job/source-index-stage1.yml deleted file mode 100644 index 6d5ead316..000000000 --- a/eng/common/templates-official/job/source-index-stage1.yml +++ /dev/null @@ -1,7 +0,0 @@ -jobs: -- template: /eng/common/core-templates/job/source-index-stage1.yml - parameters: - is1ESPipeline: true - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/jobs/codeql-build.yml b/eng/common/templates-official/jobs/codeql-build.yml deleted file mode 100644 index a726322ec..000000000 --- a/eng/common/templates-official/jobs/codeql-build.yml +++ /dev/null @@ -1,7 +0,0 @@ -jobs: -- template: /eng/common/core-templates/jobs/codeql-build.yml - parameters: - is1ESPipeline: true - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/jobs/jobs.yml b/eng/common/templates-official/jobs/jobs.yml deleted file mode 100644 index 007deddae..000000000 --- a/eng/common/templates-official/jobs/jobs.yml +++ /dev/null @@ -1,7 +0,0 @@ -jobs: -- template: /eng/common/core-templates/jobs/jobs.yml - parameters: - is1ESPipeline: true - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/jobs/source-build.yml b/eng/common/templates-official/jobs/source-build.yml deleted file mode 100644 index 483e7b611..000000000 --- a/eng/common/templates-official/jobs/source-build.yml +++ /dev/null @@ -1,7 +0,0 @@ -jobs: -- template: /eng/common/core-templates/jobs/source-build.yml 
- parameters: - is1ESPipeline: true - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates-official/post-build/common-variables.yml b/eng/common/templates-official/post-build/common-variables.yml deleted file mode 100644 index c32fc4923..000000000 --- a/eng/common/templates-official/post-build/common-variables.yml +++ /dev/null @@ -1,8 +0,0 @@ -variables: -- template: /eng/common/core-templates/post-build/common-variables.yml - parameters: - # Specifies whether to use 1ES - is1ESPipeline: true - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates-official/post-build/post-build.yml b/eng/common/templates-official/post-build/post-build.yml deleted file mode 100644 index 2364c0fd4..000000000 --- a/eng/common/templates-official/post-build/post-build.yml +++ /dev/null @@ -1,8 +0,0 @@ -stages: -- template: /eng/common/core-templates/post-build/post-build.yml - parameters: - # Specifies whether to use 1ES - is1ESPipeline: true - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/post-build/setup-maestro-vars.yml b/eng/common/templates-official/post-build/setup-maestro-vars.yml deleted file mode 100644 index 024397d87..000000000 --- a/eng/common/templates-official/post-build/setup-maestro-vars.yml +++ /dev/null @@ -1,8 +0,0 @@ -steps: -- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml - parameters: - # Specifies whether to use 1ES - is1ESPipeline: true - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates-official/steps/component-governance.yml b/eng/common/templates-official/steps/component-governance.yml deleted file mode 100644 index 30bb3985c..000000000 --- 
a/eng/common/templates-official/steps/component-governance.yml +++ /dev/null @@ -1,7 +0,0 @@ -steps: -- template: /eng/common/core-templates/steps/component-governance.yml - parameters: - is1ESPipeline: true - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/steps/enable-internal-runtimes.yml b/eng/common/templates-official/steps/enable-internal-runtimes.yml deleted file mode 100644 index f9dd238c6..000000000 --- a/eng/common/templates-official/steps/enable-internal-runtimes.yml +++ /dev/null @@ -1,9 +0,0 @@ -# Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64' -# variable with the base64-encoded SAS token, by default -steps: -- template: /eng/common/core-templates/steps/enable-internal-runtimes.yml - parameters: - is1ESPipeline: true - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/steps/enable-internal-sources.yml b/eng/common/templates-official/steps/enable-internal-sources.yml deleted file mode 100644 index e6d571822..000000000 --- a/eng/common/templates-official/steps/enable-internal-sources.yml +++ /dev/null @@ -1,7 +0,0 @@ -steps: -- template: /eng/common/core-templates/steps/enable-internal-sources.yml - parameters: - is1ESPipeline: true - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates-official/steps/generate-sbom.yml b/eng/common/templates-official/steps/generate-sbom.yml deleted file mode 100644 index 9a89a4706..000000000 --- a/eng/common/templates-official/steps/generate-sbom.yml +++ /dev/null @@ -1,7 +0,0 @@ -steps: -- template: /eng/common/core-templates/steps/generate-sbom.yml - parameters: - is1ESPipeline: true - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git 
a/eng/common/templates-official/steps/get-delegation-sas.yml b/eng/common/templates-official/steps/get-delegation-sas.yml deleted file mode 100644 index c5a9c1f82..000000000 --- a/eng/common/templates-official/steps/get-delegation-sas.yml +++ /dev/null @@ -1,7 +0,0 @@ -steps: -- template: /eng/common/core-templates/steps/get-delegation-sas.yml - parameters: - is1ESPipeline: true - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/steps/get-federated-access-token.yml b/eng/common/templates-official/steps/get-federated-access-token.yml deleted file mode 100644 index c8dcf6b81..000000000 --- a/eng/common/templates-official/steps/get-federated-access-token.yml +++ /dev/null @@ -1,7 +0,0 @@ -steps: -- template: /eng/common/core-templates/steps/get-federated-access-token.yml - parameters: - is1ESPipeline: true - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates-official/steps/publish-build-artifacts.yml b/eng/common/templates-official/steps/publish-build-artifacts.yml deleted file mode 100644 index fcf6637b2..000000000 --- a/eng/common/templates-official/steps/publish-build-artifacts.yml +++ /dev/null @@ -1,46 +0,0 @@ -parameters: -- name: displayName - type: string - default: 'Publish to Build Artifact' - -- name: condition - type: string - default: succeeded() - -- name: artifactName - type: string - -- name: pathToPublish - type: string - -- name: continueOnError - type: boolean - default: false - -- name: publishLocation - type: string - default: 'Container' - -- name: is1ESPipeline - type: boolean - default: true - -- name: retryCountOnTaskFailure - type: string - default: 10 - -steps: -- ${{ if ne(parameters.is1ESPipeline, true) }}: - - 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error -- task: 1ES.PublishBuildArtifacts@1 - displayName: ${{ 
parameters.displayName }} - condition: ${{ parameters.condition }} - ${{ if parameters.continueOnError }}: - continueOnError: ${{ parameters.continueOnError }} - inputs: - PublishLocation: ${{ parameters.publishLocation }} - PathtoPublish: ${{ parameters.pathToPublish }} - ${{ if parameters.artifactName }}: - ArtifactName: ${{ parameters.artifactName }} - ${{ if parameters.retryCountOnTaskFailure }}: - retryCountOnTaskFailure: ${{ parameters.retryCountOnTaskFailure }} diff --git a/eng/common/templates-official/steps/publish-logs.yml b/eng/common/templates-official/steps/publish-logs.yml deleted file mode 100644 index 579fd531e..000000000 --- a/eng/common/templates-official/steps/publish-logs.yml +++ /dev/null @@ -1,7 +0,0 @@ -steps: -- template: /eng/common/core-templates/steps/publish-logs.yml - parameters: - is1ESPipeline: true - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/steps/publish-pipeline-artifacts.yml b/eng/common/templates-official/steps/publish-pipeline-artifacts.yml deleted file mode 100644 index 172f9f0fd..000000000 --- a/eng/common/templates-official/steps/publish-pipeline-artifacts.yml +++ /dev/null @@ -1,28 +0,0 @@ -parameters: -- name: is1ESPipeline - type: boolean - default: true - -- name: args - type: object - default: {} - -steps: -- ${{ if ne(parameters.is1ESPipeline, true) }}: - - 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error -- task: 1ES.PublishPipelineArtifact@1 - displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }} - ${{ if parameters.args.condition }}: - condition: ${{ parameters.args.condition }} - ${{ else }}: - condition: succeeded() - ${{ if parameters.args.continueOnError }}: - continueOnError: ${{ parameters.args.continueOnError }} - inputs: - targetPath: ${{ parameters.args.targetPath }} - ${{ if parameters.args.artifactName }}: - artifactName: ${{ 
parameters.args.artifactName }} - ${{ if parameters.args.properties }}: - properties: ${{ parameters.args.properties }} - ${{ if parameters.args.sbomEnabled }}: - sbomEnabled: ${{ parameters.args.sbomEnabled }} diff --git a/eng/common/templates-official/steps/retain-build.yml b/eng/common/templates-official/steps/retain-build.yml deleted file mode 100644 index 559455150..000000000 --- a/eng/common/templates-official/steps/retain-build.yml +++ /dev/null @@ -1,7 +0,0 @@ -steps: -- template: /eng/common/core-templates/steps/retain-build.yml - parameters: - is1ESPipeline: true - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/steps/send-to-helix.yml b/eng/common/templates-official/steps/send-to-helix.yml deleted file mode 100644 index 6500f21bf..000000000 --- a/eng/common/templates-official/steps/send-to-helix.yml +++ /dev/null @@ -1,7 +0,0 @@ -steps: -- template: /eng/common/core-templates/steps/send-to-helix.yml - parameters: - is1ESPipeline: true - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/steps/source-build.yml b/eng/common/templates-official/steps/source-build.yml deleted file mode 100644 index 8f92c49e7..000000000 --- a/eng/common/templates-official/steps/source-build.yml +++ /dev/null @@ -1,7 +0,0 @@ -steps: -- template: /eng/common/core-templates/steps/source-build.yml - parameters: - is1ESPipeline: true - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/steps/source-index-stage1-publish.yml b/eng/common/templates-official/steps/source-index-stage1-publish.yml deleted file mode 100644 index 9b8b80942..000000000 --- a/eng/common/templates-official/steps/source-index-stage1-publish.yml +++ /dev/null @@ -1,7 +0,0 @@ -steps: -- template: /eng/common/core-templates/steps/source-index-stage1-publish.yml - parameters: - 
is1ESPipeline: true - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/variables/pool-providers.yml b/eng/common/templates-official/variables/pool-providers.yml deleted file mode 100644 index 1f308b24e..000000000 --- a/eng/common/templates-official/variables/pool-providers.yml +++ /dev/null @@ -1,45 +0,0 @@ -# Select a pool provider based off branch name. Anything with branch name containing 'release' must go into an -Svc pool, -# otherwise it should go into the "normal" pools. This separates out the queueing and billing of released branches. - -# Motivation: -# Once a given branch of a repository's output has been officially "shipped" once, it is then considered to be COGS -# (Cost of goods sold) and should be moved to a servicing pool provider. This allows both separation of queueing -# (allowing release builds and main PR builds to not intefere with each other) and billing (required for COGS. -# Additionally, the pool provider name itself may be subject to change when the .NET Core Engineering Services -# team needs to move resources around and create new and potentially differently-named pools. Using this template -# file from an Arcade-ified repo helps guard against both having to update one's release/* branches and renaming. - -# How to use: -# This yaml assumes your shipped product branches use the naming convention "release/..." (which many do). -# If we find alternate naming conventions in broad usage it can be added to the condition below. -# -# First, import the template in an arcade-ified repo to pick up the variables, e.g.: -# -# variables: -# - template: /eng/common/templates-official/variables/pool-providers.yml -# -# ... 
then anywhere specifying the pool provider use the runtime variables, -# $(DncEngInternalBuildPool) -# -# pool: -# name: $(DncEngInternalBuildPool) -# image: 1es-windows-2022 - -variables: - # Coalesce the target and source branches so we know when a PR targets a release branch - # If these variables are somehow missing, fall back to main (tends to have more capacity) - - # Any new -Svc alternative pools should have variables added here to allow for splitting work - - - name: DncEngInternalBuildPool - value: $[ - replace( - replace( - eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), - True, - 'NetCore1ESPool-Svc-Internal' - ), - False, - 'NetCore1ESPool-Internal' - ) - ] \ No newline at end of file diff --git a/eng/common/templates-official/variables/sdl-variables.yml b/eng/common/templates-official/variables/sdl-variables.yml deleted file mode 100644 index dbdd66d4a..000000000 --- a/eng/common/templates-official/variables/sdl-variables.yml +++ /dev/null @@ -1,7 +0,0 @@ -variables: -# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in -# sync with the packages.config file. 
-- name: DefaultGuardianVersion - value: 0.109.0 -- name: GuardianPackagesConfigFile - value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config \ No newline at end of file diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml deleted file mode 100644 index 7cbf668c2..000000000 --- a/eng/common/templates/job/job.yml +++ /dev/null @@ -1,84 +0,0 @@ -parameters: - enablePublishBuildArtifacts: false - disableComponentGovernance: '' - componentGovernanceIgnoreDirectories: '' -# Sbom related params - enableSbom: true - runAsPublic: false - PackageVersion: 9.0.0 - BuildDropPath: '$(Build.SourcesDirectory)/artifacts' - -jobs: -- template: /eng/common/core-templates/job/job.yml - parameters: - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ if and(ne(parameter.key, 'steps'), ne(parameter.key, 'is1ESPipeline')) }}: - ${{ parameter.key }}: ${{ parameter.value }} - - steps: - - ${{ each step in parameters.steps }}: - - ${{ step }} - - componentGovernanceSteps: - - template: /eng/common/templates/steps/component-governance.yml - parameters: - ${{ if eq(parameters.disableComponentGovernance, '') }}: - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}: - disableComponentGovernance: false - ${{ else }}: - disableComponentGovernance: true - ${{ else }}: - disableComponentGovernance: ${{ parameters.disableComponentGovernance }} - componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} - - artifactPublishSteps: - - ${{ if ne(parameters.artifacts.publish, '') }}: - - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), 
ne(parameters.artifacts.publish.artifacts, '')) }}: - - template: /eng/common/core-templates/steps/publish-build-artifacts.yml - parameters: - is1ESPipeline: false - args: - displayName: Publish pipeline artifacts - pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts' - publishLocation: Container - artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }} - continueOnError: true - condition: always() - retryCountOnTaskFailure: 10 # for any logs being locked - - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: - - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml - parameters: - is1ESPipeline: false - args: - targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log' - artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }} - displayName: 'Publish logs' - continueOnError: true - condition: always() - retryCountOnTaskFailure: 10 # for any logs being locked - sbomEnabled: false # we don't need SBOM for logs - - - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}: - - template: /eng/common/core-templates/steps/publish-build-artifacts.yml - parameters: - is1ESPipeline: false - args: - displayName: Publish Logs - pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)' - publishLocation: Container - artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)_Attempt$(System.JobAttempt)' ) }} - continueOnError: true - condition: always() - - - ${{ if eq(parameters.enableBuildRetry, 'true') }}: - - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml - parameters: - is1ESPipeline: false - args: - targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration' - artifactName: 'BuildConfiguration' - displayName: 'Publish build retry configuration' - 
continueOnError: true - sbomEnabled: false # we don't need SBOM for BuildConfiguration diff --git a/eng/common/templates/job/onelocbuild.yml b/eng/common/templates/job/onelocbuild.yml deleted file mode 100644 index ff829dc4c..000000000 --- a/eng/common/templates/job/onelocbuild.yml +++ /dev/null @@ -1,7 +0,0 @@ -jobs: -- template: /eng/common/core-templates/job/onelocbuild.yml - parameters: - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/job/publish-build-assets.yml b/eng/common/templates/job/publish-build-assets.yml deleted file mode 100644 index ab2edec2a..000000000 --- a/eng/common/templates/job/publish-build-assets.yml +++ /dev/null @@ -1,7 +0,0 @@ -jobs: -- template: /eng/common/core-templates/job/publish-build-assets.yml - parameters: - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/job/source-build.yml b/eng/common/templates/job/source-build.yml deleted file mode 100644 index e44d47b1d..000000000 --- a/eng/common/templates/job/source-build.yml +++ /dev/null @@ -1,7 +0,0 @@ -jobs: -- template: /eng/common/core-templates/job/source-build.yml - parameters: - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/job/source-index-stage1.yml b/eng/common/templates/job/source-index-stage1.yml deleted file mode 100644 index 89f329159..000000000 --- a/eng/common/templates/job/source-index-stage1.yml +++ /dev/null @@ -1,7 +0,0 @@ -jobs: -- template: /eng/common/core-templates/job/source-index-stage1.yml - parameters: - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/jobs/codeql-build.yml b/eng/common/templates/jobs/codeql-build.yml deleted file mode 100644 index 517f24d6a..000000000 --- 
a/eng/common/templates/jobs/codeql-build.yml +++ /dev/null @@ -1,7 +0,0 @@ -jobs: -- template: /eng/common/core-templates/jobs/codeql-build.yml - parameters: - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/jobs/jobs.yml b/eng/common/templates/jobs/jobs.yml deleted file mode 100644 index 388e9037b..000000000 --- a/eng/common/templates/jobs/jobs.yml +++ /dev/null @@ -1,7 +0,0 @@ -jobs: -- template: /eng/common/core-templates/jobs/jobs.yml - parameters: - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/jobs/source-build.yml b/eng/common/templates/jobs/source-build.yml deleted file mode 100644 index 818d4c326..000000000 --- a/eng/common/templates/jobs/source-build.yml +++ /dev/null @@ -1,7 +0,0 @@ -jobs: -- template: /eng/common/core-templates/jobs/source-build.yml - parameters: - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates/post-build/common-variables.yml b/eng/common/templates/post-build/common-variables.yml deleted file mode 100644 index 7fa105875..000000000 --- a/eng/common/templates/post-build/common-variables.yml +++ /dev/null @@ -1,8 +0,0 @@ -variables: -- template: /eng/common/core-templates/post-build/common-variables.yml - parameters: - # Specifies whether to use 1ES - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates/post-build/post-build.yml b/eng/common/templates/post-build/post-build.yml deleted file mode 100644 index 53ede714b..000000000 --- a/eng/common/templates/post-build/post-build.yml +++ /dev/null @@ -1,8 +0,0 @@ -stages: -- template: /eng/common/core-templates/post-build/post-build.yml - parameters: - # Specifies 
whether to use 1ES - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates/post-build/setup-maestro-vars.yml b/eng/common/templates/post-build/setup-maestro-vars.yml deleted file mode 100644 index a79fab5b4..000000000 --- a/eng/common/templates/post-build/setup-maestro-vars.yml +++ /dev/null @@ -1,8 +0,0 @@ -steps: -- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml - parameters: - # Specifies whether to use 1ES - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates/steps/component-governance.yml b/eng/common/templates/steps/component-governance.yml deleted file mode 100644 index c12a5f8d2..000000000 --- a/eng/common/templates/steps/component-governance.yml +++ /dev/null @@ -1,7 +0,0 @@ -steps: -- template: /eng/common/core-templates/steps/component-governance.yml - parameters: - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/enable-internal-runtimes.yml b/eng/common/templates/steps/enable-internal-runtimes.yml deleted file mode 100644 index b21a8038c..000000000 --- a/eng/common/templates/steps/enable-internal-runtimes.yml +++ /dev/null @@ -1,10 +0,0 @@ -# Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64' -# variable with the base64-encoded SAS token, by default - -steps: -- template: /eng/common/core-templates/steps/enable-internal-runtimes.yml - parameters: - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/enable-internal-sources.yml b/eng/common/templates/steps/enable-internal-sources.yml deleted file mode 100644 index 5f87e9abb..000000000 --- 
a/eng/common/templates/steps/enable-internal-sources.yml +++ /dev/null @@ -1,7 +0,0 @@ -steps: -- template: /eng/common/core-templates/steps/enable-internal-sources.yml - parameters: - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates/steps/generate-sbom.yml b/eng/common/templates/steps/generate-sbom.yml deleted file mode 100644 index 26dc00a2e..000000000 --- a/eng/common/templates/steps/generate-sbom.yml +++ /dev/null @@ -1,7 +0,0 @@ -steps: -- template: /eng/common/core-templates/steps/generate-sbom.yml - parameters: - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/get-delegation-sas.yml b/eng/common/templates/steps/get-delegation-sas.yml deleted file mode 100644 index 83760c979..000000000 --- a/eng/common/templates/steps/get-delegation-sas.yml +++ /dev/null @@ -1,7 +0,0 @@ -steps: -- template: /eng/common/core-templates/steps/get-delegation-sas.yml - parameters: - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/get-federated-access-token.yml b/eng/common/templates/steps/get-federated-access-token.yml deleted file mode 100644 index 31e151d9d..000000000 --- a/eng/common/templates/steps/get-federated-access-token.yml +++ /dev/null @@ -1,7 +0,0 @@ -steps: -- template: /eng/common/core-templates/steps/get-federated-access-token.yml - parameters: - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates/steps/publish-build-artifacts.yml b/eng/common/templates/steps/publish-build-artifacts.yml deleted file mode 100644 index 605e602e9..000000000 --- a/eng/common/templates/steps/publish-build-artifacts.yml +++ /dev/null @@ -1,46 +0,0 @@ 
-parameters: -- name: is1ESPipeline - type: boolean - default: false - -- name: displayName - type: string - default: 'Publish to Build Artifact' - -- name: condition - type: string - default: succeeded() - -- name: artifactName - type: string - -- name: pathToPublish - type: string - -- name: continueOnError - type: boolean - default: false - -- name: publishLocation - type: string - default: 'Container' - -- name: retryCountOnTaskFailure - type: string - default: 10 - -steps: -- ${{ if eq(parameters.is1ESPipeline, true) }}: - - 'eng/common/templates cannot be referenced from a 1ES managed template': error -- task: PublishBuildArtifacts@1 - displayName: ${{ parameters.displayName }} - condition: ${{ parameters.condition }} - ${{ if parameters.continueOnError }}: - continueOnError: ${{ parameters.continueOnError }} - inputs: - PublishLocation: ${{ parameters.publishLocation }} - PathtoPublish: ${{ parameters.pathToPublish }} - ${{ if parameters.artifactName }}: - ArtifactName: ${{ parameters.artifactName }} - ${{ if parameters.retryCountOnTaskFailure }}: - retryCountOnTaskFailure: ${{ parameters.retryCountOnTaskFailure }} diff --git a/eng/common/templates/steps/publish-logs.yml b/eng/common/templates/steps/publish-logs.yml deleted file mode 100644 index 4ea86bd88..000000000 --- a/eng/common/templates/steps/publish-logs.yml +++ /dev/null @@ -1,7 +0,0 @@ -steps: -- template: /eng/common/core-templates/steps/publish-logs.yml - parameters: - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/publish-pipeline-artifacts.yml b/eng/common/templates/steps/publish-pipeline-artifacts.yml deleted file mode 100644 index 5dd698b21..000000000 --- a/eng/common/templates/steps/publish-pipeline-artifacts.yml +++ /dev/null @@ -1,34 +0,0 @@ -parameters: -- name: is1ESPipeline - type: boolean - default: false - -- name: args - type: object - default: {} - -steps: -- ${{ if 
eq(parameters.is1ESPipeline, true) }}: - - 'eng/common/templates cannot be referenced from a 1ES managed template': error -- task: PublishPipelineArtifact@1 - displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }} - ${{ if parameters.args.condition }}: - condition: ${{ parameters.args.condition }} - ${{ else }}: - condition: succeeded() - ${{ if parameters.args.continueOnError }}: - continueOnError: ${{ parameters.args.continueOnError }} - inputs: - targetPath: ${{ parameters.args.targetPath }} - ${{ if parameters.args.artifactName }}: - artifactName: ${{ parameters.args.artifactName }} - ${{ if parameters.args.publishLocation }}: - publishLocation: ${{ parameters.args.publishLocation }} - ${{ if parameters.args.fileSharePath }}: - fileSharePath: ${{ parameters.args.fileSharePath }} - ${{ if parameters.args.Parallel }}: - parallel: ${{ parameters.args.Parallel }} - ${{ if parameters.args.parallelCount }}: - parallelCount: ${{ parameters.args.parallelCount }} - ${{ if parameters.args.properties }}: - properties: ${{ parameters.args.properties }} \ No newline at end of file diff --git a/eng/common/templates/steps/retain-build.yml b/eng/common/templates/steps/retain-build.yml deleted file mode 100644 index 8e841ace3..000000000 --- a/eng/common/templates/steps/retain-build.yml +++ /dev/null @@ -1,7 +0,0 @@ -steps: -- template: /eng/common/core-templates/steps/retain-build.yml - parameters: - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/send-to-helix.yml b/eng/common/templates/steps/send-to-helix.yml deleted file mode 100644 index 39f99fc27..000000000 --- a/eng/common/templates/steps/send-to-helix.yml +++ /dev/null @@ -1,7 +0,0 @@ -steps: -- template: /eng/common/core-templates/steps/send-to-helix.yml - parameters: - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git 
a/eng/common/templates/steps/source-build.yml b/eng/common/templates/steps/source-build.yml deleted file mode 100644 index 23c1d6f4e..000000000 --- a/eng/common/templates/steps/source-build.yml +++ /dev/null @@ -1,7 +0,0 @@ -steps: -- template: /eng/common/core-templates/steps/source-build.yml - parameters: - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/source-index-stage1-publish.yml b/eng/common/templates/steps/source-index-stage1-publish.yml deleted file mode 100644 index 182cec33a..000000000 --- a/eng/common/templates/steps/source-index-stage1-publish.yml +++ /dev/null @@ -1,7 +0,0 @@ -steps: -- template: /eng/common/core-templates/steps/source-index-stage1-publish.yml - parameters: - is1ESPipeline: false - - ${{ each parameter in parameters }}: - ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/vmr-sync.yml b/eng/common/templates/steps/vmr-sync.yml deleted file mode 100644 index 599afb618..000000000 --- a/eng/common/templates/steps/vmr-sync.yml +++ /dev/null @@ -1,207 +0,0 @@ -### These steps synchronize new code from product repositories into the VMR (https://github.com/dotnet/dotnet). -### They initialize the darc CLI and pull the new updates. -### Changes are applied locally onto the already cloned VMR (located in $vmrPath). - -parameters: -- name: targetRef - displayName: Target revision in dotnet/ to synchronize - type: string - default: $(Build.SourceVersion) - -- name: vmrPath - displayName: Path where the dotnet/dotnet is checked out to - type: string - default: $(Agent.BuildDirectory)/vmr - -- name: additionalSyncs - displayName: Optional list of package names whose repo's source will also be synchronized in the local VMR, e.g. 
NuGet.Protocol - type: object - default: [] - -steps: -- checkout: vmr - displayName: Clone dotnet/dotnet - path: vmr - clean: true - -- checkout: self - displayName: Clone $(Build.Repository.Name) - path: repo - fetchDepth: 0 - -# This step is needed so that when we get a detached HEAD / shallow clone, -# we still pull the commit into the temporary repo clone to use it during the sync. -# Also unshallow the clone so that forwardflow command would work. -- script: | - git branch repo-head - git rev-parse HEAD - displayName: Label PR commit - workingDirectory: $(Agent.BuildDirectory)/repo - -- script: | - vmr_sha=$(grep -oP '(?<=Sha=")[^"]*' $(Agent.BuildDirectory)/repo/eng/Version.Details.xml) - echo "##vso[task.setvariable variable=vmr_sha]$vmr_sha" - displayName: Obtain the vmr sha from Version.Details.xml (Unix) - condition: ne(variables['Agent.OS'], 'Windows_NT') - workingDirectory: $(Agent.BuildDirectory)/repo - -- powershell: | - [xml]$xml = Get-Content -Path $(Agent.BuildDirectory)/repo/eng/Version.Details.xml - $vmr_sha = $xml.SelectSingleNode("//Source").Sha - Write-Output "##vso[task.setvariable variable=vmr_sha]$vmr_sha" - displayName: Obtain the vmr sha from Version.Details.xml (Windows) - condition: eq(variables['Agent.OS'], 'Windows_NT') - workingDirectory: $(Agent.BuildDirectory)/repo - -- script: | - git fetch --all - git checkout $(vmr_sha) - displayName: Checkout VMR at correct sha for repo flow - workingDirectory: ${{ parameters.vmrPath }} - -- script: | - git config --global user.name "dotnet-maestro[bot]" - git config --global user.email "dotnet-maestro[bot]@users.noreply.github.com" - displayName: Set git author to dotnet-maestro[bot] - workingDirectory: ${{ parameters.vmrPath }} - -- script: | - ./eng/common/vmr-sync.sh \ - --vmr ${{ parameters.vmrPath }} \ - --tmp $(Agent.TempDirectory) \ - --azdev-pat '$(dn-bot-all-orgs-code-r)' \ - --ci \ - --debug - - if [ "$?" 
-ne 0 ]; then - echo "##vso[task.logissue type=error]Failed to synchronize the VMR" - exit 1 - fi - displayName: Sync repo into VMR (Unix) - condition: ne(variables['Agent.OS'], 'Windows_NT') - workingDirectory: $(Agent.BuildDirectory)/repo - -- script: | - git config --global diff.astextplain.textconv echo - git config --system core.longpaths true - displayName: Configure Windows git (longpaths, astextplain) - condition: eq(variables['Agent.OS'], 'Windows_NT') - -- powershell: | - ./eng/common/vmr-sync.ps1 ` - -vmr ${{ parameters.vmrPath }} ` - -tmp $(Agent.TempDirectory) ` - -azdevPat '$(dn-bot-all-orgs-code-r)' ` - -ci ` - -debugOutput - - if ($LASTEXITCODE -ne 0) { - echo "##vso[task.logissue type=error]Failed to synchronize the VMR" - exit 1 - } - displayName: Sync repo into VMR (Windows) - condition: eq(variables['Agent.OS'], 'Windows_NT') - workingDirectory: $(Agent.BuildDirectory)/repo - -- ${{ if eq(variables['Build.Reason'], 'PullRequest') }}: - - task: CopyFiles@2 - displayName: Collect failed patches - condition: failed() - inputs: - SourceFolder: '$(Agent.TempDirectory)' - Contents: '*.patch' - TargetFolder: '$(Build.ArtifactStagingDirectory)/FailedPatches' - - - publish: '$(Build.ArtifactStagingDirectory)/FailedPatches' - artifact: $(System.JobDisplayName)_FailedPatches - displayName: Upload failed patches - condition: failed() - -- ${{ each assetName in parameters.additionalSyncs }}: - # The vmr-sync script ends up staging files in the local VMR so we have to commit those - - script: - git commit --allow-empty -am "Forward-flow $(Build.Repository.Name)" - displayName: Commit local VMR changes - workingDirectory: ${{ parameters.vmrPath }} - - - script: | - set -ex - - echo "Searching for details of asset ${{ assetName }}..." 
- - # Use darc to get dependencies information - dependencies=$(./.dotnet/dotnet darc get-dependencies --name '${{ assetName }}' --ci) - - # Extract repository URL and commit hash - repository=$(echo "$dependencies" | grep 'Repo:' | sed 's/Repo:[[:space:]]*//' | head -1) - - if [ -z "$repository" ]; then - echo "##vso[task.logissue type=error]Asset ${{ assetName }} not found in the dependency list" - exit 1 - fi - - commit=$(echo "$dependencies" | grep 'Commit:' | sed 's/Commit:[[:space:]]*//' | head -1) - - echo "Updating the VMR from $repository / $commit..." - cd .. - git clone $repository ${{ assetName }} - cd ${{ assetName }} - git checkout $commit - git branch "sync/$commit" - - ./eng/common/vmr-sync.sh \ - --vmr ${{ parameters.vmrPath }} \ - --tmp $(Agent.TempDirectory) \ - --azdev-pat '$(dn-bot-all-orgs-code-r)' \ - --ci \ - --debug - - if [ "$?" -ne 0 ]; then - echo "##vso[task.logissue type=error]Failed to synchronize the VMR" - exit 1 - fi - displayName: Sync ${{ assetName }} into (Unix) - condition: ne(variables['Agent.OS'], 'Windows_NT') - workingDirectory: $(Agent.BuildDirectory)/repo - - - powershell: | - $ErrorActionPreference = 'Stop' - - Write-Host "Searching for details of asset ${{ assetName }}..." - - $dependencies = .\.dotnet\dotnet darc get-dependencies --name '${{ assetName }}' --ci - - $repository = $dependencies | Select-String -Pattern 'Repo:\s+([^\s]+)' | Select-Object -First 1 - $repository -match 'Repo:\s+([^\s]+)' | Out-Null - $repository = $matches[1] - - if ($repository -eq $null) { - Write-Error "Asset ${{ assetName }} not found in the dependency list" - exit 1 - } - - $commit = $dependencies | Select-String -Pattern 'Commit:\s+([^\s]+)' | Select-Object -First 1 - $commit -match 'Commit:\s+([^\s]+)' | Out-Null - $commit = $matches[1] - - Write-Host "Updating the VMR from $repository / $commit..." - cd .. 
- git clone $repository ${{ assetName }} - cd ${{ assetName }} - git checkout $commit - git branch "sync/$commit" - - .\eng\common\vmr-sync.ps1 ` - -vmr ${{ parameters.vmrPath }} ` - -tmp $(Agent.TempDirectory) ` - -azdevPat '$(dn-bot-all-orgs-code-r)' ` - -ci ` - -debugOutput - - if ($LASTEXITCODE -ne 0) { - echo "##vso[task.logissue type=error]Failed to synchronize the VMR" - exit 1 - } - displayName: Sync ${{ assetName }} into (Windows) - condition: ne(variables['Agent.OS'], 'Windows_NT') - workingDirectory: $(Agent.BuildDirectory)/repo diff --git a/eng/common/templates/variables/pool-providers.yml b/eng/common/templates/variables/pool-providers.yml deleted file mode 100644 index e0b19c14a..000000000 --- a/eng/common/templates/variables/pool-providers.yml +++ /dev/null @@ -1,59 +0,0 @@ -# Select a pool provider based off branch name. Anything with branch name containing 'release' must go into an -Svc pool, -# otherwise it should go into the "normal" pools. This separates out the queueing and billing of released branches. - -# Motivation: -# Once a given branch of a repository's output has been officially "shipped" once, it is then considered to be COGS -# (Cost of goods sold) and should be moved to a servicing pool provider. This allows both separation of queueing -# (allowing release builds and main PR builds to not intefere with each other) and billing (required for COGS. -# Additionally, the pool provider name itself may be subject to change when the .NET Core Engineering Services -# team needs to move resources around and create new and potentially differently-named pools. Using this template -# file from an Arcade-ified repo helps guard against both having to update one's release/* branches and renaming. - -# How to use: -# This yaml assumes your shipped product branches use the naming convention "release/..." (which many do). -# If we find alternate naming conventions in broad usage it can be added to the condition below. 
-# -# First, import the template in an arcade-ified repo to pick up the variables, e.g.: -# -# variables: -# - template: /eng/common/templates/variables/pool-providers.yml -# -# ... then anywhere specifying the pool provider use the runtime variables, -# $(DncEngInternalBuildPool) and $ (DncEngPublicBuildPool), e.g.: -# -# pool: -# name: $(DncEngInternalBuildPool) -# demands: ImageOverride -equals windows.vs2019.amd64 -variables: - - ${{ if eq(variables['System.TeamProject'], 'internal') }}: - - template: /eng/common/templates-official/variables/pool-providers.yml - - ${{ else }}: - # Coalesce the target and source branches so we know when a PR targets a release branch - # If these variables are somehow missing, fall back to main (tends to have more capacity) - - # Any new -Svc alternative pools should have variables added here to allow for splitting work - - name: DncEngPublicBuildPool - value: $[ - replace( - replace( - eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), - True, - 'NetCore-Svc-Public' - ), - False, - 'NetCore-Public' - ) - ] - - - name: DncEngInternalBuildPool - value: $[ - replace( - replace( - eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), - True, - 'NetCore1ESPool-Svc-Internal' - ), - False, - 'NetCore1ESPool-Internal' - ) - ] diff --git a/eng/common/templates/vmr-build-pr.yml b/eng/common/templates/vmr-build-pr.yml deleted file mode 100644 index ce3c29a62..000000000 --- a/eng/common/templates/vmr-build-pr.yml +++ /dev/null @@ -1,42 +0,0 @@ -# This pipeline is used for running the VMR verification of the PR changes in repo-level PRs. 
-# -# It will run a full set of verification jobs defined in: -# https://github.com/dotnet/dotnet/blob/10060d128e3f470e77265f8490f5e4f72dae738e/eng/pipelines/templates/stages/vmr-build.yml#L27-L38 -# -# For repos that do not need to run the full set, you would do the following: -# -# 1. Copy this YML file to a repo-specific location, i.e. outside of eng/common. -# -# 2. Add `verifications` parameter to VMR template reference -# -# Examples: -# - For source-build stage 1 verification, add the following: -# verifications: [ "source-build-stage1" ] -# -# - For Windows only verifications, add the following: -# verifications: [ "unified-build-windows-x64", "unified-build-windows-x86" ] - -trigger: none -pr: none - -variables: -- template: /eng/common/templates/variables/pool-providers.yml@self - -- name: skipComponentGovernanceDetection # we run CG on internal builds only - value: true - -- name: Codeql.Enabled # we run CodeQL on internal builds only - value: false - -resources: - repositories: - - repository: vmr - type: github - name: dotnet/dotnet - endpoint: dotnet - -stages: -- template: /eng/pipelines/templates/stages/vmr-build.yml@vmr - parameters: - isBuiltFromVmr: false - scope: lite diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1 deleted file mode 100644 index 996a5f9c8..000000000 --- a/eng/common/tools.ps1 +++ /dev/null @@ -1,942 +0,0 @@ -# Initialize variables if they aren't already defined. -# These may be defined as parameters of the importing script, or set after importing this script. - -# CI mode - set to true on CI server for PR validation build or official build. -[bool]$ci = if (Test-Path variable:ci) { $ci } else { $false } - -# Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names. 
-[string]$configuration = if (Test-Path variable:configuration) { $configuration } else { 'Debug' } - -# Set to true to opt out of outputting binary log while running in CI -[bool]$excludeCIBinarylog = if (Test-Path variable:excludeCIBinarylog) { $excludeCIBinarylog } else { $false } - -# Set to true to output binary log from msbuild. Note that emitting binary log slows down the build. -[bool]$binaryLog = if (Test-Path variable:binaryLog) { $binaryLog } else { $ci -and !$excludeCIBinarylog } - -# Set to true to use the pipelines logger which will enable Azure logging output. -# https://github.com/Microsoft/azure-pipelines-tasks/blob/master/docs/authoring/commands.md -# This flag is meant as a temporary opt-opt for the feature while validate it across -# our consumers. It will be deleted in the future. -[bool]$pipelinesLog = if (Test-Path variable:pipelinesLog) { $pipelinesLog } else { $ci } - -# Turns on machine preparation/clean up code that changes the machine state (e.g. kills build processes). -[bool]$prepareMachine = if (Test-Path variable:prepareMachine) { $prepareMachine } else { $false } - -# True to restore toolsets and dependencies. -[bool]$restore = if (Test-Path variable:restore) { $restore } else { $true } - -# Adjusts msbuild verbosity level. -[string]$verbosity = if (Test-Path variable:verbosity) { $verbosity } else { 'minimal' } - -# Set to true to reuse msbuild nodes. Recommended to not reuse on CI. -[bool]$nodeReuse = if (Test-Path variable:nodeReuse) { $nodeReuse } else { !$ci } - -# Configures warning treatment in msbuild. -[bool]$warnAsError = if (Test-Path variable:warnAsError) { $warnAsError } else { $true } - -# Specifies which msbuild engine to use for build: 'vs', 'dotnet' or unspecified (determined based on presence of tools.vs in global.json). 
-[string]$msbuildEngine = if (Test-Path variable:msbuildEngine) { $msbuildEngine } else { $null } - -# True to attempt using .NET Core already that meets requirements specified in global.json -# installed on the machine instead of downloading one. -[bool]$useInstalledDotNetCli = if (Test-Path variable:useInstalledDotNetCli) { $useInstalledDotNetCli } else { $true } - -# Enable repos to use a particular version of the on-line dotnet-install scripts. -# default URL: https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.ps1 -[string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { 'v1' } - -# True to use global NuGet cache instead of restoring packages to repository-local directory. -[bool]$useGlobalNuGetCache = if (Test-Path variable:useGlobalNuGetCache) { $useGlobalNuGetCache } else { !$ci } - -# True to exclude prerelease versions Visual Studio during build -[bool]$excludePrereleaseVS = if (Test-Path variable:excludePrereleaseVS) { $excludePrereleaseVS } else { $false } - -# An array of names of processes to stop on script exit if prepareMachine is true. -$processesToStopOnExit = if (Test-Path variable:processesToStopOnExit) { $processesToStopOnExit } else { @('msbuild', 'dotnet', 'vbcscompiler') } - -$disableConfigureToolsetImport = if (Test-Path variable:disableConfigureToolsetImport) { $disableConfigureToolsetImport } else { $null } - -set-strictmode -version 2.0 -$ErrorActionPreference = 'Stop' -[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 - -# If specifies, provides an alternate path for getting .NET Core SDKs and Runtimes. This script will still try public sources first. 
-[string]$runtimeSourceFeed = if (Test-Path variable:runtimeSourceFeed) { $runtimeSourceFeed } else { $null } -# Base-64 encoded SAS token that has permission to storage container described by $runtimeSourceFeed -[string]$runtimeSourceFeedKey = if (Test-Path variable:runtimeSourceFeedKey) { $runtimeSourceFeedKey } else { $null } - -# True when the build is running within the VMR. -[bool]$fromVMR = if (Test-Path variable:fromVMR) { $fromVMR } else { $false } - -function Create-Directory ([string[]] $path) { - New-Item -Path $path -Force -ItemType 'Directory' | Out-Null -} - -function Unzip([string]$zipfile, [string]$outpath) { - Add-Type -AssemblyName System.IO.Compression.FileSystem - [System.IO.Compression.ZipFile]::ExtractToDirectory($zipfile, $outpath) -} - -# This will exec a process using the console and return it's exit code. -# This will not throw when the process fails. -# Returns process exit code. -function Exec-Process([string]$command, [string]$commandArgs) { - $startInfo = New-Object System.Diagnostics.ProcessStartInfo - $startInfo.FileName = $command - $startInfo.Arguments = $commandArgs - $startInfo.UseShellExecute = $false - $startInfo.WorkingDirectory = Get-Location - - $process = New-Object System.Diagnostics.Process - $process.StartInfo = $startInfo - $process.Start() | Out-Null - - $finished = $false - try { - while (-not $process.WaitForExit(100)) { - # Non-blocking loop done to allow ctr-c interrupts - } - - $finished = $true - return $global:LASTEXITCODE = $process.ExitCode - } - finally { - # If we didn't finish then an error occurred or the user hit ctrl-c. Either - # way kill the process - if (-not $finished) { - $process.Kill() - } - } -} - -# Take the given block, print it, print what the block probably references from the current set of -# variables using low-effort string matching, then run the block. 
-# -# This is intended to replace the pattern of manually copy-pasting a command, wrapping it in quotes, -# and printing it using "Write-Host". The copy-paste method is more readable in build logs, but less -# maintainable and less reliable. It is easy to make a mistake and modify the command without -# properly updating the "Write-Host" line, resulting in misleading build logs. The probability of -# this mistake makes the pattern hard to trust when it shows up in build logs. Finding the bug in -# existing source code can also be difficult, because the strings are not aligned to each other and -# the line may be 300+ columns long. -# -# By removing the need to maintain two copies of the command, Exec-BlockVerbosely avoids the issues. -# -# In Bash (or any posix-like shell), "set -x" prints usable verbose output automatically. -# "Set-PSDebug" appears to be similar at first glance, but unfortunately, it isn't very useful: it -# doesn't print any info about the variables being used by the command, which is normally the -# interesting part to diagnose. -function Exec-BlockVerbosely([scriptblock] $block) { - Write-Host "--- Running script block:" - $blockString = $block.ToString().Trim() - Write-Host $blockString - - Write-Host "--- List of variables that might be used:" - # For each variable x in the environment, check the block for a reference to x via simple "$x" or - # "@x" syntax. This doesn't detect other ways to reference variables ("${x}" nor "$variable:x", - # among others). It only catches what this function was originally written for: simple - # command-line commands. - $variableTable = Get-Variable | - Where-Object { - $blockString.Contains("`$$($_.Name)") -or $blockString.Contains("@$($_.Name)") - } | - Format-Table -AutoSize -HideTableHeaders -Wrap | - Out-String - Write-Host $variableTable.Trim() - - Write-Host "--- Executing:" - & $block - Write-Host "--- Done running script block!" 
-} - -# createSdkLocationFile parameter enables a file being generated under the toolset directory -# which writes the sdk's location into. This is only necessary for cmd --> powershell invocations -# as dot sourcing isn't possible. -function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) { - if (Test-Path variable:global:_DotNetInstallDir) { - return $global:_DotNetInstallDir - } - - # Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism - $env:DOTNET_MULTILEVEL_LOOKUP=0 - - # Disable first run since we do not need all ASP.NET packages restored. - $env:DOTNET_NOLOGO=1 - - # Disable telemetry on CI. - if ($ci) { - $env:DOTNET_CLI_TELEMETRY_OPTOUT=1 - } - - # Find the first path on %PATH% that contains the dotnet.exe - if ($useInstalledDotNetCli -and (-not $globalJsonHasRuntimes) -and ($env:DOTNET_INSTALL_DIR -eq $null)) { - $dotnetExecutable = GetExecutableFileName 'dotnet' - $dotnetCmd = Get-Command $dotnetExecutable -ErrorAction SilentlyContinue - - if ($dotnetCmd -ne $null) { - $env:DOTNET_INSTALL_DIR = Split-Path $dotnetCmd.Path -Parent - } - } - - $dotnetSdkVersion = $GlobalJson.tools.dotnet - - # Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version, - # otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues. 
- if ((-not $globalJsonHasRuntimes) -and (-not [string]::IsNullOrEmpty($env:DOTNET_INSTALL_DIR)) -and (Test-Path(Join-Path $env:DOTNET_INSTALL_DIR "sdk\$dotnetSdkVersion"))) { - $dotnetRoot = $env:DOTNET_INSTALL_DIR - } else { - $dotnetRoot = Join-Path $RepoRoot '.dotnet' - - if (-not (Test-Path(Join-Path $dotnetRoot "sdk\$dotnetSdkVersion"))) { - if ($install) { - InstallDotNetSdk $dotnetRoot $dotnetSdkVersion - } else { - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to find dotnet with SDK version '$dotnetSdkVersion'" - ExitWithExitCode 1 - } - } - - $env:DOTNET_INSTALL_DIR = $dotnetRoot - } - - # Creates a temporary file under the toolset dir. - # The following code block is protecting against concurrent access so that this function can - # be called in parallel. - if ($createSdkLocationFile) { - do { - $sdkCacheFileTemp = Join-Path $ToolsetDir $([System.IO.Path]::GetRandomFileName()) - } - until (!(Test-Path $sdkCacheFileTemp)) - Set-Content -Path $sdkCacheFileTemp -Value $dotnetRoot - - try { - Move-Item -Force $sdkCacheFileTemp (Join-Path $ToolsetDir 'sdk.txt') - } catch { - # Somebody beat us - Remove-Item -Path $sdkCacheFileTemp - } - } - - # Add dotnet to PATH. This prevents any bare invocation of dotnet in custom - # build steps from using anything other than what we've downloaded. - # It also ensures that VS msbuild will use the downloaded sdk targets. 
- $env:PATH = "$dotnetRoot;$env:PATH" - - # Make Sure that our bootstrapped dotnet cli is available in future steps of the Azure Pipelines build - Write-PipelinePrependPath -Path $dotnetRoot - - Write-PipelineSetVariable -Name 'DOTNET_MULTILEVEL_LOOKUP' -Value '0' - Write-PipelineSetVariable -Name 'DOTNET_NOLOGO' -Value '1' - - return $global:_DotNetInstallDir = $dotnetRoot -} - -function Retry($downloadBlock, $maxRetries = 5) { - $retries = 1 - - while($true) { - try { - & $downloadBlock - break - } - catch { - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_ - } - - if (++$retries -le $maxRetries) { - $delayInSeconds = [math]::Pow(2, $retries) - 1 # Exponential backoff - Write-Host "Retrying. Waiting for $delayInSeconds seconds before next attempt ($retries of $maxRetries)." - Start-Sleep -Seconds $delayInSeconds - } - else { - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to download file in $maxRetries attempts." - break - } - } -} - -function GetDotNetInstallScript([string] $dotnetRoot) { - $installScript = Join-Path $dotnetRoot 'dotnet-install.ps1' - if (!(Test-Path $installScript)) { - Create-Directory $dotnetRoot - $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit - $uri = "https://builds.dotnet.microsoft.com/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.ps1" - - Retry({ - Write-Host "GET $uri" - Invoke-WebRequest $uri -OutFile $installScript - }) - } - - return $installScript -} - -function InstallDotNetSdk([string] $dotnetRoot, [string] $version, [string] $architecture = '', [switch] $noPath) { - InstallDotNet $dotnetRoot $version $architecture '' $false $runtimeSourceFeed $runtimeSourceFeedKey -noPath:$noPath -} - -function InstallDotNet([string] $dotnetRoot, - [string] $version, - [string] $architecture = '', - [string] $runtime = '', - [bool] $skipNonVersionedFiles = $false, - [string] $runtimeSourceFeed = '', - [string] 
$runtimeSourceFeedKey = '', - [switch] $noPath) { - - $dotnetVersionLabel = "'sdk v$version'" - - if ($runtime -ne '' -and $runtime -ne 'sdk') { - $runtimePath = $dotnetRoot - $runtimePath = $runtimePath + "\shared" - if ($runtime -eq "dotnet") { $runtimePath = $runtimePath + "\Microsoft.NETCore.App" } - if ($runtime -eq "aspnetcore") { $runtimePath = $runtimePath + "\Microsoft.AspNetCore.App" } - if ($runtime -eq "windowsdesktop") { $runtimePath = $runtimePath + "\Microsoft.WindowsDesktop.App" } - $runtimePath = $runtimePath + "\" + $version - - $dotnetVersionLabel = "runtime toolset '$runtime/$architecture v$version'" - - if (Test-Path $runtimePath) { - Write-Host " Runtime toolset '$runtime/$architecture v$version' already installed." - $installSuccess = $true - Exit - } - } - - $installScript = GetDotNetInstallScript $dotnetRoot - $installParameters = @{ - Version = $version - InstallDir = $dotnetRoot - } - - if ($architecture) { $installParameters.Architecture = $architecture } - if ($runtime) { $installParameters.Runtime = $runtime } - if ($skipNonVersionedFiles) { $installParameters.SkipNonVersionedFiles = $skipNonVersionedFiles } - if ($noPath) { $installParameters.NoPath = $True } - - $variations = @() - $variations += @($installParameters) - - $dotnetBuilds = $installParameters.Clone() - $dotnetbuilds.AzureFeed = "https://ci.dot.net/public" - $variations += @($dotnetBuilds) - - if ($runtimeSourceFeed) { - $runtimeSource = $installParameters.Clone() - $runtimeSource.AzureFeed = $runtimeSourceFeed - if ($runtimeSourceFeedKey) { - $decodedBytes = [System.Convert]::FromBase64String($runtimeSourceFeedKey) - $decodedString = [System.Text.Encoding]::UTF8.GetString($decodedBytes) - $runtimeSource.FeedCredential = $decodedString - } - $variations += @($runtimeSource) - } - - $installSuccess = $false - foreach ($variation in $variations) { - if ($variation | Get-Member AzureFeed) { - $location = $variation.AzureFeed - } else { - $location = "public 
location"; - } - Write-Host " Attempting to install $dotnetVersionLabel from $location." - try { - & $installScript @variation - $installSuccess = $true - break - } - catch { - Write-Host " Failed to install $dotnetVersionLabel from $location." - } - } - if (-not $installSuccess) { - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Failed to install $dotnetVersionLabel from any of the specified locations." - ExitWithExitCode 1 - } -} - -# -# Locates Visual Studio MSBuild installation. -# The preference order for MSBuild to use is as follows: -# -# 1. MSBuild from an active VS command prompt -# 2. MSBuild from a compatible VS installation -# 3. MSBuild from the xcopy tool package -# -# Returns full path to msbuild.exe. -# Throws on failure. -# -function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = $null) { - if (-not (IsWindowsPlatform)) { - throw "Cannot initialize Visual Studio on non-Windows" - } - - if (Test-Path variable:global:_MSBuildExe) { - return $global:_MSBuildExe - } - - # Minimum VS version to require. - $vsMinVersionReqdStr = '17.7' - $vsMinVersionReqd = [Version]::new($vsMinVersionReqdStr) - - # If the version of msbuild is going to be xcopied, - # use this version. Version matches a package here: - # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/Microsoft.DotNet.Arcade.MSBuild.Xcopy/versions/17.13.0 - $defaultXCopyMSBuildVersion = '17.13.0' - - if (!$vsRequirements) { - if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') { - $vsRequirements = $GlobalJson.tools.vs - } - else { - $vsRequirements = New-Object PSObject -Property @{ version = $vsMinVersionReqdStr } - } - } - $vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { $vsMinVersionReqdStr } - $vsMinVersion = [Version]::new($vsMinVersionStr) - - # Try msbuild command available in the environment. 
- if ($env:VSINSTALLDIR -ne $null) { - $msbuildCmd = Get-Command 'msbuild.exe' -ErrorAction SilentlyContinue - if ($msbuildCmd -ne $null) { - # Workaround for https://github.com/dotnet/roslyn/issues/35793 - # Due to this issue $msbuildCmd.Version returns 0.0.0.0 for msbuild.exe 16.2+ - $msbuildVersion = [Version]::new((Get-Item $msbuildCmd.Path).VersionInfo.ProductVersion.Split([char[]]@('-', '+'))[0]) - - if ($msbuildVersion -ge $vsMinVersion) { - return $global:_MSBuildExe = $msbuildCmd.Path - } - - # Report error - the developer environment is initialized with incompatible VS version. - throw "Developer Command Prompt for VS $($env:VisualStudioVersion) is not recent enough. Please upgrade to $vsMinVersionStr or build from a plain CMD window" - } - } - - # Locate Visual Studio installation or download x-copy msbuild. - $vsInfo = LocateVisualStudio $vsRequirements - if ($vsInfo -ne $null -and $env:ForceUseXCopyMSBuild -eq $null) { - # Ensure vsInstallDir has a trailing slash - $vsInstallDir = Join-Path $vsInfo.installationPath "\" - $vsMajorVersion = $vsInfo.installationVersion.Split('.')[0] - - InitializeVisualStudioEnvironmentVariables $vsInstallDir $vsMajorVersion - } else { - if (Get-Member -InputObject $GlobalJson.tools -Name 'xcopy-msbuild') { - $xcopyMSBuildVersion = $GlobalJson.tools.'xcopy-msbuild' - $vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0] - } else { - #if vs version provided in global.json is incompatible (too low) then use the default version for xcopy msbuild download - if($vsMinVersion -lt $vsMinVersionReqd){ - Write-Host "Using xcopy-msbuild version of $defaultXCopyMSBuildVersion since VS version $vsMinVersionStr provided in global.json is not compatible" - $xcopyMSBuildVersion = $defaultXCopyMSBuildVersion - $vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0] - } - else{ - # If the VS version IS compatible, look for an xcopy msbuild package - # with a version matching VS. 
- # Note: If this version does not exist, then an explicit version of xcopy msbuild - # can be specified in global.json. This will be required for pre-release versions of msbuild. - $vsMajorVersion = $vsMinVersion.Major - $vsMinorVersion = $vsMinVersion.Minor - $xcopyMSBuildVersion = "$vsMajorVersion.$vsMinorVersion.0" - } - } - - $vsInstallDir = $null - if ($xcopyMSBuildVersion.Trim() -ine "none") { - $vsInstallDir = InitializeXCopyMSBuild $xcopyMSBuildVersion $install - if ($vsInstallDir -eq $null) { - throw "Could not xcopy msbuild. Please check that package 'Microsoft.DotNet.Arcade.MSBuild.Xcopy @ $xcopyMSBuildVersion' exists on feed 'dotnet-eng'." - } - } - if ($vsInstallDir -eq $null) { - throw 'Unable to find Visual Studio that has required version and components installed' - } - } - - $msbuildVersionDir = if ([int]$vsMajorVersion -lt 16) { "$vsMajorVersion.0" } else { "Current" } - - $local:BinFolder = Join-Path $vsInstallDir "MSBuild\$msbuildVersionDir\Bin" - $local:Prefer64bit = if (Get-Member -InputObject $vsRequirements -Name 'Prefer64bit') { $vsRequirements.Prefer64bit } else { $false } - if ($local:Prefer64bit -and (Test-Path(Join-Path $local:BinFolder "amd64"))) { - $global:_MSBuildExe = Join-Path $local:BinFolder "amd64\msbuild.exe" - } else { - $global:_MSBuildExe = Join-Path $local:BinFolder "msbuild.exe" - } - - return $global:_MSBuildExe -} - -function InitializeVisualStudioEnvironmentVariables([string] $vsInstallDir, [string] $vsMajorVersion) { - $env:VSINSTALLDIR = $vsInstallDir - Set-Item "env:VS$($vsMajorVersion)0COMNTOOLS" (Join-Path $vsInstallDir "Common7\Tools\") - - $vsSdkInstallDir = Join-Path $vsInstallDir "VSSDK\" - if (Test-Path $vsSdkInstallDir) { - Set-Item "env:VSSDK$($vsMajorVersion)0Install" $vsSdkInstallDir - $env:VSSDKInstall = $vsSdkInstallDir - } -} - -function InstallXCopyMSBuild([string]$packageVersion) { - return InitializeXCopyMSBuild $packageVersion -install $true -} - -function 
InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) { - $packageName = 'Microsoft.DotNet.Arcade.MSBuild.Xcopy' - $packageDir = Join-Path $ToolsDir "msbuild\$packageVersion" - $packagePath = Join-Path $packageDir "$packageName.$packageVersion.nupkg" - - if (!(Test-Path $packageDir)) { - if (!$install) { - return $null - } - - Create-Directory $packageDir - - Write-Host "Downloading $packageName $packageVersion" - $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit - Retry({ - Invoke-WebRequest "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/flat2/$packageName/$packageVersion/$packageName.$packageVersion.nupkg" -OutFile $packagePath - }) - - if (!(Test-Path $packagePath)) { - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "See https://dev.azure.com/dnceng/internal/_wiki/wikis/DNCEng%20Services%20Wiki/1074/Updating-Microsoft.DotNet.Arcade.MSBuild.Xcopy-WAS-RoslynTools.MSBuild-(xcopy-msbuild)-generation?anchor=troubleshooting for help troubleshooting issues with XCopy MSBuild" - throw - } - Unzip $packagePath $packageDir - } - - return Join-Path $packageDir 'tools' -} - -# -# Locates Visual Studio instance that meets the minimal requirements specified by tools.vs object in global.json. -# -# The following properties of tools.vs are recognized: -# "version": "{major}.{minor}" -# Two part minimal VS version, e.g. "15.9", "16.0", etc. -# "components": ["componentId1", "componentId2", ...] -# Array of ids of workload components that must be available in the VS instance. -# See e.g. https://docs.microsoft.com/en-us/visualstudio/install/workload-component-id-vs-enterprise?view=vs-2017 -# -# Returns JSON describing the located VS instance (same format as returned by vswhere), -# or $null if no instance meeting the requirements is found on the machine. 
-# -function LocateVisualStudio([object]$vsRequirements = $null){ - if (-not (IsWindowsPlatform)) { - throw "Cannot run vswhere on non-Windows platforms." - } - - if (Get-Member -InputObject $GlobalJson.tools -Name 'vswhere') { - $vswhereVersion = $GlobalJson.tools.vswhere - } else { - $vswhereVersion = '2.5.2' - } - - $vsWhereDir = Join-Path $ToolsDir "vswhere\$vswhereVersion" - $vsWhereExe = Join-Path $vsWhereDir 'vswhere.exe' - - if (!(Test-Path $vsWhereExe)) { - Create-Directory $vsWhereDir - Write-Host 'Downloading vswhere' - Retry({ - Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe - }) - } - - if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs } - $args = @('-latest', '-format', 'json', '-requires', 'Microsoft.Component.MSBuild', '-products', '*') - - if (!$excludePrereleaseVS) { - $args += '-prerelease' - } - - if (Get-Member -InputObject $vsRequirements -Name 'version') { - $args += '-version' - $args += $vsRequirements.version - } - - if (Get-Member -InputObject $vsRequirements -Name 'components') { - foreach ($component in $vsRequirements.components) { - $args += '-requires' - $args += $component - } - } - - $vsInfo =& $vsWhereExe $args | ConvertFrom-Json - - if ($lastExitCode -ne 0) { - return $null - } - - # use first matching instance - return $vsInfo[0] -} - -function InitializeBuildTool() { - if (Test-Path variable:global:_BuildTool) { - # If the requested msbuild parameters do not match, clear the cached variables. 
- if($global:_BuildTool.Contains('ExcludePrereleaseVS') -and $global:_BuildTool.ExcludePrereleaseVS -ne $excludePrereleaseVS) { - Remove-Item variable:global:_BuildTool - Remove-Item variable:global:_MSBuildExe - } else { - return $global:_BuildTool - } - } - - if (-not $msbuildEngine) { - $msbuildEngine = GetDefaultMSBuildEngine - } - - # Initialize dotnet cli if listed in 'tools' - $dotnetRoot = $null - if (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') { - $dotnetRoot = InitializeDotNetCli -install:$restore - } - - if ($msbuildEngine -eq 'dotnet') { - if (!$dotnetRoot) { - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "/global.json must specify 'tools.dotnet'." - ExitWithExitCode 1 - } - $dotnetPath = Join-Path $dotnetRoot (GetExecutableFileName 'dotnet') - - $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'net' } - } elseif ($msbuildEngine -eq "vs") { - try { - $msbuildPath = InitializeVisualStudioMSBuild -install:$restore - } catch { - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_ - ExitWithExitCode 1 - } - - $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "netframework"; ExcludePrereleaseVS = $excludePrereleaseVS } - } else { - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'." - ExitWithExitCode 1 - } - - return $global:_BuildTool = $buildTool -} - -function GetDefaultMSBuildEngine() { - # Presence of tools.vs indicates the repo needs to build using VS msbuild on Windows. - if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') { - return 'vs' - } - - if (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') { - return 'dotnet' - } - - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "-msbuildEngine must be specified, or /global.json must specify 'tools.dotnet' or 'tools.vs'." 
- ExitWithExitCode 1 -} - -function GetNuGetPackageCachePath() { - if ($env:NUGET_PACKAGES -eq $null) { - # Use local cache on CI to ensure deterministic build. - # Avoid using the http cache as workaround for https://github.com/NuGet/Home/issues/3116 - # use global cache in dev builds to avoid cost of downloading packages. - # For directory normalization, see also: https://github.com/NuGet/Home/issues/7968 - if ($useGlobalNuGetCache) { - $env:NUGET_PACKAGES = Join-Path $env:UserProfile '.nuget\packages\' - } else { - $env:NUGET_PACKAGES = Join-Path $RepoRoot '.packages\' - } - } - - return $env:NUGET_PACKAGES -} - -# Returns a full path to an Arcade SDK task project file. -function GetSdkTaskProject([string]$taskName) { - return Join-Path (Split-Path (InitializeToolset) -Parent) "SdkTasks\$taskName.proj" -} - -function InitializeNativeTools() { - if (-Not (Test-Path variable:DisableNativeToolsetInstalls) -And (Get-Member -InputObject $GlobalJson -Name "native-tools")) { - $nativeArgs= @{} - if ($ci) { - $nativeArgs = @{ - InstallDirectory = "$ToolsDir" - } - } - if ($env:NativeToolsOnMachine) { - Write-Host "Variable NativeToolsOnMachine detected, enabling native tool path promotion..." - $nativeArgs += @{ PathPromotion = $true } - } - & "$PSScriptRoot/init-tools-native.ps1" @nativeArgs - } -} - -function Read-ArcadeSdkVersion() { - return $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk' -} - -function InitializeToolset() { - # For Unified Build/Source-build support, check whether the environment variable is - # set. If it is, then use this as the toolset build project. 
- if ($env:_InitializeToolset -ne $null) { - return $global:_InitializeToolset = $env:_InitializeToolset - } - - if (Test-Path variable:global:_InitializeToolset) { - return $global:_InitializeToolset - } - - $nugetCache = GetNuGetPackageCachePath - - $toolsetVersion = Read-ArcadeSdkVersion - $toolsetLocationFile = Join-Path $ToolsetDir "$toolsetVersion.txt" - - if (Test-Path $toolsetLocationFile) { - $path = Get-Content $toolsetLocationFile -TotalCount 1 - if (Test-Path $path) { - return $global:_InitializeToolset = $path - } - } - - if (-not $restore) { - Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Toolset version $toolsetVersion has not been restored." - ExitWithExitCode 1 - } - - $buildTool = InitializeBuildTool - - $proj = Join-Path $ToolsetDir 'restore.proj' - $bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'ToolsetRestore.binlog') } else { '' } - - '' | Set-Content $proj - - MSBuild-Core $proj $bl /t:__WriteToolsetLocation /clp:ErrorsOnly`;NoSummary /p:__ToolsetLocationOutputFile=$toolsetLocationFile - - $path = Get-Content $toolsetLocationFile -Encoding UTF8 -TotalCount 1 - if (!(Test-Path $path)) { - throw "Invalid toolset path: $path" - } - - return $global:_InitializeToolset = $path -} - -function ExitWithExitCode([int] $exitCode) { - if ($ci -and $prepareMachine) { - Stop-Processes - } - exit $exitCode -} - -# Check if $LASTEXITCODE is a nonzero exit code (NZEC). If so, print a Azure Pipeline error for -# diagnostics, then exit the script with the $LASTEXITCODE. -function Exit-IfNZEC([string] $category = "General") { - Write-Host "Exit code $LASTEXITCODE" - if ($LASTEXITCODE -ne 0) { - $message = "Last command failed with exit code $LASTEXITCODE." - Write-PipelineTelemetryError -Force -Category $category -Message $message - ExitWithExitCode $LASTEXITCODE - } -} - -function Stop-Processes() { - Write-Host 'Killing running build processes...' 
- foreach ($processName in $processesToStopOnExit) { - Get-Process -Name $processName -ErrorAction SilentlyContinue | Stop-Process - } -} - -# -# Executes msbuild (or 'dotnet msbuild') with arguments passed to the function. -# The arguments are automatically quoted. -# Terminates the script if the build fails. -# -function MSBuild() { - if ($pipelinesLog) { - $buildTool = InitializeBuildTool - - if ($ci -and $buildTool.Tool -eq 'dotnet') { - $env:NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS = 20 - $env:NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS = 20 - Write-PipelineSetVariable -Name 'NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS' -Value '20' - Write-PipelineSetVariable -Name 'NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS' -Value '20' - } - - Enable-Nuget-EnhancedRetry - - $toolsetBuildProject = InitializeToolset - $basePath = Split-Path -parent $toolsetBuildProject - $selectedPath = Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.ArcadeLogging.dll') - - if (-not $selectedPath) { - Write-PipelineTelemetryError -Category 'Build' -Message "Unable to find arcade sdk logger assembly: $selectedPath" - ExitWithExitCode 1 - } - - $args += "/logger:$selectedPath" - } - - MSBuild-Core @args -} - -# -# Executes msbuild (or 'dotnet msbuild') with arguments passed to the function. -# The arguments are automatically quoted. -# Terminates the script if the build fails. -# -function MSBuild-Core() { - if ($ci) { - if (!$binaryLog -and !$excludeCIBinarylog) { - Write-PipelineTelemetryError -Category 'Build' -Message 'Binary log must be enabled in CI build, or explicitly opted-out from with the -excludeCIBinarylog switch.' - ExitWithExitCode 1 - } - - if ($nodeReuse) { - Write-PipelineTelemetryError -Category 'Build' -Message 'Node reuse must be disabled in CI build.' 
- ExitWithExitCode 1 - } - } - - Enable-Nuget-EnhancedRetry - - $buildTool = InitializeBuildTool - - $cmdArgs = "$($buildTool.Command) /m /nologo /clp:Summary /v:$verbosity /nr:$nodeReuse /p:ContinuousIntegrationBuild=$ci" - - if ($warnAsError) { - $cmdArgs += ' /warnaserror /p:TreatWarningsAsErrors=true' - } - else { - $cmdArgs += ' /p:TreatWarningsAsErrors=false' - } - - foreach ($arg in $args) { - if ($null -ne $arg -and $arg.Trim() -ne "") { - if ($arg.EndsWith('\')) { - $arg = $arg + "\" - } - $cmdArgs += " `"$arg`"" - } - } - - # Be sure quote the path in case there are spaces in the dotnet installation location. - $env:ARCADE_BUILD_TOOL_COMMAND = "`"$($buildTool.Path)`" $cmdArgs" - - $exitCode = Exec-Process $buildTool.Path $cmdArgs - - if ($exitCode -ne 0) { - # We should not Write-PipelineTaskError here because that message shows up in the build summary - # The build already logged an error, that's the reason it failed. Producing an error here only adds noise. - Write-Host "Build failed with exit code $exitCode. Check errors above." -ForegroundColor Red - - $buildLog = GetMSBuildBinaryLogCommandLineArgument $args - if ($null -ne $buildLog) { - Write-Host "See log: $buildLog" -ForegroundColor DarkGray - } - - # When running on Azure Pipelines, override the returned exit code to avoid double logging. - # Skip this when the build is a child of the VMR build. - if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null -and !$fromVMR) { - Write-PipelineSetResult -Result "Failed" -Message "msbuild execution failed." 
- # Exiting with an exit code causes the azure pipelines task to log yet another "noise" error - # The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error - ExitWithExitCode 0 - } else { - ExitWithExitCode $exitCode - } - } -} - -function GetMSBuildBinaryLogCommandLineArgument($arguments) { - foreach ($argument in $arguments) { - if ($argument -ne $null) { - $arg = $argument.Trim() - if ($arg.StartsWith('/bl:', "OrdinalIgnoreCase")) { - return $arg.Substring('/bl:'.Length) - } - - if ($arg.StartsWith('/binaryLogger:', 'OrdinalIgnoreCase')) { - return $arg.Substring('/binaryLogger:'.Length) - } - } - } - - return $null -} - -function GetExecutableFileName($baseName) { - if (IsWindowsPlatform) { - return "$baseName.exe" - } - else { - return $baseName - } -} - -function IsWindowsPlatform() { - return [environment]::OSVersion.Platform -eq [PlatformID]::Win32NT -} - -function Get-Darc($version) { - $darcPath = "$TempDir\darc\$([guid]::NewGuid())" - if ($version -ne $null) { - & $PSScriptRoot\darc-init.ps1 -toolpath $darcPath -darcVersion $version | Out-Host - } else { - & $PSScriptRoot\darc-init.ps1 -toolpath $darcPath | Out-Host - } - return "$darcPath\darc.exe" -} - -. 
$PSScriptRoot\pipeline-logging-functions.ps1 - -$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot '..\..\') -$EngRoot = Resolve-Path (Join-Path $PSScriptRoot '..') -$ArtifactsDir = Join-Path $RepoRoot 'artifacts' -$ToolsetDir = Join-Path $ArtifactsDir 'toolset' -$ToolsDir = Join-Path $RepoRoot '.tools' -$LogDir = Join-Path (Join-Path $ArtifactsDir 'log') $configuration -$TempDir = Join-Path (Join-Path $ArtifactsDir 'tmp') $configuration -$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot 'global.json') | ConvertFrom-Json -# true if global.json contains a "runtimes" section -$globalJsonHasRuntimes = if ($GlobalJson.tools.PSObject.Properties.Name -Match 'runtimes') { $true } else { $false } - -Create-Directory $ToolsetDir -Create-Directory $TempDir -Create-Directory $LogDir - -Write-PipelineSetVariable -Name 'Artifacts' -Value $ArtifactsDir -Write-PipelineSetVariable -Name 'Artifacts.Toolset' -Value $ToolsetDir -Write-PipelineSetVariable -Name 'Artifacts.Log' -Value $LogDir -Write-PipelineSetVariable -Name 'TEMP' -Value $TempDir -Write-PipelineSetVariable -Name 'TMP' -Value $TempDir - -# Import custom tools configuration, if present in the repo. -# Note: Import in global scope so that the script set top-level variables without qualification. -if (!$disableConfigureToolsetImport) { - $configureToolsetScript = Join-Path $EngRoot 'configure-toolset.ps1' - if (Test-Path $configureToolsetScript) { - . $configureToolsetScript - if ((Test-Path variable:failOnConfigureToolsetError) -And $failOnConfigureToolsetError) { - if ((Test-Path variable:LastExitCode) -And ($LastExitCode -ne 0)) { - Write-PipelineTelemetryError -Category 'Build' -Message 'configure-toolset.ps1 returned a non-zero exit code' - ExitWithExitCode $LastExitCode - } - } - } -} - -# -# If $ci flag is set, turn on (and log that we did) special environment variables for improved Nuget client retry logic. 
-# -function Enable-Nuget-EnhancedRetry() { - if ($ci) { - Write-Host "Setting NUGET enhanced retry environment variables" - $env:NUGET_ENABLE_ENHANCED_HTTP_RETRY = 'true' - $env:NUGET_ENHANCED_MAX_NETWORK_TRY_COUNT = 6 - $env:NUGET_ENHANCED_NETWORK_RETRY_DELAY_MILLISECONDS = 1000 - $env:NUGET_RETRY_HTTP_429 = 'true' - Write-PipelineSetVariable -Name 'NUGET_ENABLE_ENHANCED_HTTP_RETRY' -Value 'true' - Write-PipelineSetVariable -Name 'NUGET_ENHANCED_MAX_NETWORK_TRY_COUNT' -Value '6' - Write-PipelineSetVariable -Name 'NUGET_ENHANCED_NETWORK_RETRY_DELAY_MILLISECONDS' -Value '1000' - Write-PipelineSetVariable -Name 'NUGET_RETRY_HTTP_429' -Value 'true' - } -} diff --git a/eng/common/tools.sh b/eng/common/tools.sh deleted file mode 100755 index 3def02a63..000000000 --- a/eng/common/tools.sh +++ /dev/null @@ -1,580 +0,0 @@ -#!/usr/bin/env bash - -# Initialize variables if they aren't already defined. - -# CI mode - set to true on CI server for PR validation build or official build. -ci=${ci:-false} - -# Build mode -source_build=${source_build:-false} - -# Set to true to use the pipelines logger which will enable Azure logging output. -# https://github.com/Microsoft/azure-pipelines-tasks/blob/master/docs/authoring/commands.md -# This flag is meant as a temporary opt-opt for the feature while validate it across -# our consumers. It will be deleted in the future. -if [[ "$ci" == true ]]; then - pipelines_log=${pipelines_log:-true} -else - pipelines_log=${pipelines_log:-false} -fi - -# Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names. -configuration=${configuration:-'Debug'} - -# Set to true to opt out of outputting binary log while running in CI -exclude_ci_binary_log=${exclude_ci_binary_log:-false} - -if [[ "$ci" == true && "$exclude_ci_binary_log" == false ]]; then - binary_log_default=true -else - binary_log_default=false -fi - -# Set to true to output binary log from msbuild. 
Note that emitting binary log slows down the build. -binary_log=${binary_log:-$binary_log_default} - -# Turns on machine preparation/clean up code that changes the machine state (e.g. kills build processes). -prepare_machine=${prepare_machine:-false} - -# True to restore toolsets and dependencies. -restore=${restore:-true} - -# Adjusts msbuild verbosity level. -verbosity=${verbosity:-'minimal'} - -# Set to true to reuse msbuild nodes. Recommended to not reuse on CI. -if [[ "$ci" == true ]]; then - node_reuse=${node_reuse:-false} -else - node_reuse=${node_reuse:-true} -fi - -# Configures warning treatment in msbuild. -warn_as_error=${warn_as_error:-true} - -# True to attempt using .NET Core already that meets requirements specified in global.json -# installed on the machine instead of downloading one. -use_installed_dotnet_cli=${use_installed_dotnet_cli:-true} - -# Enable repos to use a particular version of the on-line dotnet-install scripts. -# default URL: https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.sh -dotnetInstallScriptVersion=${dotnetInstallScriptVersion:-'v1'} - -# True to use global NuGet cache instead of restoring packages to repository-local directory. -# Keep in sync with NuGetPackageroot in Arcade SDK's RepositoryLayout.props. -if [[ "$ci" == true || "$source_build" == true ]]; then - use_global_nuget_cache=${use_global_nuget_cache:-false} -else - use_global_nuget_cache=${use_global_nuget_cache:-true} -fi - -# Used when restoring .NET SDK from alternative feeds -runtime_source_feed=${runtime_source_feed:-''} -runtime_source_feed_key=${runtime_source_feed_key:-''} - -# True when the build is running within the VMR. -from_vmr=${from_vmr:-false} - -# Resolve any symlinks in the given path. 
-function ResolvePath { - local path=$1 - - while [[ -h $path ]]; do - local dir="$( cd -P "$( dirname "$path" )" && pwd )" - path="$(readlink "$path")" - - # if $path was a relative symlink, we need to resolve it relative to the path where the - # symlink file was located - [[ $path != /* ]] && path="$dir/$path" - done - - # return value - _ResolvePath="$path" -} - -# ReadVersionFromJson [json key] -function ReadGlobalVersion { - local key=$1 - - if command -v jq &> /dev/null; then - _ReadGlobalVersion="$(jq -r ".[] | select(has(\"$key\")) | .\"$key\"" "$global_json_file")" - elif [[ "$(cat "$global_json_file")" =~ \"$key\"[[:space:]\:]*\"([^\"]+) ]]; then - _ReadGlobalVersion=${BASH_REMATCH[1]} - fi - - if [[ -z "$_ReadGlobalVersion" ]]; then - Write-PipelineTelemetryError -category 'Build' "Error: Cannot find \"$key\" in $global_json_file" - ExitWithExitCode 1 - fi -} - -function InitializeDotNetCli { - if [[ -n "${_InitializeDotNetCli:-}" ]]; then - return - fi - - local install=$1 - - # Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism - export DOTNET_MULTILEVEL_LOOKUP=0 - - # Disable first run since we want to control all package sources - export DOTNET_NOLOGO=1 - - # Disable telemetry on CI - if [[ $ci == true ]]; then - export DOTNET_CLI_TELEMETRY_OPTOUT=1 - fi - - # LTTNG is the logging infrastructure used by Core CLR. Need this variable set - # so it doesn't output warnings to the console. 
- export LTTNG_HOME="$HOME" - - # Find the first path on $PATH that contains the dotnet.exe - if [[ "$use_installed_dotnet_cli" == true && $global_json_has_runtimes == false && -z "${DOTNET_INSTALL_DIR:-}" ]]; then - local dotnet_path=`command -v dotnet` - if [[ -n "$dotnet_path" ]]; then - ResolvePath "$dotnet_path" - export DOTNET_INSTALL_DIR=`dirname "$_ResolvePath"` - fi - fi - - ReadGlobalVersion "dotnet" - local dotnet_sdk_version=$_ReadGlobalVersion - local dotnet_root="" - - # Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version, - # otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues. - if [[ $global_json_has_runtimes == false && -n "${DOTNET_INSTALL_DIR:-}" && -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then - dotnet_root="$DOTNET_INSTALL_DIR" - else - dotnet_root="${repo_root}.dotnet" - - export DOTNET_INSTALL_DIR="$dotnet_root" - - if [[ ! -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then - if [[ "$install" == true ]]; then - InstallDotNetSdk "$dotnet_root" "$dotnet_sdk_version" - else - Write-PipelineTelemetryError -category 'InitializeToolset' "Unable to find dotnet with SDK version '$dotnet_sdk_version'" - ExitWithExitCode 1 - fi - fi - fi - - # Add dotnet to PATH. This prevents any bare invocation of dotnet in custom - # build steps from using anything other than what we've downloaded. 
- Write-PipelinePrependPath -path "$dotnet_root" - - Write-PipelineSetVariable -name "DOTNET_MULTILEVEL_LOOKUP" -value "0" - Write-PipelineSetVariable -name "DOTNET_NOLOGO" -value "1" - - # return value - _InitializeDotNetCli="$dotnet_root" -} - -function InstallDotNetSdk { - local root=$1 - local version=$2 - local architecture="unset" - if [[ $# -ge 3 ]]; then - architecture=$3 - fi - InstallDotNet "$root" "$version" $architecture 'sdk' 'true' $runtime_source_feed $runtime_source_feed_key -} - -function InstallDotNet { - local root=$1 - local version=$2 - local runtime=$4 - - local dotnetVersionLabel="'$runtime v$version'" - if [[ -n "${4:-}" ]] && [ "$4" != 'sdk' ]; then - runtimePath="$root" - runtimePath="$runtimePath/shared" - case "$runtime" in - dotnet) - runtimePath="$runtimePath/Microsoft.NETCore.App" - ;; - aspnetcore) - runtimePath="$runtimePath/Microsoft.AspNetCore.App" - ;; - windowsdesktop) - runtimePath="$runtimePath/Microsoft.WindowsDesktop.App" - ;; - *) - ;; - esac - runtimePath="$runtimePath/$version" - - dotnetVersionLabel="runtime toolset '$runtime/$architecture v$version'" - - if [ -d "$runtimePath" ]; then - echo " Runtime toolset '$runtime/$architecture v$version' already installed." 
- local installSuccess=1 - return - fi - fi - - GetDotNetInstallScript "$root" - local install_script=$_GetDotNetInstallScript - - local installParameters=(--version $version --install-dir "$root") - - if [[ -n "${3:-}" ]] && [ "$3" != 'unset' ]; then - installParameters+=(--architecture $3) - fi - if [[ -n "${4:-}" ]] && [ "$4" != 'sdk' ]; then - installParameters+=(--runtime $4) - fi - if [[ "$#" -ge "5" ]] && [[ "$5" != 'false' ]]; then - installParameters+=(--skip-non-versioned-files) - fi - - local variations=() # list of variable names with parameter arrays in them - - local public_location=("${installParameters[@]}") - variations+=(public_location) - - local dotnetbuilds=("${installParameters[@]}" --azure-feed "https://ci.dot.net/public") - variations+=(dotnetbuilds) - - if [[ -n "${6:-}" ]]; then - variations+=(private_feed) - local private_feed=("${installParameters[@]}" --azure-feed $6) - if [[ -n "${7:-}" ]]; then - # The 'base64' binary on alpine uses '-d' and doesn't support '--decode' - # '-d'. To work around this, do a simple detection and switch the parameter - # accordingly. - decodeArg="--decode" - if base64 --help 2>&1 | grep -q "BusyBox"; then - decodeArg="-d" - fi - decodedFeedKey=`echo $7 | base64 $decodeArg` - private_feed+=(--feed-credential $decodedFeedKey) - fi - fi - - local installSuccess=0 - for variationName in "${variations[@]}"; do - local name="$variationName[@]" - local variation=("${!name}") - echo " Attempting to install $dotnetVersionLabel from $variationName." - bash "$install_script" "${variation[@]}" && installSuccess=1 - if [[ "$installSuccess" -eq 1 ]]; then - break - fi - - echo " Failed to install $dotnetVersionLabel from $variationName." - done - - if [[ "$installSuccess" -eq 0 ]]; then - Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to install $dotnetVersionLabel from any of the specified locations." 
- ExitWithExitCode 1 - fi -} - -function with_retries { - local maxRetries=5 - local retries=1 - echo "Trying to run '$@' for maximum of $maxRetries attempts." - while [[ $((retries++)) -le $maxRetries ]]; do - "$@" - - if [[ $? == 0 ]]; then - echo "Ran '$@' successfully." - return 0 - fi - - timeout=$((3**$retries-1)) - echo "Failed to execute '$@'. Waiting $timeout seconds before next attempt ($retries out of $maxRetries)." 1>&2 - sleep $timeout - done - - echo "Failed to execute '$@' for $maxRetries times." 1>&2 - - return 1 -} - -function GetDotNetInstallScript { - local root=$1 - local install_script="$root/dotnet-install.sh" - local install_script_url="https://builds.dotnet.microsoft.com/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.sh" - - if [[ ! -a "$install_script" ]]; then - mkdir -p "$root" - - echo "Downloading '$install_script_url'" - - # Use curl if available, otherwise use wget - if command -v curl > /dev/null; then - # first, try directly, if this fails we will retry with verbose logging - curl "$install_script_url" -sSL --retry 10 --create-dirs -o "$install_script" || { - if command -v openssl &> /dev/null; then - echo "Curl failed; dumping some information about dotnet.microsoft.com for later investigation" - echo | openssl s_client -showcerts -servername dotnet.microsoft.com -connect dotnet.microsoft.com:443 || true - fi - echo "Will now retry the same URL with verbose logging." - with_retries curl "$install_script_url" -sSL --verbose --retry 10 --create-dirs -o "$install_script" || { - local exit_code=$? - Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')." - ExitWithExitCode $exit_code - } - } - else - with_retries wget -v -O "$install_script" "$install_script_url" || { - local exit_code=$? - Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')." 
- ExitWithExitCode $exit_code - } - fi - fi - # return value - _GetDotNetInstallScript="$install_script" -} - -function InitializeBuildTool { - if [[ -n "${_InitializeBuildTool:-}" ]]; then - return - fi - - InitializeDotNetCli $restore - - # return values - _InitializeBuildTool="$_InitializeDotNetCli/dotnet" - _InitializeBuildToolCommand="msbuild" -} - -function GetNuGetPackageCachePath { - if [[ -z ${NUGET_PACKAGES:-} ]]; then - if [[ "$use_global_nuget_cache" == true ]]; then - export NUGET_PACKAGES="$HOME/.nuget/packages/" - else - export NUGET_PACKAGES="$repo_root/.packages/" - fi - fi - - # return value - _GetNuGetPackageCachePath=$NUGET_PACKAGES -} - -function InitializeNativeTools() { - if [[ -n "${DisableNativeToolsetInstalls:-}" ]]; then - return - fi - if grep -Fq "native-tools" $global_json_file - then - local nativeArgs="" - if [[ "$ci" == true ]]; then - nativeArgs="--installDirectory $tools_dir" - fi - "$_script_dir/init-tools-native.sh" $nativeArgs - fi -} - -function InitializeToolset { - if [[ -n "${_InitializeToolset:-}" ]]; then - return - fi - - GetNuGetPackageCachePath - - ReadGlobalVersion "Microsoft.DotNet.Arcade.Sdk" - - local toolset_version=$_ReadGlobalVersion - local toolset_location_file="$toolset_dir/$toolset_version.txt" - - if [[ -a "$toolset_location_file" ]]; then - local path=`cat "$toolset_location_file"` - if [[ -a "$path" ]]; then - # return value - _InitializeToolset="$path" - return - fi - fi - - if [[ "$restore" != true ]]; then - Write-PipelineTelemetryError -category 'InitializeToolset' "Toolset version $toolset_version has not been restored." 
- ExitWithExitCode 2 - fi - - local proj="$toolset_dir/restore.proj" - - local bl="" - if [[ "$binary_log" == true ]]; then - bl="/bl:$log_dir/ToolsetRestore.binlog" - fi - - echo '' > "$proj" - MSBuild-Core "$proj" $bl /t:__WriteToolsetLocation /clp:ErrorsOnly\;NoSummary /p:__ToolsetLocationOutputFile="$toolset_location_file" - - local toolset_build_proj=`cat "$toolset_location_file"` - - if [[ ! -a "$toolset_build_proj" ]]; then - Write-PipelineTelemetryError -category 'Build' "Invalid toolset path: $toolset_build_proj" - ExitWithExitCode 3 - fi - - # return value - _InitializeToolset="$toolset_build_proj" -} - -function ExitWithExitCode { - if [[ "$ci" == true && "$prepare_machine" == true ]]; then - StopProcesses - fi - exit $1 -} - -function StopProcesses { - echo "Killing running build processes..." - pkill -9 "dotnet" || true - pkill -9 "vbcscompiler" || true - return 0 -} - -function MSBuild { - local args=( "$@" ) - if [[ "$pipelines_log" == true ]]; then - InitializeBuildTool - InitializeToolset - - if [[ "$ci" == true ]]; then - export NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS=20 - export NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS=20 - Write-PipelineSetVariable -name "NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS" -value "20" - Write-PipelineSetVariable -name "NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS" -value "20" - fi - - local toolset_dir="${_InitializeToolset%/*}" - local selectedPath="$toolset_dir/net/Microsoft.DotNet.ArcadeLogging.dll" - - if [[ -z "$selectedPath" ]]; then - Write-PipelineTelemetryError -category 'Build' "Unable to find arcade sdk logger assembly: $selectedPath" - ExitWithExitCode 1 - fi - - args+=( "-logger:$selectedPath" ) - fi - - MSBuild-Core "${args[@]}" -} - -function MSBuild-Core { - if [[ "$ci" == true ]]; then - if [[ "$binary_log" != true && "$exclude_ci_binary_log" != true ]]; then - Write-PipelineTelemetryError -category 'Build' "Binary log must be enabled in CI build, or explicitly opted-out from with the -noBinaryLog switch." 
- ExitWithExitCode 1 - fi - - if [[ "$node_reuse" == true ]]; then - Write-PipelineTelemetryError -category 'Build' "Node reuse must be disabled in CI build." - ExitWithExitCode 1 - fi - fi - - InitializeBuildTool - - local warnaserror_switch="" - if [[ $warn_as_error == true ]]; then - warnaserror_switch="/warnaserror" - fi - - function RunBuildTool { - export ARCADE_BUILD_TOOL_COMMAND="$_InitializeBuildTool $@" - - "$_InitializeBuildTool" "$@" || { - local exit_code=$? - # We should not Write-PipelineTaskError here because that message shows up in the build summary - # The build already logged an error, that's the reason it failed. Producing an error here only adds noise. - echo "Build failed with exit code $exit_code. Check errors above." - - # When running on Azure Pipelines, override the returned exit code to avoid double logging. - # Skip this when the build is a child of the VMR build. - if [[ "$ci" == true && -n ${SYSTEM_TEAMPROJECT:-} && "$from_vmr" != true ]]; then - Write-PipelineSetResult -result "Failed" -message "msbuild execution failed." - # Exiting with an exit code causes the azure pipelines task to log yet another "noise" error - # The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error - ExitWithExitCode 0 - else - ExitWithExitCode $exit_code - fi - } - } - - RunBuildTool "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@" -} - -function GetDarc { - darc_path="$temp_dir/darc" - version="$1" - - if [[ -n "$version" ]]; then - version="--darcversion $version" - fi - - "$eng_root/common/darc-init.sh" --toolpath "$darc_path" $version - darc_tool="$darc_path/darc" -} - -# Returns a full path to an Arcade SDK task project file. 
-function GetSdkTaskProject { - taskName=$1 - echo "$(dirname $_InitializeToolset)/SdkTasks/$taskName.proj" -} - -ResolvePath "${BASH_SOURCE[0]}" -_script_dir=`dirname "$_ResolvePath"` - -. "$_script_dir/pipeline-logging-functions.sh" - -eng_root=`cd -P "$_script_dir/.." && pwd` -repo_root=`cd -P "$_script_dir/../.." && pwd` -repo_root="${repo_root}/" -artifacts_dir="${repo_root}artifacts" -toolset_dir="$artifacts_dir/toolset" -tools_dir="${repo_root}.tools" -log_dir="$artifacts_dir/log/$configuration" -temp_dir="$artifacts_dir/tmp/$configuration" - -global_json_file="${repo_root}global.json" -# determine if global.json contains a "runtimes" entry -global_json_has_runtimes=false -if command -v jq &> /dev/null; then - if jq -e '.tools | has("runtimes")' "$global_json_file" &> /dev/null; then - global_json_has_runtimes=true - fi -elif [[ "$(cat "$global_json_file")" =~ \"runtimes\"[[:space:]\:]*\{ ]]; then - global_json_has_runtimes=true -fi - -# HOME may not be defined in some scenarios, but it is required by NuGet -if [[ -z $HOME ]]; then - export HOME="${repo_root}artifacts/.home/" - mkdir -p "$HOME" -fi - -mkdir -p "$toolset_dir" -mkdir -p "$temp_dir" -mkdir -p "$log_dir" - -Write-PipelineSetVariable -name "Artifacts" -value "$artifacts_dir" -Write-PipelineSetVariable -name "Artifacts.Toolset" -value "$toolset_dir" -Write-PipelineSetVariable -name "Artifacts.Log" -value "$log_dir" -Write-PipelineSetVariable -name "Temp" -value "$temp_dir" -Write-PipelineSetVariable -name "TMP" -value "$temp_dir" - -# Import custom tools configuration, if present in the repo. -if [ -z "${disable_configure_toolset_import:-}" ]; then - configure_toolset_script="$eng_root/configure-toolset.sh" - if [[ -a "$configure_toolset_script" ]]; then - . "$configure_toolset_script" - fi -fi - -# TODO: https://github.com/dotnet/arcade/issues/1468 -# Temporary workaround to avoid breaking change. -# Remove once repos are updated. 
-if [[ -n "${useInstalledDotNetCli:-}" ]]; then - use_installed_dotnet_cli="$useInstalledDotNetCli" -fi diff --git a/eng/common/vmr-sync.ps1 b/eng/common/vmr-sync.ps1 deleted file mode 100644 index 97302f320..000000000 --- a/eng/common/vmr-sync.ps1 +++ /dev/null @@ -1,138 +0,0 @@ -<# -.SYNOPSIS - -This script is used for synchronizing the current repository into a local VMR. -It pulls the current repository's code into the specified VMR directory for local testing or -Source-Build validation. - -.DESCRIPTION - -The tooling used for synchronization will clone the VMR repository into a temporary folder if -it does not already exist. These clones can be reused in future synchronizations, so it is -recommended to dedicate a folder for this to speed up re-runs. - -.EXAMPLE - Synchronize current repository into a local VMR: - ./vmr-sync.ps1 -vmrDir "$HOME/repos/dotnet" -tmpDir "$HOME/repos/tmp" - -.PARAMETER tmpDir -Required. Path to the temporary folder where repositories will be cloned - -.PARAMETER vmrBranch -Optional. Branch of the 'dotnet/dotnet' repo to synchronize. The VMR will be checked out to this branch - -.PARAMETER azdevPat -Optional. Azure DevOps PAT to use for cloning private repositories. - -.PARAMETER vmrDir -Optional. Path to the dotnet/dotnet repository. When null, gets cloned to the temporary folder - -.PARAMETER debugOutput -Optional. Enables debug logging in the darc vmr command. - -.PARAMETER ci -Optional. Denotes that the script is running in a CI environment. 
-#> -param ( - [Parameter(Mandatory=$true, HelpMessage="Path to the temporary folder where repositories will be cloned")] - [string][Alias('t', 'tmp')]$tmpDir, - [string][Alias('b', 'branch')]$vmrBranch, - [string]$remote, - [string]$azdevPat, - [string][Alias('v', 'vmr')]$vmrDir, - [switch]$ci, - [switch]$debugOutput -) - -function Fail { - Write-Host "> $($args[0])" -ForegroundColor 'Red' -} - -function Highlight { - Write-Host "> $($args[0])" -ForegroundColor 'Cyan' -} - -$verbosity = 'verbose' -if ($debugOutput) { - $verbosity = 'debug' -} -# Validation - -if (-not $tmpDir) { - Fail "Missing -tmpDir argument. Please specify the path to the temporary folder where the repositories will be cloned" - exit 1 -} - -# Sanitize the input - -if (-not $vmrDir) { - $vmrDir = Join-Path $tmpDir 'dotnet' -} - -if (-not (Test-Path -Path $tmpDir -PathType Container)) { - New-Item -ItemType Directory -Path $tmpDir | Out-Null -} - -# Prepare the VMR - -if (-not (Test-Path -Path $vmrDir -PathType Container)) { - Highlight "Cloning 'dotnet/dotnet' into $vmrDir.." - git clone https://github.com/dotnet/dotnet $vmrDir - - if ($vmrBranch) { - git -C $vmrDir switch -c $vmrBranch - } -} -else { - if ((git -C $vmrDir diff --quiet) -eq $false) { - Fail "There are changes in the working tree of $vmrDir. Please commit or stash your changes" - exit 1 - } - - if ($vmrBranch) { - Highlight "Preparing $vmrDir" - git -C $vmrDir checkout $vmrBranch - git -C $vmrDir pull - } -} - -Set-StrictMode -Version Latest - -# Prepare darc - -Highlight 'Installing .NET, preparing the tooling..' -. .\eng\common\tools.ps1 -$dotnetRoot = InitializeDotNetCli -install:$true -$darc = Get-Darc -$dotnet = "$dotnetRoot\dotnet.exe" - -Highlight "Starting the synchronization of VMR.." 
- -# Synchronize the VMR -$darcArgs = ( - "vmr", "forwardflow", - "--tmp", $tmpDir, - "--$verbosity", - $vmrDir -) - -if ($ci) { - $darcArgs += ("--ci") -} - -if ($azdevPat) { - $darcArgs += ("--azdev-pat", $azdevPat) -} - -& "$darc" $darcArgs - -if ($LASTEXITCODE -eq 0) { - Highlight "Synchronization succeeded" -} -else { - Fail "Synchronization of repo to VMR failed!" - Fail "'$vmrDir' is left in its last state (re-run of this script will reset it)." - Fail "Please inspect the logs which contain path to the failing patch file (use -debugOutput to get all the details)." - Fail "Once you make changes to the conflicting VMR patch, commit it locally and re-run this script." - exit 1 -} diff --git a/eng/common/vmr-sync.sh b/eng/common/vmr-sync.sh deleted file mode 100644 index 44239e331..000000000 --- a/eng/common/vmr-sync.sh +++ /dev/null @@ -1,207 +0,0 @@ -#!/bin/bash - -### This script is used for synchronizing the current repository into a local VMR. -### It pulls the current repository's code into the specified VMR directory for local testing or -### Source-Build validation. -### -### The tooling used for synchronization will clone the VMR repository into a temporary folder if -### it does not already exist. These clones can be reused in future synchronizations, so it is -### recommended to dedicate a folder for this to speed up re-runs. -### -### USAGE: -### Synchronize current repository into a local VMR: -### ./vmr-sync.sh --tmp "$HOME/repos/tmp" "$HOME/repos/dotnet" -### -### Options: -### -t, --tmp, --tmp-dir PATH -### Required. Path to the temporary folder where repositories will be cloned -### -### -b, --branch, --vmr-branch BRANCH_NAME -### Optional. Branch of the 'dotnet/dotnet' repo to synchronize. The VMR will be checked out to this branch -### -### --debug -### Optional. Turns on the most verbose logging for the VMR tooling -### -### --remote name:URI -### Optional. 
Additional remote to use during the synchronization -### This can be used to synchronize to a commit from a fork of the repository -### Example: 'runtime:https://github.com/yourfork/runtime' -### -### --azdev-pat -### Optional. Azure DevOps PAT to use for cloning private repositories. -### -### -v, --vmr, --vmr-dir PATH -### Optional. Path to the dotnet/dotnet repository. When null, gets cloned to the temporary folder - -source="${BASH_SOURCE[0]}" - -# resolve $source until the file is no longer a symlink -while [[ -h "$source" ]]; do - scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - source="$(readlink "$source")" - # if $source was a relative symlink, we need to resolve it relative to the path where the - # symlink file was located - [[ $source != /* ]] && source="$scriptroot/$source" -done -scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" - -function print_help () { - sed -n '/^### /,/^$/p' "$source" | cut -b 5- -} - -COLOR_RED=$(tput setaf 1 2>/dev/null || true) -COLOR_CYAN=$(tput setaf 6 2>/dev/null || true) -COLOR_CLEAR=$(tput sgr0 2>/dev/null || true) -COLOR_RESET=uniquesearchablestring -FAILURE_PREFIX='> ' - -function fail () { - echo "${COLOR_RED}$FAILURE_PREFIX${1//${COLOR_RESET}/${COLOR_RED}}${COLOR_CLEAR}" >&2 -} - -function highlight () { - echo "${COLOR_CYAN}$FAILURE_PREFIX${1//${COLOR_RESET}/${COLOR_CYAN}}${COLOR_CLEAR}" -} - -tmp_dir='' -vmr_dir='' -vmr_branch='' -additional_remotes='' -verbosity=verbose -azdev_pat='' -ci=false - -while [[ $# -gt 0 ]]; do - opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")" - case "$opt" in - -t|--tmp|--tmp-dir) - tmp_dir=$2 - shift - ;; - -v|--vmr|--vmr-dir) - vmr_dir=$2 - shift - ;; - -b|--branch|--vmr-branch) - vmr_branch=$2 - shift - ;; - --remote) - additional_remotes="$additional_remotes $2" - shift - ;; - --azdev-pat) - azdev_pat=$2 - shift - ;; - --ci) - ci=true - ;; - -d|--debug) - verbosity=debug - ;; - -h|--help) - print_help - exit 0 - ;; - *) - fail "Invalid argument: $1" - print_help - 
exit 1 - ;; - esac - - shift -done - -# Validation - -if [[ -z "$tmp_dir" ]]; then - fail "Missing --tmp-dir argument. Please specify the path to the temporary folder where the repositories will be cloned" - exit 1 -fi - -# Sanitize the input - -if [[ -z "$vmr_dir" ]]; then - vmr_dir="$tmp_dir/dotnet" -fi - -if [[ ! -d "$tmp_dir" ]]; then - mkdir -p "$tmp_dir" -fi - -if [[ "$verbosity" == "debug" ]]; then - set -x -fi - -# Prepare the VMR - -if [[ ! -d "$vmr_dir" ]]; then - highlight "Cloning 'dotnet/dotnet' into $vmr_dir.." - git clone https://github.com/dotnet/dotnet "$vmr_dir" - - if [[ -n "$vmr_branch" ]]; then - git -C "$vmr_dir" switch -c "$vmr_branch" - fi -else - if ! git -C "$vmr_dir" diff --quiet; then - fail "There are changes in the working tree of $vmr_dir. Please commit or stash your changes" - exit 1 - fi - - if [[ -n "$vmr_branch" ]]; then - highlight "Preparing $vmr_dir" - git -C "$vmr_dir" checkout "$vmr_branch" - git -C "$vmr_dir" pull - fi -fi - -set -e - -# Prepare darc - -highlight 'Installing .NET, preparing the tooling..' -source "./eng/common/tools.sh" -InitializeDotNetCli true -GetDarc -dotnetDir=$( cd ./.dotnet/; pwd -P ) -dotnet=$dotnetDir/dotnet - -highlight "Starting the synchronization of VMR.." -set +e - -if [[ -n "$additional_remotes" ]]; then - additional_remotes="--additional-remotes $additional_remotes" -fi - -if [[ -n "$azdev_pat" ]]; then - azdev_pat="--azdev-pat $azdev_pat" -fi - -ci_arg='' -if [[ "$ci" == "true" ]]; then - ci_arg="--ci" -fi - -# Synchronize the VMR - -export DOTNET_ROOT="$dotnetDir" - -"$darc_tool" vmr forwardflow \ - --tmp "$tmp_dir" \ - $azdev_pat \ - --$verbosity \ - $ci_arg \ - $additional_remotes \ - "$vmr_dir" - -if [[ $? == 0 ]]; then - highlight "Synchronization succeeded" -else - fail "Synchronization of repo to VMR failed!" - fail "'$vmr_dir' is left in its last state (re-run of this script will reset it)." 
- fail "Please inspect the logs which contain path to the failing patch file (use --debug to get all the details)." - fail "Once you make changes to the conflicting VMR patch, commit it locally and re-run this script." - exit 1 -fi diff --git a/eng/pipelines/public.yml b/eng/pipelines/public-eng.yml similarity index 100% rename from eng/pipelines/public.yml rename to eng/pipelines/public-eng.yml diff --git a/global.json b/global.json deleted file mode 100644 index 0ea1297e6..000000000 --- a/global.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "sdk": { - "version": "10.0.100-preview.7.25322.101", - "allowPrerelease": true, - "rollForward": "major" - }, - "tools": { - "dotnet": "10.0.100-preview.7.25322.101" - }, - "msbuild-sdks": { - "Microsoft.Build.NoTargets": "3.7.0", - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25380.108", - "Microsoft.VisualStudio.Internal.MicroBuild.Vsman": "2.0.174" - } -} From 32b338663e700ff6c93a53e0cd2cae43d8a43679 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 1 Aug 2025 14:54:25 -0700 Subject: [PATCH 003/118] Added a test for the checkout process. Need to see how the checkout names actually work. 
--- .../templates/stages/workload-public-build.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/eng/pipelines/templates/stages/workload-public-build.yml b/eng/pipelines/templates/stages/workload-public-build.yml index ac5612439..6755b6310 100644 --- a/eng/pipelines/templates/stages/workload-public-build.yml +++ b/eng/pipelines/templates/stages/workload-public-build.yml @@ -13,6 +13,16 @@ stages: publish: logs: true steps: + # For checkout mechanics, see: + # https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#build-variables-devops-services + # https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/steps-checkout?view=azure-pipelines + # https://learn.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#checkout-path + - checkout: self + path: $(Pipeline.Workspace)/s + - checkout: eng + path: $(Pipeline.Workspace)/s + - powershell: 'Get-ChildItem env:' + displayName: List Environment Variables - powershell: >- eng/common/build.ps1 -restore -build -pack -ci -msbuildEngine vs From 75b3c207e15835b88e022023c83f2db98a9ec0c7 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 1 Aug 2025 15:00:38 -0700 Subject: [PATCH 004/118] Added @self to see if that helps resolve the usage of the arcade job template. 
--- eng/pipelines/templates/stages/workload-public-build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eng/pipelines/templates/stages/workload-public-build.yml b/eng/pipelines/templates/stages/workload-public-build.yml index 6755b6310..3539849d0 100644 --- a/eng/pipelines/templates/stages/workload-public-build.yml +++ b/eng/pipelines/templates/stages/workload-public-build.yml @@ -2,7 +2,7 @@ stages: - stage: Build displayName: Build jobs: - - template: /eng/common/templates/job/job.yml + - template: /eng/common/templates/job/job.yml@self parameters: name: buildRepo displayName: Build Repo From d8a891d568abd0ca2f8fd97a1f46b3a228e10b19 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 1 Aug 2025 15:10:07 -0700 Subject: [PATCH 005/118] Path cannot be a fully-qualified path. It is relative to the workspace directory. --- eng/pipelines/templates/stages/workload-public-build.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/eng/pipelines/templates/stages/workload-public-build.yml b/eng/pipelines/templates/stages/workload-public-build.yml index 3539849d0..572f077b6 100644 --- a/eng/pipelines/templates/stages/workload-public-build.yml +++ b/eng/pipelines/templates/stages/workload-public-build.yml @@ -18,9 +18,9 @@ stages: # https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/steps-checkout?view=azure-pipelines # https://learn.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#checkout-path - checkout: self - path: $(Pipeline.Workspace)/s + path: s - checkout: eng - path: $(Pipeline.Workspace)/s + path: s - powershell: 'Get-ChildItem env:' displayName: List Environment Variables - powershell: >- From ea1174da5c239fbd9a458c39c49527a746edc715 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 1 Aug 2025 15:38:06 -0700 Subject: [PATCH 006/118] Checkout to separate directories and copy the one into the other. 
--- eng/pipelines/templates/stages/workload-public-build.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/eng/pipelines/templates/stages/workload-public-build.yml b/eng/pipelines/templates/stages/workload-public-build.yml index 572f077b6..a9b608581 100644 --- a/eng/pipelines/templates/stages/workload-public-build.yml +++ b/eng/pipelines/templates/stages/workload-public-build.yml @@ -18,9 +18,11 @@ stages: # https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/steps-checkout?view=azure-pipelines # https://learn.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#checkout-path - checkout: self - path: s + path: self - checkout: eng - path: s + path: eng + - powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng" -Destination "$(Agent.BuildDirectory)\self" -Recurse -Force + displayName: Copy `eng` to `self` - powershell: 'Get-ChildItem env:' displayName: List Environment Variables - powershell: >- From 69e15270c9e9360ea04984c2cb7e868487a49d72 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 1 Aug 2025 15:53:59 -0700 Subject: [PATCH 007/118] Trying single checkout path again but with workspaceRepo set to true now. 
--- .../templates/stages/workload-public-build.yml | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/eng/pipelines/templates/stages/workload-public-build.yml b/eng/pipelines/templates/stages/workload-public-build.yml index a9b608581..fefa453cd 100644 --- a/eng/pipelines/templates/stages/workload-public-build.yml +++ b/eng/pipelines/templates/stages/workload-public-build.yml @@ -17,12 +17,15 @@ stages: # https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#build-variables-devops-services # https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/steps-checkout?view=azure-pipelines # https://learn.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#checkout-path - - checkout: self - path: self - checkout: eng - path: eng - - powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng" -Destination "$(Agent.BuildDirectory)\self" -Recurse -Force - displayName: Copy `eng` to `self` + # path: eng + path: s + - checkout: self + # path: self + path: s + workspaceRepo: true + # - powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng" -Destination "$(Agent.BuildDirectory)\self" -Recurse -Force + # displayName: Copy `eng` to `self` - powershell: 'Get-ChildItem env:' displayName: List Environment Variables - powershell: >- From 52d43bde3d7ab02942e91b6dce111ab0c5e22777 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 1 Aug 2025 16:22:16 -0700 Subject: [PATCH 008/118] Checking out to separate directories again, but enhancing the copy and listing the contents. 
--- .../templates/stages/workload-public-build.yml | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/eng/pipelines/templates/stages/workload-public-build.yml b/eng/pipelines/templates/stages/workload-public-build.yml index fefa453cd..098bec5c2 100644 --- a/eng/pipelines/templates/stages/workload-public-build.yml +++ b/eng/pipelines/templates/stages/workload-public-build.yml @@ -18,14 +18,18 @@ stages: # https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/steps-checkout?view=azure-pipelines # https://learn.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#checkout-path - checkout: eng - # path: eng - path: s + path: eng + # path: s - checkout: self - # path: self - path: s + path: self + # path: s workspaceRepo: true - # - powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng" -Destination "$(Agent.BuildDirectory)\self" -Recurse -Force - # displayName: Copy `eng` to `self` + - powershell: Get-ChildItem -Path "$(Agent.BuildDirectory)\self" + displayName: List self contents + - powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng" -Destination "$(Agent.BuildDirectory)\self" -Exclude '.git','.gitignore' -Recurse -Force + displayName: Copy eng to self + - powershell: Get-ChildItem -Path "$(Agent.BuildDirectory)\self" + displayName: List self contents - powershell: 'Get-ChildItem env:' displayName: List Environment Variables - powershell: >- From 4684e7f410ddf4e5d8a9a5984403ad44e95a9c58 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 1 Aug 2025 16:42:36 -0700 Subject: [PATCH 009/118] Forgot the \* for the file copy. 
--- .../stages/workload-public-build.yml | 21 ++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/eng/pipelines/templates/stages/workload-public-build.yml b/eng/pipelines/templates/stages/workload-public-build.yml index 098bec5c2..538f9fcef 100644 --- a/eng/pipelines/templates/stages/workload-public-build.yml +++ b/eng/pipelines/templates/stages/workload-public-build.yml @@ -24,12 +24,23 @@ stages: path: self # path: s workspaceRepo: true - - powershell: Get-ChildItem -Path "$(Agent.BuildDirectory)\self" - displayName: List self contents - - powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng" -Destination "$(Agent.BuildDirectory)\self" -Exclude '.git','.gitignore' -Recurse -Force + # - powershell: Get-ChildItem -Path "$(Agent.BuildDirectory)\self" + # displayName: List self contents + - powershell: | + Write-Host "Eng: $(Agent.BuildDirectory)\eng" + Get-ChildItem -Path "$(Agent.BuildDirectory)\eng" + Write-Host "Self: $(Agent.BuildDirectory)\eng" + Get-ChildItem -Path "$(Agent.BuildDirectory)\self" + Write-Host "Copying eng to self" + Copy-Item -Path "$(Agent.BuildDirectory)\eng\*" -Destination "$(Agent.BuildDirectory)\self" -Exclude '.git','.gitignore' -Recurse -Force + Write-Host "Copied eng to self" + Write-Host "Eng: $(Agent.BuildDirectory)\eng" + Get-ChildItem -Path "$(Agent.BuildDirectory)\eng" + Write-Host "Self: $(Agent.BuildDirectory)\eng" + Get-ChildItem -Path "$(Agent.BuildDirectory)\self" displayName: Copy eng to self - - powershell: Get-ChildItem -Path "$(Agent.BuildDirectory)\self" - displayName: List self contents + # - powershell: Get-ChildItem -Path "$(Agent.BuildDirectory)\self" + # displayName: List self contents - powershell: 'Get-ChildItem env:' displayName: List Environment Variables - powershell: >- From ba237f4078c53cb104a98bea739d1b7dc2361427 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 1 Aug 2025 17:00:02 -0700 Subject: [PATCH 010/118] Trying a workaround for a warning, ##[warning]Unable move 
and reuse existing repository to required location. Cleaned up code. --- .../stages/workload-public-build.yml | 23 ++++--------------- 1 file changed, 4 insertions(+), 19 deletions(-) diff --git a/eng/pipelines/templates/stages/workload-public-build.yml b/eng/pipelines/templates/stages/workload-public-build.yml index 538f9fcef..bdf7e1d91 100644 --- a/eng/pipelines/templates/stages/workload-public-build.yml +++ b/eng/pipelines/templates/stages/workload-public-build.yml @@ -18,29 +18,14 @@ stages: # https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/steps-checkout?view=azure-pipelines # https://learn.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#checkout-path - checkout: eng - path: eng + path: s/eng-branch # path: s - checkout: self - path: self + path: s/self-branch # path: s workspaceRepo: true - # - powershell: Get-ChildItem -Path "$(Agent.BuildDirectory)\self" - # displayName: List self contents - - powershell: | - Write-Host "Eng: $(Agent.BuildDirectory)\eng" - Get-ChildItem -Path "$(Agent.BuildDirectory)\eng" - Write-Host "Self: $(Agent.BuildDirectory)\eng" - Get-ChildItem -Path "$(Agent.BuildDirectory)\self" - Write-Host "Copying eng to self" - Copy-Item -Path "$(Agent.BuildDirectory)\eng\*" -Destination "$(Agent.BuildDirectory)\self" -Exclude '.git','.gitignore' -Recurse -Force - Write-Host "Copied eng to self" - Write-Host "Eng: $(Agent.BuildDirectory)\eng" - Get-ChildItem -Path "$(Agent.BuildDirectory)\eng" - Write-Host "Self: $(Agent.BuildDirectory)\eng" - Get-ChildItem -Path "$(Agent.BuildDirectory)\self" - displayName: Copy eng to self - # - powershell: Get-ChildItem -Path "$(Agent.BuildDirectory)\self" - # displayName: List self contents + - powershell: Copy-Item -Path "$(Agent.BuildDirectory)\s\eng-branch\*" -Destination "$(Agent.BuildDirectory)\s\self-branch" -Exclude '.git','.gitignore' -Recurse -Force + displayName: Copy eng-branch to self-branch - powershell: 'Get-ChildItem env:' 
displayName: List Environment Variables - powershell: >- From d63394b88b817f7593cf45766b59cc0120d79104 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 1 Aug 2025 17:28:14 -0700 Subject: [PATCH 011/118] Added workaround for warning that should hopefully work. --- .../stages/workload-public-build.yml | 22 ++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/eng/pipelines/templates/stages/workload-public-build.yml b/eng/pipelines/templates/stages/workload-public-build.yml index bdf7e1d91..f2a01c5bd 100644 --- a/eng/pipelines/templates/stages/workload-public-build.yml +++ b/eng/pipelines/templates/stages/workload-public-build.yml @@ -18,16 +18,28 @@ stages: # https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/steps-checkout?view=azure-pipelines # https://learn.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#checkout-path - checkout: eng - path: s/eng-branch + path: eng-branch # path: s + displayName: 🟣 Checkout eng branch + # There is a warning when performing the second checkout, ##[warning]Unable move and reuse existing repository to required location. + # This happens because both checkouts are of the same repository, thus have the same path of: D:\a\_work\1\s\workload-versions + # The first checkout deletes this directory. To avoid the warning, simply create the directory beforehand. 
+ # See: https://developercommunity.visualstudio.com/t/warning-when-fetching-additional-repositories/1065143 + - powershell: | + Write-Host "RepoName: $(Build.Repository.Name)" + New-Item -Path "$(Agent.BuildDirectory)\s" -Name "$(Build.Repository.Name)" -ItemType Directory + displayName: 🟣 (Workaround) Create checkout directory - checkout: self - path: s/self-branch + path: self-branch # path: s workspaceRepo: true - - powershell: Copy-Item -Path "$(Agent.BuildDirectory)\s\eng-branch\*" -Destination "$(Agent.BuildDirectory)\s\self-branch" -Exclude '.git','.gitignore' -Recurse -Force - displayName: Copy eng-branch to self-branch + displayName: 🟣 Checkout self branch + # The \* is required for the Exclude to work properly. + # See: https://stackoverflow.com/a/67407481/294804 + - powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng-branch\*" -Destination "$(Agent.BuildDirectory)\self-branch" -Exclude '.git','.gitignore' -Recurse -Force + displayName: 🟣 Copy eng-branch to self-branch - powershell: 'Get-ChildItem env:' - displayName: List Environment Variables + displayName: 🟣 List Environment Variables - powershell: >- eng/common/build.ps1 -restore -build -pack -ci -msbuildEngine vs From 8b7625b0fd5dc78267172046fc3493a0a4496cb1 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 1 Aug 2025 17:39:41 -0700 Subject: [PATCH 012/118] Let's see if flipping the checkout order helps. 
--- .../templates/stages/workload-public-build.yml | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/eng/pipelines/templates/stages/workload-public-build.yml b/eng/pipelines/templates/stages/workload-public-build.yml index f2a01c5bd..c1f495825 100644 --- a/eng/pipelines/templates/stages/workload-public-build.yml +++ b/eng/pipelines/templates/stages/workload-public-build.yml @@ -17,10 +17,10 @@ stages: # https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#build-variables-devops-services # https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/steps-checkout?view=azure-pipelines # https://learn.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#checkout-path - - checkout: eng - path: eng-branch - # path: s - displayName: 🟣 Checkout eng branch + - checkout: self + path: self-branch + workspaceRepo: true + displayName: 🟣 Checkout self branch # There is a warning when performing the second checkout, ##[warning]Unable move and reuse existing repository to required location. # This happens because both checkouts are of the same repository, thus have the same path of: D:\a\_work\1\s\workload-versions # The first checkout deletes this directory. To avoid the warning, simply create the directory beforehand. @@ -29,11 +29,9 @@ stages: Write-Host "RepoName: $(Build.Repository.Name)" New-Item -Path "$(Agent.BuildDirectory)\s" -Name "$(Build.Repository.Name)" -ItemType Directory displayName: 🟣 (Workaround) Create checkout directory - - checkout: self - path: self-branch - # path: s - workspaceRepo: true - displayName: 🟣 Checkout self branch + - checkout: eng + path: eng-branch + displayName: 🟣 Checkout eng branch # The \* is required for the Exclude to work properly. 
# See: https://stackoverflow.com/a/67407481/294804 - powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng-branch\*" -Destination "$(Agent.BuildDirectory)\self-branch" -Exclude '.git','.gitignore' -Recurse -Force From f36b471367f83a68f92a64d3c01c1bacad7b5cee Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 1 Aug 2025 17:59:22 -0700 Subject: [PATCH 013/118] Fix the workaround repo name for folder. --- eng/pipelines/templates/stages/workload-public-build.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/eng/pipelines/templates/stages/workload-public-build.yml b/eng/pipelines/templates/stages/workload-public-build.yml index c1f495825..a327684da 100644 --- a/eng/pipelines/templates/stages/workload-public-build.yml +++ b/eng/pipelines/templates/stages/workload-public-build.yml @@ -25,9 +25,9 @@ stages: # This happens because both checkouts are of the same repository, thus have the same path of: D:\a\_work\1\s\workload-versions # The first checkout deletes this directory. To avoid the warning, simply create the directory beforehand. # See: https://developercommunity.visualstudio.com/t/warning-when-fetching-additional-repositories/1065143 - - powershell: | - Write-Host "RepoName: $(Build.Repository.Name)" - New-Item -Path "$(Agent.BuildDirectory)\s" -Name "$(Build.Repository.Name)" -ItemType Directory + # Despite all the documentation about Build.Repository.Name, it does not simply give you the repository name. It gives: organization/repositoryName + # To resolve this, we split on '/' and take the last element, which will always be the repository name only. 
+ - powershell: New-Item -Path "$(Agent.BuildDirectory)\s" -Name "$($(Build.Repository.Name) -split '/' | Select-Object -Last 1)" -ItemType Directory displayName: 🟣 (Workaround) Create checkout directory - checkout: eng path: eng-branch From 0185c0c7ca2c72789df372b624dfd05a33bfe16d Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 1 Aug 2025 18:09:14 -0700 Subject: [PATCH 014/118] I forgot quotes. --- eng/pipelines/templates/stages/workload-public-build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eng/pipelines/templates/stages/workload-public-build.yml b/eng/pipelines/templates/stages/workload-public-build.yml index a327684da..a651a3283 100644 --- a/eng/pipelines/templates/stages/workload-public-build.yml +++ b/eng/pipelines/templates/stages/workload-public-build.yml @@ -27,7 +27,7 @@ stages: # See: https://developercommunity.visualstudio.com/t/warning-when-fetching-additional-repositories/1065143 # Despite all the documentation about Build.Repository.Name, it does not simply give you the repository name. It gives: organization/repositoryName # To resolve this, we split on '/' and take the last element, which will always be the repository name only. - - powershell: New-Item -Path "$(Agent.BuildDirectory)\s" -Name "$($(Build.Repository.Name) -split '/' | Select-Object -Last 1)" -ItemType Directory + - powershell: New-Item -Path "$(Agent.BuildDirectory)\s" -Name "$('$(Build.Repository.Name)' -Split '/' | Select-Object -Last 1)" -ItemType Directory displayName: 🟣 (Workaround) Create checkout directory - checkout: eng path: eng-branch From 5e3f73ca3b8aa07448453b27be17a4031f91a075 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Mon, 4 Aug 2025 19:15:14 -0700 Subject: [PATCH 015/118] Testing some pipeline parameter mechanics. 
--- eng/pipelines/official.yml | 65 ++++++++++++++++++++++++++++++++++---- 1 file changed, 58 insertions(+), 7 deletions(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index b94487c81..e8fad92a6 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -1,17 +1,25 @@ # Pipeline: https://dev.azure.com/dnceng/internal/_build?definitionId=1298 -trigger: - batch: true - branches: - include: - - main - - release/* +# trigger: +# batch: true +# branches: +# include: +# - main +# - release/* +trigger: none pr: none parameters: +- name: sourceBranchParam + displayName: Source Branch + type: string + value: + - main + - release/9 + - release/10 - name: stabilizePackageVersion - displayName: Stabilize package version + displayName: ⚠︎ OFFICIAL ⚠︎ 🚨 Stabilize package version type: boolean default: false - name: publishToAzDO @@ -62,6 +70,43 @@ parameters: displayName: 'Secondary VS insertion branches [packs only]' type: object default: [] +- name: testStep + displayName: Test Step + type: step + default: [] +- name: testStepList + displayName: Test Step List + type: stepList + default: [] +- name: testJob + displayName: Test Job + type: job + default: [] +- name: testJobList + displayName: Test Job List + type: jobList + default: [] +- name: testDeployment + displayName: Test Deployment + type: deployment + default: [] +- name: testDeploymentList + displayName: Test Deployment List + type: deploymentList + default: [] +- name: testStage + displayName: Test Stage + type: stage + default: [] +- name: testStageList + displayName: Test Stage List + type: stageList + default: [] +- name: empty + displayName: ------------------------------------ + type: boolean + default: false + enabled: false variables: # Variables used: DncEngInternalBuildPool @@ -93,6 +138,12 @@ resources: type: git name: 1ESPipelineTemplates/1ESPipelineTemplates ref: refs/tags/release + - repository: source + type: github + name: dotnet/workload-versions + ref: ${{ 
variables['Build.SourceBranch'] }} + # https://dev.azure.com/dnceng-public/public/_settings/adminservices?resourceId=690f39b4-7746-42c2-be89-281bd7c78b9e + endpoint: public extends: template: v1/1ES.Official.PipelineTemplate.yml@1esPipelines From 24bcc422e7b2219d75719701d75b868847e79480 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Mon, 4 Aug 2025 19:20:02 -0700 Subject: [PATCH 016/118] Fixing incorrect names. --- eng/pipelines/official.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index e8fad92a6..108e7622f 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -14,7 +14,7 @@ parameters: - name: sourceBranchParam displayName: Source Branch type: string - value: + values: - main - release/9 - release/10 @@ -106,7 +106,7 @@ parameters: displayName: ------------------------------------ type: boolean default: false - enabled: false + enable: false variables: # Variables used: DncEngInternalBuildPool From bd9dc51bf59331ff4dd0a9ba54d03c61b26584c6 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Mon, 4 Aug 2025 19:26:28 -0700 Subject: [PATCH 017/118] Pipeline parameters cannot be disabled. --- eng/pipelines/official.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 108e7622f..7b49498f0 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -106,7 +106,6 @@ parameters: displayName: ------------------------------------ type: boolean default: false - enable: false variables: # Variables used: DncEngInternalBuildPool From 226e40f56d93adc837c36a32a8f74018732ebd09 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Mon, 4 Aug 2025 19:32:57 -0700 Subject: [PATCH 018/118] Testing more UI shenanigans. 
--- eng/pipelines/official.yml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 7b49498f0..697301afb 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -103,7 +103,15 @@ parameters: type: stageList default: [] - name: empty - displayName: ------------------------------------ + displayName: ------------------------------------⚠️❗️‼️ⓘ------------------------------------ + type: boolean + default: false +- name: empty2 + displayName: | + I've got a lovely + bunch of coconuts. + Here's they are + standing in a row. type: boolean default: false From 1a0e11b381de7dd73aa1e10a9d612f1a4d818b03 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Mon, 4 Aug 2025 19:45:34 -0700 Subject: [PATCH 019/118] Added source branch reference logic. --- eng/pipelines/official.yml | 80 +++++++++++++++++++------------------- 1 file changed, 39 insertions(+), 41 deletions(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 697301afb..35dd97c90 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -11,15 +11,12 @@ trigger: none pr: none parameters: -- name: sourceBranchParam +- name: sourceBranch displayName: Source Branch type: string - values: - - main - - release/9 - - release/10 + default: main - name: stabilizePackageVersion - displayName: ⚠︎ OFFICIAL ⚠︎ 🚨 Stabilize package version + displayName: ⚠︎ OFFICIAL ⚠︎ 🚨 🟣 Stabilize package version ⚠️❗️‼️ⓘ type: boolean default: false - name: publishToAzDO @@ -70,40 +67,41 @@ parameters: displayName: 'Secondary VS insertion branches [packs only]' type: object default: [] -- name: testStep - displayName: Test Step - type: step - default: [] -- name: testStepList - displayName: Test Step List - type: stepList - default: [] -- name: testJob - displayName: Test Job - type: job - default: [] -- name: testJobList - displayName: Test Job List - type: jobList - default: [] -- name: 
testDeployment - displayName: Test Deployment - type: deployment - default: [] -- name: testDeploymentList - displayName: Test Deployment List - type: deploymentList - default: [] -- name: testStage - displayName: Test Stage - type: stage - default: [] -- name: testStageList - displayName: Test Stage List - type: stageList - default: [] -- name: empty - displayName: ------------------------------------⚠️❗️‼️ⓘ------------------------------------ +# - name: testStep +# displayName: Test Step +# type: step +# default: [] +# - name: testStepList +# displayName: Test Step List +# type: stepList +# default: [] +# - name: testJob +# displayName: Test Job +# type: job +# default: [] +# - name: testJobList +# displayName: Test Job List +# type: jobList +# default: [] +# - name: testDeployment +# displayName: Test Deployment +# type: deployment +# default: [] +# - name: testDeploymentList +# displayName: Test Deployment List +# type: deploymentList +# default: [] +# - name: testStage +# displayName: Test Stage +# type: stage +# default: [] +# - name: testStageList +# displayName: Test Stage List +# type: stageList +# default: [] +- name: divider + # 66 character max width for a single line + displayName: ------------------------------------------------------------------ type: boolean default: false - name: empty2 @@ -148,7 +146,7 @@ resources: - repository: source type: github name: dotnet/workload-versions - ref: ${{ variables['Build.SourceBranch'] }} + ref: ${{ format('refs/heads/{0}', parameters.sourceBranch) }} # https://dev.azure.com/dnceng-public/public/_settings/adminservices?resourceId=690f39b4-7746-42c2-be89-281bd7c78b9e endpoint: public From d9b3d58df34f732a66991165ff68b6d46624e483 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 5 Aug 2025 13:50:42 -0700 Subject: [PATCH 020/118] Reorganized and sectioned off the pipeline parameters. 
--- eng/pipelines/official.yml | 108 ++++++++++++++++++------------------- 1 file changed, 52 insertions(+), 56 deletions(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 35dd97c90..c35d58225 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -7,16 +7,18 @@ # - main # - release/* +# Note: This pipeline can never run automatically because the sourceBranch parameter is required. trigger: none pr: none parameters: - name: sourceBranch - displayName: Source Branch + displayName: 🚩 Source Branch 🚩 type: string - default: main -- name: stabilizePackageVersion - displayName: ⚠︎ OFFICIAL ⚠︎ 🚨 🟣 Stabilize package version ⚠️❗️‼️ⓘ + +- name: dividerAzDO + # ~66 character max width for a single line, but the font is not a fixed-width font. + displayName: =-=-=-=-=-=-=-=-=-=-=-=-=-💠AzDO💠-=-=-=-=-=-=-=-=-=-=-=-=-= type: boolean default: false - name: publishToAzDO @@ -27,26 +29,21 @@ parameters: displayName: AzDO publish feed type: string default: public/dotnet10-workloads -- name: publishToNuGet - displayName: Publish to NuGet.org + +- name: dividerVSInsertion + # ~66 character max width for a single line, but the font is not a fixed-width font. + displayName: =-=-=-=-=-=-=-=-=-=-=-=-=-♾️VS INSERTION♾️-=-=-=-=-=-=-=-=-=-=-=-=-= type: boolean default: false - name: createVSInsertion displayName: Create VS insertion type: boolean default: false -- name: usePreComponentsForVSInsertion - displayName: Use Preview Components for VS insertion - type: boolean - default: false -- name: includeNonShippingWorkloads - displayName: Include non-shipping workloads - type: boolean - default: false - name: vsTopicBranch displayName: 'VS Topic Branch [default: temp/{team}/{target}/yyyy-MM]' type: string default: '|default|' +# TODO: This needs fixed for single-entry values. 
- name: workloadDropNames displayName: Workload drop names type: object @@ -67,51 +64,50 @@ parameters: displayName: 'Secondary VS insertion branches [packs only]' type: object default: [] -# - name: testStep -# displayName: Test Step -# type: step -# default: [] -# - name: testStepList -# displayName: Test Step List -# type: stepList -# default: [] -# - name: testJob -# displayName: Test Job -# type: job -# default: [] -# - name: testJobList -# displayName: Test Job List -# type: jobList -# default: [] -# - name: testDeployment -# displayName: Test Deployment -# type: deployment -# default: [] -# - name: testDeploymentList -# displayName: Test Deployment List -# type: deploymentList -# default: [] -# - name: testStage -# displayName: Test Stage -# type: stage -# default: [] -# - name: testStageList -# displayName: Test Stage List -# type: stageList -# default: [] -- name: divider - # 66 character max width for a single line - displayName: ------------------------------------------------------------------ + +- name: dividerOfficial + # ~66 character max width for a single line, but the font is not a fixed-width font. + displayName: =-=-=-=-=-=-=-=-=-=-=-=-=-🟣OFFICIAL🟣-=-=-=-=-=-=-=-=-=-=-=-=-= + type: boolean + default: false +- name: stabilizePackageVersion + displayName: 🚨 Stabilize package version + # displayName: ⚠︎ OFFICIAL ⚠︎ 🚨 🟣 Stabilize package version ⚠️❗️‼️ⓘ + type: boolean + default: false +- name: publishToNuGet + displayName: 🚨 Publish to NuGet.org type: boolean default: false -- name: empty2 - displayName: | - I've got a lovely - bunch of coconuts. - Here's they are - standing in a row. + +- name: dividerAdvanced + # ~66 character max width for a single line, but the font is not a fixed-width font. 
+ displayName: =-=-=-=-=-=-=-=-=-=-=-=-=-⚠️ADVANCED⚠️-=-=-=-=-=-=-=-=-=-=-=-=-= type: boolean default: false +- name: usePreComponentsForVSInsertion + displayName: Use Preview Components for VS insertion + type: boolean + default: false +- name: includeNonShippingWorkloads + displayName: Include non-shipping workloads + type: boolean + default: false + +# - name: divider +# # ~66 character max width for a single line, but the font is not a fixed-width font. +# displayName: ------------------------------------------------------------------ +# type: boolean +# default: false +# - name: empty2 +# displayName: | +# I've got a lovely +# bunch of coconuts. +# Here's they are +# standing in a row. +# type: boolean +# default: false + variables: # Variables used: DncEngInternalBuildPool @@ -147,7 +143,7 @@ resources: type: github name: dotnet/workload-versions ref: ${{ format('refs/heads/{0}', parameters.sourceBranch) }} - # https://dev.azure.com/dnceng-public/public/_settings/adminservices?resourceId=690f39b4-7746-42c2-be89-281bd7c78b9e + # Service connection: https://dev.azure.com/dnceng-public/public/_settings/adminservices?resourceId=690f39b4-7746-42c2-be89-281bd7c78b9e endpoint: public extends: From 8a36879f69bc17690a7954a2436383442dd6cc9d Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 5 Aug 2025 14:32:11 -0700 Subject: [PATCH 021/118] Trying to make checkout a template (might not work). More adjustments to pipeline parameters. 
--- eng/pipelines/official.yml | 16 +++--- .../stages/workload-public-build.yml | 54 ++++++++++--------- .../templates/steps/workload-checkout.yml | 28 ++++++++++ 3 files changed, 65 insertions(+), 33 deletions(-) create mode 100644 eng/pipelines/templates/steps/workload-checkout.yml diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index c35d58225..125b33729 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -17,8 +17,8 @@ parameters: type: string - name: dividerAzDO - # ~66 character max width for a single line, but the font is not a fixed-width font. - displayName: =-=-=-=-=-=-=-=-=-=-=-=-=-💠AzDO💠-=-=-=-=-=-=-=-=-=-=-=-=-= + # The display name is not a fixed-width font. End it here: --| + displayName: =-=-=-=-=-=-=-=-=-=-💠AzDO💠-=-=-=-=-=-=-=-=-=-= type: boolean default: false - name: publishToAzDO @@ -31,8 +31,8 @@ parameters: default: public/dotnet10-workloads - name: dividerVSInsertion - # ~66 character max width for a single line, but the font is not a fixed-width font. - displayName: =-=-=-=-=-=-=-=-=-=-=-=-=-♾️VS INSERTION♾️-=-=-=-=-=-=-=-=-=-=-=-=-= + # The display name is not a fixed-width font. End it here: --| + displayName: =-=-=-=-=-=-=-=-♾️VS INSERTION♾️-=-=-=-=-=-=-=-= type: boolean default: false - name: createVSInsertion @@ -66,8 +66,8 @@ parameters: default: [] - name: dividerOfficial - # ~66 character max width for a single line, but the font is not a fixed-width font. - displayName: =-=-=-=-=-=-=-=-=-=-=-=-=-🟣OFFICIAL🟣-=-=-=-=-=-=-=-=-=-=-=-=-= + # The display name is not a fixed-width font. End it here: --| + displayName: =-=-=-=-=-=-=-=-=-🟣OFFICIAL🟣-=-=-=-=-=-=-=-=-= type: boolean default: false - name: stabilizePackageVersion @@ -81,8 +81,8 @@ parameters: default: false - name: dividerAdvanced - # ~66 character max width for a single line, but the font is not a fixed-width font. 
- displayName: =-=-=-=-=-=-=-=-=-=-=-=-=-⚠️ADVANCED⚠️-=-=-=-=-=-=-=-=-=-=-=-=-= + # The display name is not a fixed-width font. End it here: --| + displayName: =-=-=-=-=-=-=-=-=-⚠️ADVANCED⚠️-=-=-=-=-=-=-=-=-= type: boolean default: false - name: usePreComponentsForVSInsertion diff --git a/eng/pipelines/templates/stages/workload-public-build.yml b/eng/pipelines/templates/stages/workload-public-build.yml index a651a3283..e76c9cc15 100644 --- a/eng/pipelines/templates/stages/workload-public-build.yml +++ b/eng/pipelines/templates/stages/workload-public-build.yml @@ -13,31 +13,35 @@ stages: publish: logs: true steps: - # For checkout mechanics, see: - # https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#build-variables-devops-services - # https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/steps-checkout?view=azure-pipelines - # https://learn.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#checkout-path - - checkout: self - path: self-branch - workspaceRepo: true - displayName: 🟣 Checkout self branch - # There is a warning when performing the second checkout, ##[warning]Unable move and reuse existing repository to required location. - # This happens because both checkouts are of the same repository, thus have the same path of: D:\a\_work\1\s\workload-versions - # The first checkout deletes this directory. To avoid the warning, simply create the directory beforehand. - # See: https://developercommunity.visualstudio.com/t/warning-when-fetching-additional-repositories/1065143 - # Despite all the documentation about Build.Repository.Name, it does not simply give you the repository name. It gives: organization/repositoryName - # To resolve this, we split on '/' and take the last element, which will always be the repository name only. 
- - powershell: New-Item -Path "$(Agent.BuildDirectory)\s" -Name "$('$(Build.Repository.Name)' -Split '/' | Select-Object -Last 1)" -ItemType Directory - displayName: 🟣 (Workaround) Create checkout directory - - checkout: eng - path: eng-branch - displayName: 🟣 Checkout eng branch - # The \* is required for the Exclude to work properly. - # See: https://stackoverflow.com/a/67407481/294804 - - powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng-branch\*" -Destination "$(Agent.BuildDirectory)\self-branch" -Exclude '.git','.gitignore' -Recurse -Force - displayName: 🟣 Copy eng-branch to self-branch - - powershell: 'Get-ChildItem env:' - displayName: 🟣 List Environment Variables + - template: /eng/pipelines/templates/steps/workload-checkout.yml@eng + # parameters: + # sourceBranch: self + # engBranch: eng + # # For checkout mechanics, see: + # # https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#build-variables-devops-services + # # https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/steps-checkout?view=azure-pipelines + # # https://learn.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#checkout-path + # - checkout: self + # path: self-branch + # workspaceRepo: true + # displayName: 🟣 Checkout self branch + # # There is a warning when performing the second checkout, ##[warning]Unable move and reuse existing repository to required location. + # # This happens because both checkouts are of the same repository, thus have the same path of: D:\a\_work\1\s\workload-versions + # # The first checkout deletes this directory. To avoid the warning, simply create the directory beforehand. + # # See: https://developercommunity.visualstudio.com/t/warning-when-fetching-additional-repositories/1065143 + # # Despite all the documentation about Build.Repository.Name, it does not simply give you the repository name. 
It gives: organization/repositoryName + # # To resolve this, we split on '/' and take the last element, which will always be the repository name only. + # - powershell: New-Item -Path "$(Agent.BuildDirectory)\s" -Name "$('$(Build.Repository.Name)' -Split '/' | Select-Object -Last 1)" -ItemType Directory + # displayName: 🟣 (Workaround) Create checkout directory + # - checkout: eng + # path: eng-branch + # displayName: 🟣 Checkout eng branch + # # The \* is required for the Exclude to work properly. + # # See: https://stackoverflow.com/a/67407481/294804 + # - powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng-branch\*" -Destination "$(Agent.BuildDirectory)\self-branch" -Exclude '.git','.gitignore' -Recurse -Force + # displayName: 🟣 Copy eng-branch to self-branch + # - powershell: 'Get-ChildItem env:' + # displayName: 🟣 List Environment Variables - powershell: >- eng/common/build.ps1 -restore -build -pack -ci -msbuildEngine vs diff --git a/eng/pipelines/templates/steps/workload-checkout.yml b/eng/pipelines/templates/steps/workload-checkout.yml new file mode 100644 index 000000000..5e98a724d --- /dev/null +++ b/eng/pipelines/templates/steps/workload-checkout.yml @@ -0,0 +1,28 @@ +parameters: + sourceBranch: self + engBranch: eng + +steps: +# For checkout mechanics, see: +# https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#build-variables-devops-services +# https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/steps-checkout?view=azure-pipelines +# https://learn.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#checkout-path +- checkout: ${{ parameters.sourceBranch }} + path: self-branch + workspaceRepo: true + displayName: 🟣 Checkout self branch +# There is a warning when performing the second checkout, ##[warning]Unable move and reuse existing repository to required location. 
+# This happens because both checkouts are of the same repository, thus have the same path of: D:\a\_work\1\s\workload-versions +# The first checkout deletes this directory. To avoid the warning, simply create the directory beforehand. +# See: https://developercommunity.visualstudio.com/t/warning-when-fetching-additional-repositories/1065143 +# Despite all the documentation about Build.Repository.Name, it does not simply give you the repository name. It gives: organization/repositoryName +# To resolve this, we split on '/' and take the last element, which will always be the repository name only. +- powershell: New-Item -Path "$(Agent.BuildDirectory)\s" -Name "$('$(Build.Repository.Name)' -Split '/' | Select-Object -Last 1)" -ItemType Directory + displayName: 🟣 (Workaround) Create checkout directory +- checkout: ${{ parameters.engBranch }} + path: eng-branch + displayName: 🟣 Checkout eng branch +# The \* is required for the Exclude to work properly. +# See: https://stackoverflow.com/a/67407481/294804 +- powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng-branch\*" -Destination "$(Agent.BuildDirectory)\self-branch" -Exclude '.git','.gitignore' -Recurse -Force + displayName: 🟣 Copy eng-branch to self-branch \ No newline at end of file From 9522c1d5efd67253a9aa57456e74d57937402feb Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 5 Aug 2025 14:55:10 -0700 Subject: [PATCH 022/118] Getting the checkout mechanics hooked up for the official build. 
--- eng/pipelines/official.yml | 14 ++++------ .../templates/jobs/workload-build.yml | 4 +++ .../stages/workload-public-build.yml | 28 ------------------- .../templates/steps/workload-checkout.yml | 10 +++---- 4 files changed, 14 insertions(+), 42 deletions(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 125b33729..9ecaa9e7b 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -17,8 +17,7 @@ parameters: type: string - name: dividerAzDO - # The display name is not a fixed-width font. End it here: --| - displayName: =-=-=-=-=-=-=-=-=-=-💠AzDO💠-=-=-=-=-=-=-=-=-=-= + displayName: === 💠 AZURE DEVOPS 💠 === type: boolean default: false - name: publishToAzDO @@ -31,8 +30,7 @@ parameters: default: public/dotnet10-workloads - name: dividerVSInsertion - # The display name is not a fixed-width font. End it here: --| - displayName: =-=-=-=-=-=-=-=-♾️VS INSERTION♾️-=-=-=-=-=-=-=-= + displayName: === ♾️ VS INSERTION ♾️ === type: boolean default: false - name: createVSInsertion @@ -66,13 +64,11 @@ parameters: default: [] - name: dividerOfficial - # The display name is not a fixed-width font. End it here: --| - displayName: =-=-=-=-=-=-=-=-=-🟣OFFICIAL🟣-=-=-=-=-=-=-=-=-= + displayName: === 🟣 OFFICIAL 🟣 === type: boolean default: false - name: stabilizePackageVersion displayName: 🚨 Stabilize package version - # displayName: ⚠︎ OFFICIAL ⚠︎ 🚨 🟣 Stabilize package version ⚠️❗️‼️ⓘ type: boolean default: false - name: publishToNuGet @@ -81,8 +77,7 @@ parameters: default: false - name: dividerAdvanced - # The display name is not a fixed-width font. 
End it here: --| - displayName: =-=-=-=-=-=-=-=-=-⚠️ADVANCED⚠️-=-=-=-=-=-=-=-=-= + displayName: === ⚠️ ADVANCED ⚠️ === type: boolean default: false - name: usePreComponentsForVSInsertion @@ -94,6 +89,7 @@ parameters: type: boolean default: false + # displayName: ⚠︎ OFFICIAL ⚠︎ 🚨 🟣 Stabilize package version ⚠️❗️‼️ⓘ # - name: divider # # ~66 character max width for a single line, but the font is not a fixed-width font. # displayName: ------------------------------------------------------------------ diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 76dbfa825..a31b64ec8 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -34,6 +34,10 @@ jobs: image: 1es-windows-2022 os: windows steps: + - template: /eng/pipelines/templates/steps/workload-checkout.yml + parameters: + sourceBranch: source + engBranch: self - ${{ if eq(parameters.createVSInsertion, true) }}: - task: AzureCLI@2 displayName: 🟣 Download workloads for VS insertion diff --git a/eng/pipelines/templates/stages/workload-public-build.yml b/eng/pipelines/templates/stages/workload-public-build.yml index e76c9cc15..caf5eaa98 100644 --- a/eng/pipelines/templates/stages/workload-public-build.yml +++ b/eng/pipelines/templates/stages/workload-public-build.yml @@ -14,34 +14,6 @@ stages: logs: true steps: - template: /eng/pipelines/templates/steps/workload-checkout.yml@eng - # parameters: - # sourceBranch: self - # engBranch: eng - # # For checkout mechanics, see: - # # https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#build-variables-devops-services - # # https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/steps-checkout?view=azure-pipelines - # # https://learn.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#checkout-path - # - checkout: self - # path: self-branch - # workspaceRepo: 
true - # displayName: 🟣 Checkout self branch - # # There is a warning when performing the second checkout, ##[warning]Unable move and reuse existing repository to required location. - # # This happens because both checkouts are of the same repository, thus have the same path of: D:\a\_work\1\s\workload-versions - # # The first checkout deletes this directory. To avoid the warning, simply create the directory beforehand. - # # See: https://developercommunity.visualstudio.com/t/warning-when-fetching-additional-repositories/1065143 - # # Despite all the documentation about Build.Repository.Name, it does not simply give you the repository name. It gives: organization/repositoryName - # # To resolve this, we split on '/' and take the last element, which will always be the repository name only. - # - powershell: New-Item -Path "$(Agent.BuildDirectory)\s" -Name "$('$(Build.Repository.Name)' -Split '/' | Select-Object -Last 1)" -ItemType Directory - # displayName: 🟣 (Workaround) Create checkout directory - # - checkout: eng - # path: eng-branch - # displayName: 🟣 Checkout eng branch - # # The \* is required for the Exclude to work properly. 
- # # See: https://stackoverflow.com/a/67407481/294804 - # - powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng-branch\*" -Destination "$(Agent.BuildDirectory)\self-branch" -Exclude '.git','.gitignore' -Recurse -Force - # displayName: 🟣 Copy eng-branch to self-branch - # - powershell: 'Get-ChildItem env:' - # displayName: 🟣 List Environment Variables - powershell: >- eng/common/build.ps1 -restore -build -pack -ci -msbuildEngine vs diff --git a/eng/pipelines/templates/steps/workload-checkout.yml b/eng/pipelines/templates/steps/workload-checkout.yml index 5e98a724d..ee32da50f 100644 --- a/eng/pipelines/templates/steps/workload-checkout.yml +++ b/eng/pipelines/templates/steps/workload-checkout.yml @@ -8,9 +8,9 @@ steps: # https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/steps-checkout?view=azure-pipelines # https://learn.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#checkout-path - checkout: ${{ parameters.sourceBranch }} - path: self-branch + path: source-branch workspaceRepo: true - displayName: 🟣 Checkout self branch + displayName: 🟣 Checkout source branch # There is a warning when performing the second checkout, ##[warning]Unable move and reuse existing repository to required location. # This happens because both checkouts are of the same repository, thus have the same path of: D:\a\_work\1\s\workload-versions # The first checkout deletes this directory. To avoid the warning, simply create the directory beforehand. @@ -18,11 +18,11 @@ steps: # Despite all the documentation about Build.Repository.Name, it does not simply give you the repository name. It gives: organization/repositoryName # To resolve this, we split on '/' and take the last element, which will always be the repository name only. 
- powershell: New-Item -Path "$(Agent.BuildDirectory)\s" -Name "$('$(Build.Repository.Name)' -Split '/' | Select-Object -Last 1)" -ItemType Directory - displayName: 🟣 (Workaround) Create checkout directory + displayName: 🟣 [Workaround] Create checkout directory - checkout: ${{ parameters.engBranch }} path: eng-branch displayName: 🟣 Checkout eng branch # The \* is required for the Exclude to work properly. # See: https://stackoverflow.com/a/67407481/294804 -- powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng-branch\*" -Destination "$(Agent.BuildDirectory)\self-branch" -Exclude '.git','.gitignore' -Recurse -Force - displayName: 🟣 Copy eng-branch to self-branch \ No newline at end of file +- powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng-branch\*" -Destination "$(Agent.BuildDirectory)\source-branch" -Exclude '.git','.gitignore' -Recurse -Force + displayName: 🟣 Copy eng-branch to source-branch \ No newline at end of file From b3cfbc96ac656514b0c8e490faf0b79e41bd3920 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 5 Aug 2025 15:07:01 -0700 Subject: [PATCH 023/118] Fix source repo ref. Adjust parameter headers again. 
--- eng/pipelines/official.yml | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 9ecaa9e7b..9ae9f0203 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -17,7 +17,7 @@ parameters: type: string - name: dividerAzDO - displayName: === 💠 AZURE DEVOPS 💠 === + displayName: '[###### 💠 AZURE DEVOPS 💠 ######]' type: boolean default: false - name: publishToAzDO @@ -30,7 +30,7 @@ parameters: default: public/dotnet10-workloads - name: dividerVSInsertion - displayName: === ♾️ VS INSERTION ♾️ === + displayName: '[###### ♾️ VS INSERTION ♾️ ######]' type: boolean default: false - name: createVSInsertion @@ -64,7 +64,7 @@ parameters: default: [] - name: dividerOfficial - displayName: === 🟣 OFFICIAL 🟣 === + displayName: '[###### 🟣 OFFICIAL 🟣 ######]' type: boolean default: false - name: stabilizePackageVersion @@ -77,7 +77,7 @@ parameters: default: false - name: dividerAdvanced - displayName: === ⚠️ ADVANCED ⚠️ === + displayName: '[###### ⚠️ ADVANCED ⚠️ ######]' type: boolean default: false - name: usePreComponentsForVSInsertion @@ -136,11 +136,9 @@ resources: name: 1ESPipelineTemplates/1ESPipelineTemplates ref: refs/tags/release - repository: source - type: github - name: dotnet/workload-versions + type: git + name: internal/dotnet-workload-versions ref: ${{ format('refs/heads/{0}', parameters.sourceBranch) }} - # Service connection: https://dev.azure.com/dnceng-public/public/_settings/adminservices?resourceId=690f39b4-7746-42c2-be89-281bd7c78b9e - endpoint: public extends: template: v1/1ES.Official.PipelineTemplate.yml@1esPipelines From 687d9fc0389bb2d21ef3cadfca43efa4db078c87 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 5 Aug 2025 15:10:20 -0700 Subject: [PATCH 024/118] Forgot to properly @ the template locations. 
--- eng/pipelines/official.yml | 2 +- eng/pipelines/templates/jobs/workload-build.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 9ae9f0203..f8d25226f 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -107,7 +107,7 @@ parameters: variables: # Variables used: DncEngInternalBuildPool -- template: /eng/common/templates-official/variables/pool-providers.yml@self +- template: /eng/common/templates-official/variables/pool-providers.yml@source ############### ARCADE ############### # Both this (used in Arcade for the MicroBuildSigningPlugin) and DotNetSignType (used in Arcade in Sign.proj) are necessary to set the sign type. # https://github.com/dotnet/arcade/blob/ccae251ef033746eb0213329953f5e3c1687693b/Documentation/ArcadeSdk.md#common-steps-in-azure-devops-pipeline diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index a31b64ec8..a2d80d026 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -10,7 +10,7 @@ parameters: jobs: # Note: jobs.yml automatically includes the Publish Assets job for pushing the artifacts to DARC. -- template: /eng/common/templates-official/jobs/jobs.yml +- template: /eng/common/templates-official/jobs/jobs.yml@source parameters: enableMicrobuild: true enablePublishBuildAssets: true @@ -34,7 +34,7 @@ jobs: image: 1es-windows-2022 os: windows steps: - - template: /eng/pipelines/templates/steps/workload-checkout.yml + - template: /eng/pipelines/templates/steps/workload-checkout.yml@self parameters: sourceBranch: source engBranch: self From 6135a49ff2054cb78cfeb7e38c557648e0ecaf50 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 5 Aug 2025 15:16:08 -0700 Subject: [PATCH 025/118] Added source to SDL scan list. 
--- eng/pipelines/official.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index f8d25226f..e586236b6 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -144,6 +144,9 @@ extends: template: v1/1ES.Official.PipelineTemplate.yml@1esPipelines parameters: sdl: + sourceRepositoriesToScan: + include: + - repository: source sourceAnalysisPool: name: $(DncEngInternalBuildPool) image: 1es-windows-2022 From a11081ccab001632a5a808b7373ad3d86cebbc3d Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 5 Aug 2025 15:46:48 -0700 Subject: [PATCH 026/118] Trying to name the source-branch s to see what CredScan does. --- eng/pipelines/templates/steps/workload-checkout.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/eng/pipelines/templates/steps/workload-checkout.yml b/eng/pipelines/templates/steps/workload-checkout.yml index ee32da50f..1ccd5da06 100644 --- a/eng/pipelines/templates/steps/workload-checkout.yml +++ b/eng/pipelines/templates/steps/workload-checkout.yml @@ -8,7 +8,7 @@ steps: # https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/steps-checkout?view=azure-pipelines # https://learn.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#checkout-path - checkout: ${{ parameters.sourceBranch }} - path: source-branch + path: s workspaceRepo: true displayName: 🟣 Checkout source branch # There is a warning when performing the second checkout, ##[warning]Unable move and reuse existing repository to required location. @@ -24,5 +24,5 @@ steps: displayName: 🟣 Checkout eng branch # The \* is required for the Exclude to work properly. 
# See: https://stackoverflow.com/a/67407481/294804 -- powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng-branch\*" -Destination "$(Agent.BuildDirectory)\source-branch" -Exclude '.git','.gitignore' -Recurse -Force +- powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng-branch\*" -Destination "$(Agent.BuildDirectory)\s" -Exclude '.git','.gitignore' -Recurse -Force displayName: 🟣 Copy eng-branch to source-branch \ No newline at end of file From 2ae8929ac4c390c472150e3d8f14a2299d6b671f Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 5 Aug 2025 16:16:51 -0700 Subject: [PATCH 027/118] Adjusted section dividers. Added nowarn for no Readme in package. --- Directory.Build.props | 2 ++ eng/pipelines/official.yml | 8 ++++---- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/Directory.Build.props b/Directory.Build.props index 84ddd61c9..0f303e631 100644 --- a/Directory.Build.props +++ b/Directory.Build.props @@ -10,6 +10,8 @@ true true + + $(NoWarn);NU5039 \ No newline at end of file diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index e586236b6..ad637dcf9 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -17,7 +17,7 @@ parameters: type: string - name: dividerAzDO - displayName: '[###### 💠 AZURE DEVOPS 💠 ######]' + displayName: '[ ######## AZURE DEVOPS ######## ]' type: boolean default: false - name: publishToAzDO @@ -30,7 +30,7 @@ parameters: default: public/dotnet10-workloads - name: dividerVSInsertion - displayName: '[###### ♾️ VS INSERTION ♾️ ######]' + displayName: '[ ######## VS INSERTION ######## ]' type: boolean default: false - name: createVSInsertion @@ -64,7 +64,7 @@ parameters: default: [] - name: dividerOfficial - displayName: '[###### 🟣 OFFICIAL 🟣 ######]' + displayName: '[ ######## OFFICIAL ######## ]' type: boolean default: false - name: stabilizePackageVersion @@ -77,7 +77,7 @@ parameters: default: false - name: dividerAdvanced - displayName: '[###### ⚠️ ADVANCED ⚠️ ######]' + 
displayName: '[ ######## ADVANCED ######## ]' type: boolean default: false - name: usePreComponentsForVSInsertion From d2fc5483775c110d50f8d7613e7e39817219be70 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 5 Aug 2025 16:31:26 -0700 Subject: [PATCH 028/118] Removed commented code. Switched back to source-branch. --- eng/pipelines/official.yml | 16 ---------------- .../templates/steps/workload-checkout.yml | 4 ++-- 2 files changed, 2 insertions(+), 18 deletions(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index ad637dcf9..08054eee4 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -89,22 +89,6 @@ parameters: type: boolean default: false - # displayName: ⚠︎ OFFICIAL ⚠︎ 🚨 🟣 Stabilize package version ⚠️❗️‼️ⓘ -# - name: divider -# # ~66 character max width for a single line, but the font is not a fixed-width font. -# displayName: ------------------------------------------------------------------ -# type: boolean -# default: false -# - name: empty2 -# displayName: | -# I've got a lovely -# bunch of coconuts. -# Here's they are -# standing in a row. 
-# type: boolean -# default: false - - variables: # Variables used: DncEngInternalBuildPool - template: /eng/common/templates-official/variables/pool-providers.yml@source diff --git a/eng/pipelines/templates/steps/workload-checkout.yml b/eng/pipelines/templates/steps/workload-checkout.yml index 1ccd5da06..ee32da50f 100644 --- a/eng/pipelines/templates/steps/workload-checkout.yml +++ b/eng/pipelines/templates/steps/workload-checkout.yml @@ -8,7 +8,7 @@ steps: # https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/steps-checkout?view=azure-pipelines # https://learn.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#checkout-path - checkout: ${{ parameters.sourceBranch }} - path: s + path: source-branch workspaceRepo: true displayName: 🟣 Checkout source branch # There is a warning when performing the second checkout, ##[warning]Unable move and reuse existing repository to required location. @@ -24,5 +24,5 @@ steps: displayName: 🟣 Checkout eng branch # The \* is required for the Exclude to work properly. # See: https://stackoverflow.com/a/67407481/294804 -- powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng-branch\*" -Destination "$(Agent.BuildDirectory)\s" -Exclude '.git','.gitignore' -Recurse -Force +- powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng-branch\*" -Destination "$(Agent.BuildDirectory)\source-branch" -Exclude '.git','.gitignore' -Recurse -Force displayName: 🟣 Copy eng-branch to source-branch \ No newline at end of file From 5278fef8522752046ceae9fdfa5b40a2908ac21b Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 6 Aug 2025 16:25:32 -0700 Subject: [PATCH 029/118] Add an empty CredScanSuppressions file. Change Build.SourcesDirectory to Build.Repository.LocalPath. 
--- .config/CredScanSuppressions.json | 4 ++++ eng/pipelines/official.yml | 12 +++++++----- eng/pipelines/templates/jobs/workload-build.yml | 6 +++--- eng/pipelines/templates/steps/workload-checkout.yml | 2 +- 4 files changed, 15 insertions(+), 9 deletions(-) create mode 100644 .config/CredScanSuppressions.json diff --git a/.config/CredScanSuppressions.json b/.config/CredScanSuppressions.json new file mode 100644 index 000000000..0e52eb402 --- /dev/null +++ b/.config/CredScanSuppressions.json @@ -0,0 +1,4 @@ +{ + "tool": "Credential Scanner", + "suppressions": [] +} diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 08054eee4..14eff3540 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -140,6 +140,8 @@ extends: # Variables for TSA set within the DotNet-CLI-SDLValidation-Params variable group. tsa: enabled: true + credscan: + suppressionsFile: $(Build.Repository.LocalPath)\.config\CredScanSuppressions.json stages: - stage: Build displayName: Build @@ -177,7 +179,7 @@ extends: displayName: 🟣 Download build artifacts inputs: artifactName: Artifacts - targetPath: $(Build.SourcesDirectory)/artifacts + targetPath: $(Build.Repository.LocalPath)/artifacts # 1ES docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/outputs/nuget-packages # DotNetCoreCLI@2 docs: https://learn.microsoft.com/en-us/azure/devops/pipelines/tasks/reference/dotnet-core-cli-v2 - ${{ if eq(parameters.publishToAzDO, true) }}: @@ -185,16 +187,16 @@ extends: displayName: 🟣 Publish packages to AzDO inputs: useDotNetTask: true - packagesToPush: $(Build.SourcesDirectory)/artifacts/packages/**/*.nupkg - packageParentPath: $(Build.SourcesDirectory)/artifacts + packagesToPush: $(Build.Repository.LocalPath)/artifacts/packages/**/*.nupkg + packageParentPath: $(Build.Repository.LocalPath)/artifacts publishVstsFeed: ${{ parameters.azDOPublishFeed }} - ${{ if eq(parameters.publishToNuGet, true) }}: - 
task: 1ES.PublishNuget@1 displayName: 🟣 Publish packages to NuGet.org inputs: useDotNetTask: false - packagesToPush: $(Build.SourcesDirectory)/artifacts/packages/**/*.nupkg - packageParentPath: $(Build.SourcesDirectory)/artifacts + packagesToPush: $(Build.Repository.LocalPath)/artifacts/packages/**/*.nupkg + packageParentPath: $(Build.Repository.LocalPath)/artifacts nuGetFeedType: external publishVstsFeed: https://api.nuget.org/v3/index.json # Service connection: https://dev.azure.com/dnceng/internal/_settings/adminservices?resourceId=479fdc43-a27d-4f5f-b2fc-5cf19dce159a diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index a2d80d026..282f30137 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -44,10 +44,10 @@ jobs: inputs: azureSubscription: DotNetStaging scriptType: pscore - scriptPath: $(Build.SourcesDirectory)/eng/download-workloads.ps1 + scriptPath: $(Build.Repository.LocalPath)/eng/download-workloads.ps1 # Note: The second $ for usePreComponents and includeNonShipping allows the value to resolve as `$true` or `$false`. 
arguments: >- - -workloadPath '$(Build.SourcesDirectory)/artifacts/workloads' + -workloadPath '$(Build.Repository.LocalPath)/artifacts/workloads' -gitHubPat (ConvertTo-SecureString -String '$(BotAccount-dotnet-bot-repo-PAT)' -AsPlainText -Force) -azDOPat (ConvertTo-SecureString -String '$(dn-bot-all-drop-rw-code-rw-release-all)' -AsPlainText -Force) -workloadListJson '${{ convertToJson(parameters.workloadDropNames) }}' @@ -60,7 +60,7 @@ jobs: -restore -build -sign -pack -publish -ci -msbuildEngine vs -configuration $(_BuildConfig) /p:CreateVSInsertion=${{ parameters.createVSInsertion }} - /p:WorkloadDirectory=$(Build.SourcesDirectory)/artifacts/workloads + /p:WorkloadDirectory=$(Build.Repository.LocalPath)/artifacts/workloads /p:DotNetSignType=$(_SignType) /p:TeamName=$(_TeamName) /p:DotNetPublishUsingPipelines=true diff --git a/eng/pipelines/templates/steps/workload-checkout.yml b/eng/pipelines/templates/steps/workload-checkout.yml index ee32da50f..5f82b475b 100644 --- a/eng/pipelines/templates/steps/workload-checkout.yml +++ b/eng/pipelines/templates/steps/workload-checkout.yml @@ -24,5 +24,5 @@ steps: displayName: 🟣 Checkout eng branch # The \* is required for the Exclude to work properly. # See: https://stackoverflow.com/a/67407481/294804 -- powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng-branch\*" -Destination "$(Agent.BuildDirectory)\source-branch" -Exclude '.git','.gitignore' -Recurse -Force +- powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng-branch\*" -Destination "$(Agent.BuildDirectory)\source-branch" -Exclude '.git','.gitignore','.config\tsaoptions.json' -Recurse -Force displayName: 🟣 Copy eng-branch to source-branch \ No newline at end of file From 8ee8c3f3a733c32fb44cdcbfac7a477b0c99bf4e Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Thu, 7 Aug 2025 14:01:24 -0700 Subject: [PATCH 030/118] Updated tsaoptions path. 
--- eng/pipelines/official.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 14eff3540..a61cace18 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -128,6 +128,7 @@ extends: template: v1/1ES.Official.PipelineTemplate.yml@1esPipelines parameters: sdl: + # Docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/sdlanalysis/sourceanalysisstage#my-pipeline-uses-multiple-repositories-how-to-ensure-that-sdl-sources-stage-is-injected-for-all-the-repositories sourceRepositoriesToScan: include: - repository: source @@ -138,8 +139,11 @@ extends: policheck: enabled: true # Variables for TSA set within the DotNet-CLI-SDLValidation-Params variable group. + # Docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/sdlanalysis/tsasupport tsa: enabled: true + configFile: $(Build.Repository.LocalPath)\.config\tsaoptions.json + # Docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/sdlanalysis/credscan#customizations credscan: suppressionsFile: $(Build.Repository.LocalPath)\.config\CredScanSuppressions.json stages: From 58845cc68a5587006cecc6bac01f417bfa4cfa8d Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Thu, 7 Aug 2025 17:02:29 -0700 Subject: [PATCH 031/118] Added debug step. --- eng/pipelines/templates/steps/workload-checkout.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/eng/pipelines/templates/steps/workload-checkout.yml b/eng/pipelines/templates/steps/workload-checkout.yml index 5f82b475b..868585cd0 100644 --- a/eng/pipelines/templates/steps/workload-checkout.yml +++ b/eng/pipelines/templates/steps/workload-checkout.yml @@ -24,5 +24,7 @@ steps: displayName: 🟣 Checkout eng branch # The \* is required for the Exclude to work properly. 
# See: https://stackoverflow.com/a/67407481/294804 -- powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng-branch\*" -Destination "$(Agent.BuildDirectory)\source-branch" -Exclude '.git','.gitignore','.config\tsaoptions.json' -Recurse -Force - displayName: 🟣 Copy eng-branch to source-branch \ No newline at end of file +- powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng-branch\*" -Destination "$(Agent.BuildDirectory)\source-branch" -Exclude '.git','.gitignore','.config' -Recurse -Force + displayName: 🟣 Copy eng-branch to source-branch +- powershell: 'Get-ChildItem env:' + displayName: 🟣 List Environment Variables \ No newline at end of file From 2ea6b0d694bb079d0c81471163487b59893a2660 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Thu, 7 Aug 2025 17:35:46 -0700 Subject: [PATCH 032/118] Added a repositoryAlias for the jobs template. --- eng/pipelines/templates/jobs/workload-build.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 282f30137..a5e1489ef 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -18,6 +18,7 @@ jobs: enablePublishUsingPipelines: true publishAssetsImmediately: true enableSbom: true + repositoryAlias: source artifacts: publish: artifacts: From 9165acbc2bfe1fa7e1537162926da365eb2ba9e1 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Thu, 7 Aug 2025 17:51:58 -0700 Subject: [PATCH 033/118] Changing to System.DefaultWorkingDirectory. 
--- eng/pipelines/official.yml | 14 +++++++------- eng/pipelines/templates/jobs/workload-build.yml | 6 +++--- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index a61cace18..445285d7e 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -142,10 +142,10 @@ extends: # Docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/sdlanalysis/tsasupport tsa: enabled: true - configFile: $(Build.Repository.LocalPath)\.config\tsaoptions.json + configFile: $(System.DefaultWorkingDirectory)\.config\tsaoptions.json # Docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/sdlanalysis/credscan#customizations credscan: - suppressionsFile: $(Build.Repository.LocalPath)\.config\CredScanSuppressions.json + suppressionsFile: $(System.DefaultWorkingDirectory)\.config\CredScanSuppressions.json stages: - stage: Build displayName: Build @@ -183,7 +183,7 @@ extends: displayName: 🟣 Download build artifacts inputs: artifactName: Artifacts - targetPath: $(Build.Repository.LocalPath)/artifacts + targetPath: $(System.DefaultWorkingDirectory)/artifacts # 1ES docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/outputs/nuget-packages # DotNetCoreCLI@2 docs: https://learn.microsoft.com/en-us/azure/devops/pipelines/tasks/reference/dotnet-core-cli-v2 - ${{ if eq(parameters.publishToAzDO, true) }}: @@ -191,16 +191,16 @@ extends: displayName: 🟣 Publish packages to AzDO inputs: useDotNetTask: true - packagesToPush: $(Build.Repository.LocalPath)/artifacts/packages/**/*.nupkg - packageParentPath: $(Build.Repository.LocalPath)/artifacts + packagesToPush: $(System.DefaultWorkingDirectory)/artifacts/packages/**/*.nupkg + packageParentPath: $(System.DefaultWorkingDirectory)/artifacts publishVstsFeed: ${{ 
parameters.azDOPublishFeed }} - ${{ if eq(parameters.publishToNuGet, true) }}: - task: 1ES.PublishNuget@1 displayName: 🟣 Publish packages to NuGet.org inputs: useDotNetTask: false - packagesToPush: $(Build.Repository.LocalPath)/artifacts/packages/**/*.nupkg - packageParentPath: $(Build.Repository.LocalPath)/artifacts + packagesToPush: $(System.DefaultWorkingDirectory)/artifacts/packages/**/*.nupkg + packageParentPath: $(System.DefaultWorkingDirectory)/artifacts nuGetFeedType: external publishVstsFeed: https://api.nuget.org/v3/index.json # Service connection: https://dev.azure.com/dnceng/internal/_settings/adminservices?resourceId=479fdc43-a27d-4f5f-b2fc-5cf19dce159a diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index a5e1489ef..a4de24fd7 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -45,10 +45,10 @@ jobs: inputs: azureSubscription: DotNetStaging scriptType: pscore - scriptPath: $(Build.Repository.LocalPath)/eng/download-workloads.ps1 + scriptPath: $(System.DefaultWorkingDirectory)/eng/download-workloads.ps1 # Note: The second $ for usePreComponents and includeNonShipping allows the value to resolve as `$true` or `$false`. 
arguments: >- - -workloadPath '$(Build.Repository.LocalPath)/artifacts/workloads' + -workloadPath '$(System.DefaultWorkingDirectory)/artifacts/workloads' -gitHubPat (ConvertTo-SecureString -String '$(BotAccount-dotnet-bot-repo-PAT)' -AsPlainText -Force) -azDOPat (ConvertTo-SecureString -String '$(dn-bot-all-drop-rw-code-rw-release-all)' -AsPlainText -Force) -workloadListJson '${{ convertToJson(parameters.workloadDropNames) }}' @@ -61,7 +61,7 @@ jobs: -restore -build -sign -pack -publish -ci -msbuildEngine vs -configuration $(_BuildConfig) /p:CreateVSInsertion=${{ parameters.createVSInsertion }} - /p:WorkloadDirectory=$(Build.Repository.LocalPath)/artifacts/workloads + /p:WorkloadDirectory=$(System.DefaultWorkingDirectory)/artifacts/workloads /p:DotNetSignType=$(_SignType) /p:TeamName=$(_TeamName) /p:DotNetPublishUsingPipelines=true From 161359273461865858d1978b27745b49bbcc2344 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 8 Aug 2025 16:06:10 -0700 Subject: [PATCH 034/118] Debug cleanup. --- eng/pipelines/official.yml | 7 ------- eng/pipelines/templates/steps/workload-checkout.yml | 4 +--- 2 files changed, 1 insertion(+), 10 deletions(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 445285d7e..0cc247d92 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -1,12 +1,5 @@ # Pipeline: https://dev.azure.com/dnceng/internal/_build?definitionId=1298 -# trigger: -# batch: true -# branches: -# include: -# - main -# - release/* - # Note: This pipeline can never run automatically because the sourceBranch parameter is required. trigger: none pr: none diff --git a/eng/pipelines/templates/steps/workload-checkout.yml b/eng/pipelines/templates/steps/workload-checkout.yml index 868585cd0..dfa348d83 100644 --- a/eng/pipelines/templates/steps/workload-checkout.yml +++ b/eng/pipelines/templates/steps/workload-checkout.yml @@ -25,6 +25,4 @@ steps: # The \* is required for the Exclude to work properly. 
# See: https://stackoverflow.com/a/67407481/294804 - powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng-branch\*" -Destination "$(Agent.BuildDirectory)\source-branch" -Exclude '.git','.gitignore','.config' -Recurse -Force - displayName: 🟣 Copy eng-branch to source-branch -- powershell: 'Get-ChildItem env:' - displayName: 🟣 List Environment Variables \ No newline at end of file + displayName: 🟣 Copy eng-branch to source-branch \ No newline at end of file From 79a91aa0d5fd52f0ffcdd50e5e331e86e61409e9 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 12 Aug 2025 18:26:36 -0700 Subject: [PATCH 035/118] Renamed the files back as we want them to be the original names. They need to have different contents, so we add them to the ignore list when cloning. --- build-eng.cmd => build.cmd | 0 eng/pipelines/{public-eng.yml => public.yml} | 0 eng/pipelines/templates/steps/workload-checkout.yml | 2 +- 3 files changed, 1 insertion(+), 1 deletion(-) rename build-eng.cmd => build.cmd (100%) rename eng/pipelines/{public-eng.yml => public.yml} (100%) diff --git a/build-eng.cmd b/build.cmd similarity index 100% rename from build-eng.cmd rename to build.cmd diff --git a/eng/pipelines/public-eng.yml b/eng/pipelines/public.yml similarity index 100% rename from eng/pipelines/public-eng.yml rename to eng/pipelines/public.yml diff --git a/eng/pipelines/templates/steps/workload-checkout.yml b/eng/pipelines/templates/steps/workload-checkout.yml index dfa348d83..b2f2fdd91 100644 --- a/eng/pipelines/templates/steps/workload-checkout.yml +++ b/eng/pipelines/templates/steps/workload-checkout.yml @@ -24,5 +24,5 @@ steps: displayName: 🟣 Checkout eng branch # The \* is required for the Exclude to work properly. 
# See: https://stackoverflow.com/a/67407481/294804 -- powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng-branch\*" -Destination "$(Agent.BuildDirectory)\source-branch" -Exclude '.git','.gitignore','.config' -Recurse -Force +- powershell: Copy-Item -Path "$(Agent.BuildDirectory)\eng-branch\*" -Destination "$(Agent.BuildDirectory)\source-branch" -Exclude '.git','.config','.gitignore','build.cmd','eng\pipelines\public.yml' -Recurse -Force displayName: 🟣 Copy eng-branch to source-branch \ No newline at end of file From ac020e3092574a17252056f090668d661e34aeb2 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 13 Aug 2025 16:17:03 -0700 Subject: [PATCH 036/118] Trying to figure out the PR build for the eng branch itself. Trying something. --- eng/pipelines/public.yml | 46 ++++++------------- .../stages/workload-public-build.yml | 8 +++- 2 files changed, 21 insertions(+), 33 deletions(-) diff --git a/eng/pipelines/public.yml b/eng/pipelines/public.yml index 250f2e788..7749aa27f 100644 --- a/eng/pipelines/public.yml +++ b/eng/pipelines/public.yml @@ -8,38 +8,22 @@ pr: trigger: none +resources: + repositories: + - repository: source + type: github + name: dotnet/workload-versions + ref: refs/heads/main + # https://dev.azure.com/dnceng-public/public/_settings/adminservices?resourceId=690f39b4-7746-42c2-be89-281bd7c78b9e + endpoint: public + variables: +- template: /eng/pipelines/templates/variables/workload-public.yml@self # Variables used: DncEngPublicBuildPool -- template: /eng/common/templates/variables/pool-providers.yml -############### ARCADE ############### -- name: _SignType - value: test -- name: _TeamName - value: DotNet-Cli -- name: _BuildConfig - value: Release -- name: PostBuildSign - value: true +- template: /eng/common/templates/variables/pool-providers.yml@source stages: -- stage: Build - displayName: Build - jobs: - - template: /eng/common/templates/job/job.yml - parameters: - name: buildRepo - displayName: Build Repo - pool: - name: 
$(DncEngPublicBuildPool) - demands: ImageOverride -equals windows.vs2022.amd64.open - artifacts: - publish: - logs: true - steps: - - powershell: >- - eng/common/build.ps1 - -restore -build -pack -ci -msbuildEngine vs - -configuration $(_BuildConfig) - /p:DotNetSignType=$(_SignType) - /p:TeamName=$(_TeamName) - displayName: 🟣 Build solution \ No newline at end of file +- template: /eng/pipelines/templates/stages/workload-public-build.yml@self + parameters: + sourceBranch: source + engBranch: self \ No newline at end of file diff --git a/eng/pipelines/templates/stages/workload-public-build.yml b/eng/pipelines/templates/stages/workload-public-build.yml index caf5eaa98..1aaed8faf 100644 --- a/eng/pipelines/templates/stages/workload-public-build.yml +++ b/eng/pipelines/templates/stages/workload-public-build.yml @@ -1,8 +1,12 @@ +parameters: + sourceBranch: self + engBranch: eng + stages: - stage: Build displayName: Build jobs: - - template: /eng/common/templates/job/job.yml@self + - template: /eng/common/templates/job/job.yml@${{ parameters.sourceBranch }} parameters: name: buildRepo displayName: Build Repo @@ -13,7 +17,7 @@ stages: publish: logs: true steps: - - template: /eng/pipelines/templates/steps/workload-checkout.yml@eng + - template: /eng/pipelines/templates/steps/workload-checkout.yml@${{ parameters.engBranch }} - powershell: >- eng/common/build.ps1 -restore -build -pack -ci -msbuildEngine vs From 9f4b57990b33f92af976410d55cd3769a548aad9 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 13 Aug 2025 16:19:58 -0700 Subject: [PATCH 037/118] Forgot to pass the params to the checkout template. 
--- eng/pipelines/templates/stages/workload-public-build.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/eng/pipelines/templates/stages/workload-public-build.yml b/eng/pipelines/templates/stages/workload-public-build.yml index 1aaed8faf..5440fdc8b 100644 --- a/eng/pipelines/templates/stages/workload-public-build.yml +++ b/eng/pipelines/templates/stages/workload-public-build.yml @@ -18,6 +18,9 @@ stages: logs: true steps: - template: /eng/pipelines/templates/steps/workload-checkout.yml@${{ parameters.engBranch }} + parameters: + sourceBranch: ${{ parameters.sourceBranch }} + engBranch: ${{ parameters.engBranch }} - powershell: >- eng/common/build.ps1 -restore -build -pack -ci -msbuildEngine vs From bb64dbdf3e07235293758128d86027fcf00ca776 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 13 Aug 2025 17:09:35 -0700 Subject: [PATCH 038/118] Added building the 8 and 9 branches along with main for PRs. --- eng/pipelines/public.yml | 24 +++++++++++++++++-- .../stages/workload-public-build.yml | 4 ++-- 2 files changed, 24 insertions(+), 4 deletions(-) diff --git a/eng/pipelines/public.yml b/eng/pipelines/public.yml index 7749aa27f..51be69506 100644 --- a/eng/pipelines/public.yml +++ b/eng/pipelines/public.yml @@ -10,12 +10,24 @@ trigger: none resources: repositories: - - repository: source + - repository: main type: github name: dotnet/workload-versions ref: refs/heads/main # https://dev.azure.com/dnceng-public/public/_settings/adminservices?resourceId=690f39b4-7746-42c2-be89-281bd7c78b9e endpoint: public + - repository: release-8 + type: github + name: dotnet/workload-versions + ref: refs/heads/release/8.0.4xx + # https://dev.azure.com/dnceng-public/public/_settings/adminservices?resourceId=690f39b4-7746-42c2-be89-281bd7c78b9e + endpoint: public + - repository: release-9 + type: github + name: dotnet/workload-versions + ref: refs/heads/release/9.0.3xx + # 
https://dev.azure.com/dnceng-public/public/_settings/adminservices?resourceId=690f39b4-7746-42c2-be89-281bd7c78b9e + endpoint: public variables: - template: /eng/pipelines/templates/variables/workload-public.yml@self @@ -25,5 +37,13 @@ variables: stages: - template: /eng/pipelines/templates/stages/workload-public-build.yml@self parameters: - sourceBranch: source + sourceBranch: main + engBranch: self +- template: /eng/pipelines/templates/stages/workload-public-build.yml@self + parameters: + sourceBranch: release-8 + engBranch: self +- template: /eng/pipelines/templates/stages/workload-public-build.yml@self + parameters: + sourceBranch: release-9 engBranch: self \ No newline at end of file diff --git a/eng/pipelines/templates/stages/workload-public-build.yml b/eng/pipelines/templates/stages/workload-public-build.yml index 5440fdc8b..e02f921ad 100644 --- a/eng/pipelines/templates/stages/workload-public-build.yml +++ b/eng/pipelines/templates/stages/workload-public-build.yml @@ -3,8 +3,8 @@ parameters: engBranch: eng stages: -- stage: Build - displayName: Build +- stage: Build_${{ parameters.sourceBranch }} + displayName: Build ${{ parameters.sourceBranch }} jobs: - template: /eng/common/templates/job/job.yml@${{ parameters.sourceBranch }} parameters: From 3c0e9e20721a06ebd7f41c314ca8971a4307f2b2 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 13 Aug 2025 17:10:54 -0700 Subject: [PATCH 039/118] Whoops. Forgot to reference main for the pool variables. 
--- eng/pipelines/public.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eng/pipelines/public.yml b/eng/pipelines/public.yml index 51be69506..1ae78b958 100644 --- a/eng/pipelines/public.yml +++ b/eng/pipelines/public.yml @@ -32,7 +32,7 @@ resources: variables: - template: /eng/pipelines/templates/variables/workload-public.yml@self # Variables used: DncEngPublicBuildPool -- template: /eng/common/templates/variables/pool-providers.yml@source +- template: /eng/common/templates/variables/pool-providers.yml@main stages: - template: /eng/pipelines/templates/stages/workload-public-build.yml@self From 0692ba3b0cc984c272ea2c780ad37661bcaee583 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 13 Aug 2025 17:12:34 -0700 Subject: [PATCH 040/118] The repository names were valid but not valid when used as a stage name. --- eng/pipelines/public.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/eng/pipelines/public.yml b/eng/pipelines/public.yml index 1ae78b958..d7ac0dfd5 100644 --- a/eng/pipelines/public.yml +++ b/eng/pipelines/public.yml @@ -16,13 +16,13 @@ resources: ref: refs/heads/main # https://dev.azure.com/dnceng-public/public/_settings/adminservices?resourceId=690f39b4-7746-42c2-be89-281bd7c78b9e endpoint: public - - repository: release-8 + - repository: release8 type: github name: dotnet/workload-versions ref: refs/heads/release/8.0.4xx # https://dev.azure.com/dnceng-public/public/_settings/adminservices?resourceId=690f39b4-7746-42c2-be89-281bd7c78b9e endpoint: public - - repository: release-9 + - repository: release9 type: github name: dotnet/workload-versions ref: refs/heads/release/9.0.3xx @@ -41,9 +41,9 @@ stages: engBranch: self - template: /eng/pipelines/templates/stages/workload-public-build.yml@self parameters: - sourceBranch: release-8 + sourceBranch: release8 engBranch: self - template: /eng/pipelines/templates/stages/workload-public-build.yml@self parameters: - sourceBranch: release-9 + sourceBranch: 
release9 engBranch: self \ No newline at end of file From 9e74472fc15aa2c5a3dd88cfc34a3b18354b22fc Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 13 Aug 2025 17:15:40 -0700 Subject: [PATCH 041/118] Allow the stages to run in parallel. --- eng/pipelines/public.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eng/pipelines/public.yml b/eng/pipelines/public.yml index d7ac0dfd5..2c83ce202 100644 --- a/eng/pipelines/public.yml +++ b/eng/pipelines/public.yml @@ -40,10 +40,12 @@ stages: sourceBranch: main engBranch: self - template: /eng/pipelines/templates/stages/workload-public-build.yml@self + dependsOn: [] parameters: sourceBranch: release8 engBranch: self - template: /eng/pipelines/templates/stages/workload-public-build.yml@self + dependsOn: [] parameters: sourceBranch: release9 engBranch: self \ No newline at end of file From 6a300e703b646545c87ee76ab97ad9eab559142a Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 13 Aug 2025 17:17:38 -0700 Subject: [PATCH 042/118] Oh, dependsOn needs to be in the stage template itself so it can run in parallel. 
--- eng/pipelines/public.yml | 2 -- eng/pipelines/templates/stages/workload-public-build.yml | 1 + 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/eng/pipelines/public.yml b/eng/pipelines/public.yml index 2c83ce202..d7ac0dfd5 100644 --- a/eng/pipelines/public.yml +++ b/eng/pipelines/public.yml @@ -40,12 +40,10 @@ stages: sourceBranch: main engBranch: self - template: /eng/pipelines/templates/stages/workload-public-build.yml@self - dependsOn: [] parameters: sourceBranch: release8 engBranch: self - template: /eng/pipelines/templates/stages/workload-public-build.yml@self - dependsOn: [] parameters: sourceBranch: release9 engBranch: self \ No newline at end of file diff --git a/eng/pipelines/templates/stages/workload-public-build.yml b/eng/pipelines/templates/stages/workload-public-build.yml index e02f921ad..af43a3d3a 100644 --- a/eng/pipelines/templates/stages/workload-public-build.yml +++ b/eng/pipelines/templates/stages/workload-public-build.yml @@ -5,6 +5,7 @@ parameters: stages: - stage: Build_${{ parameters.sourceBranch }} displayName: Build ${{ parameters.sourceBranch }} + dependsOn: [] jobs: - template: /eng/common/templates/job/job.yml@${{ parameters.sourceBranch }} parameters: From 2d2eebc168390f3957f669935e18e3886395d5fa Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 13 Aug 2025 17:24:46 -0700 Subject: [PATCH 043/118] Use the appropriately updated branches for 8 and 9 currently. --- eng/pipelines/public.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/eng/pipelines/public.yml b/eng/pipelines/public.yml index d7ac0dfd5..f44103a22 100644 --- a/eng/pipelines/public.yml +++ b/eng/pipelines/public.yml @@ -19,13 +19,15 @@ resources: - repository: release8 type: github name: dotnet/workload-versions - ref: refs/heads/release/8.0.4xx + # TODO: Update when the changes are merged to the appropriate release branch. 
+ ref: refs/heads/feature/centralized-ci-8 # https://dev.azure.com/dnceng-public/public/_settings/adminservices?resourceId=690f39b4-7746-42c2-be89-281bd7c78b9e endpoint: public - repository: release9 type: github name: dotnet/workload-versions - ref: refs/heads/release/9.0.3xx + # TODO: Update when the changes are merged to the appropriate release branch. + ref: refs/heads/feature/centralized-ci-9 # https://dev.azure.com/dnceng-public/public/_settings/adminservices?resourceId=690f39b4-7746-42c2-be89-281bd7c78b9e endpoint: public From eb62c615cfcbb3c7c3fdaf6cb899b5b65f47ce36 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 13 Aug 2025 17:26:21 -0700 Subject: [PATCH 044/118] Forgot to do the same for main. --- eng/pipelines/public.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/eng/pipelines/public.yml b/eng/pipelines/public.yml index f44103a22..dd6d8a0b9 100644 --- a/eng/pipelines/public.yml +++ b/eng/pipelines/public.yml @@ -13,7 +13,8 @@ resources: - repository: main type: github name: dotnet/workload-versions - ref: refs/heads/main + # TODO: Update when the changes are merged to the main branch. + ref: refs/heads/feature/centralized-ci # https://dev.azure.com/dnceng-public/public/_settings/adminservices?resourceId=690f39b4-7746-42c2-be89-281bd7c78b9e endpoint: public - repository: release8 From 0d6a0bafd4fe0dd4bf67ffcfdb989c0ab2075528 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 13 Aug 2025 17:31:28 -0700 Subject: [PATCH 045/118] Fix log name publishing. 
--- eng/pipelines/templates/stages/workload-public-build.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/eng/pipelines/templates/stages/workload-public-build.yml b/eng/pipelines/templates/stages/workload-public-build.yml index af43a3d3a..5053372ac 100644 --- a/eng/pipelines/templates/stages/workload-public-build.yml +++ b/eng/pipelines/templates/stages/workload-public-build.yml @@ -16,7 +16,8 @@ stages: demands: ImageOverride -equals windows.vs2022.amd64.open artifacts: publish: - logs: true + logs: + name: Logs_${{ parameters.sourceBranch }} steps: - template: /eng/pipelines/templates/steps/workload-checkout.yml@${{ parameters.engBranch }} parameters: From f4560967a5a85fb995062b2b5e9ad3f1ad8b5b23 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Thu, 14 Aug 2025 15:11:36 -0700 Subject: [PATCH 046/118] Added local build logic and required gitignore contents. --- .gitignore | 18 +++++++++++++++++- build.cmd | 12 ++++++++++++ eng/pipelines/official.yml | 1 + 3 files changed, 30 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 25bf790b2..0db5ef472 100644 --- a/.gitignore +++ b/.gitignore @@ -41,4 +41,20 @@ cmake/ .dotnet.payload # MSBuild Logs -**/MSBuild_Logs/MSBuild_pid-*.failure.txt \ No newline at end of file +**/MSBuild_Logs/MSBuild_pid-*.failure.txt + +############################################################################### +# Source branch specific files +############################################################################### +doc/ +eng/common/ +eng/pipelines/public.yml +eng/Publishing.props +eng/Signing.props +eng/Version.Details.xml +eng/Version.Details.json +CODE-OF-CONDUCT.md +global.json +LICENSE.md +NuGet.config +README.md \ No newline at end of file diff --git a/build.cmd b/build.cmd index fe43bbf96..2ea73e530 100644 --- a/build.cmd +++ b/build.cmd @@ -1,3 +1,15 @@ @echo off +if "%1"=="" ( + echo Error: sourceBranch argument is required + echo Usage: build.cmd ^ [additional arguments...] 
+ exit /b 1 +) +set SOURCE_BRANCH=%1 +rem Remove sourceBranch from argument list so it's not passed to the build.ps1 script. +shift +for /f %%i in ('git config --get remote.origin.url') do set REPO_URL=%%i +git clone -b %SOURCE_BRANCH% %REPO_URL% source-branch +robocopy "source-branch" "." /E /XO /XD ".git" ".config" /XF ".gitignore" "build.cmd" "public.yml" /NJH /NJS /NP /NFL /NDL +rmdir /s /q "source-branch" powershell -NoLogo -NoProfile -ExecutionPolicy ByPass -Command "& """%~dp0eng\common\build.ps1""" -restore -build -msbuildEngine vs %*" exit /b %ErrorLevel% diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 0cc247d92..0add76a6e 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -160,6 +160,7 @@ extends: jobs: - deployment: PublishFeed displayName: Publish to feed + # Environment: https://dnceng.visualstudio.com/internal/_environments/42 environment: DotNet-SDK-Workloads pool: name: $(DncEngInternalBuildPool) From c4f9a8bd39fc93ddee2bffc950ffdae83511360b Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Thu, 14 Aug 2025 16:14:01 -0700 Subject: [PATCH 047/118] Fixed doing the local build by storing the arguments and skipping the first argument. Fixed incorrect Versions.props path. --- .gitignore | 2 +- build.cmd | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/.gitignore b/.gitignore index 0db5ef472..81be12acf 100644 --- a/.gitignore +++ b/.gitignore @@ -52,7 +52,7 @@ eng/pipelines/public.yml eng/Publishing.props eng/Signing.props eng/Version.Details.xml -eng/Version.Details.json +eng/Versions.props CODE-OF-CONDUCT.md global.json LICENSE.md diff --git a/build.cmd b/build.cmd index 2ea73e530..980323a8d 100644 --- a/build.cmd +++ b/build.cmd @@ -5,11 +5,12 @@ if "%1"=="" ( exit /b 1 ) set SOURCE_BRANCH=%1 -rem Remove sourceBranch from argument list so it's not passed to the build.ps1 script. 
-shift +rem Keep the arguments, excluding the first (SOURCE_BRANCH) +set "REMAINING_ARGS=" +for /f "tokens=1* delims= " %%a in ("%*") do set "REMAINING_ARGS=%%b" for /f %%i in ('git config --get remote.origin.url') do set REPO_URL=%%i git clone -b %SOURCE_BRANCH% %REPO_URL% source-branch robocopy "source-branch" "." /E /XO /XD ".git" ".config" /XF ".gitignore" "build.cmd" "public.yml" /NJH /NJS /NP /NFL /NDL rmdir /s /q "source-branch" -powershell -NoLogo -NoProfile -ExecutionPolicy ByPass -Command "& """%~dp0eng\common\build.ps1""" -restore -build -msbuildEngine vs %*" +powershell -NoLogo -NoProfile -ExecutionPolicy ByPass -Command "& """%~dp0eng\common\build.ps1""" -restore -build -msbuildEngine vs %REMAINING_ARGS%" exit /b %ErrorLevel% From 18e6395f4837999504e330a298920523b33fb117 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 19 Aug 2025 15:55:18 -0700 Subject: [PATCH 048/118] Added enabling AutoFlush when calculating the hash to avoid an OutOfMemory exception. --- eng/create-workload-drops.ps1 | 3 +++ 1 file changed, 3 insertions(+) diff --git a/eng/create-workload-drops.ps1 b/eng/create-workload-drops.ps1 index 168f0a8df..fe99ee304 100644 --- a/eng/create-workload-drops.ps1 +++ b/eng/create-workload-drops.ps1 @@ -30,6 +30,9 @@ Get-ChildItem -Path $workloadDropPath -Directory | ForEach-Object { # See: https://learn.microsoft.com/powershell/module/microsoft.powershell.utility/get-filehash#example-4-compute-the-hash-of-a-string $contentStream = [System.IO.MemoryStream]::new() $writer = [System.IO.StreamWriter]::new($contentStream) + # Automatically flushes the buffer after every Write call (necessary for workloads such as MAUI with a large number of files). + # See: https://learn.microsoft.com/dotnet/api/system.io.streamwriter.autoflush + $writer.AutoFlush = $true $dropFiles = Get-ChildItem -Path $dropDir | Sort-Object # Note: We're using ASCII because when testing between PS 5.1 and PS 7.5, this would result in the same hash. 
Other encodings arrived at different hashes. $null = $dropFiles | Get-Content -Encoding ASCII -Raw | ForEach-Object { $writer.Write($_) } From 5e01baf3e24cc9b16808584bdce708cff1c95827 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 19 Aug 2025 16:22:48 -0700 Subject: [PATCH 049/118] Trying using a foreach loop instead of piping. --- eng/create-workload-drops.ps1 | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/eng/create-workload-drops.ps1 b/eng/create-workload-drops.ps1 index fe99ee304..f58af1e36 100644 --- a/eng/create-workload-drops.ps1 +++ b/eng/create-workload-drops.ps1 @@ -34,8 +34,12 @@ Get-ChildItem -Path $workloadDropPath -Directory | ForEach-Object { # See: https://learn.microsoft.com/dotnet/api/system.io.streamwriter.autoflush $writer.AutoFlush = $true $dropFiles = Get-ChildItem -Path $dropDir | Sort-Object - # Note: We're using ASCII because when testing between PS 5.1 and PS 7.5, this would result in the same hash. Other encodings arrived at different hashes. - $null = $dropFiles | Get-Content -Encoding ASCII -Raw | ForEach-Object { $writer.Write($_) } + foreach ($dropFile in $dropFiles) + { + # Note: We're using ASCII because when testing between PS 5.1 and PS 7.5, this would result in the same hash. Other encodings arrived at different hashes. + $fileContent = Get-Content -Path $dropFile.FullName -Encoding ASCII -Raw + $null = $writer.Write($fileContent) + } $writer.Flush() $contentStream.Position = 0 $dropHash = (Get-FileHash -InputStream $contentStream).Hash From 345504e2b4978b286d2b61b6ac072fe09e61ad1d Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 19 Aug 2025 16:39:23 -0700 Subject: [PATCH 050/118] Debugging the file that is causing the memory exception. 
--- eng/create-workload-drops.ps1 | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/eng/create-workload-drops.ps1 b/eng/create-workload-drops.ps1 index f58af1e36..879850565 100644 --- a/eng/create-workload-drops.ps1 +++ b/eng/create-workload-drops.ps1 @@ -36,8 +36,15 @@ Get-ChildItem -Path $workloadDropPath -Directory | ForEach-Object { $dropFiles = Get-ChildItem -Path $dropDir | Sort-Object foreach ($dropFile in $dropFiles) { - # Note: We're using ASCII because when testing between PS 5.1 and PS 7.5, this would result in the same hash. Other encodings arrived at different hashes. - $fileContent = Get-Content -Path $dropFile.FullName -Encoding ASCII -Raw + try { + # Note: We're using ASCII because when testing between PS 5.1 and PS 7.5, this would result in the same hash. Other encodings arrived at different hashes. + $fileContent = Get-Content -Path $dropFile.FullName -Encoding ASCII -Raw + } catch { + Write-Host "Error: $($_.Exception.Message)" + Write-Host "Type: $($_.Exception.GetType().FullName)" + Write-Host "File: $($dropFile.FullName)" + continue + } $null = $writer.Write($fileContent) } $writer.Flush() From f776abb7492d5e1545b284b390feb8af3a55c116 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 19 Aug 2025 17:08:55 -0700 Subject: [PATCH 051/118] Setting the ErrorAction Stop on the Get-Content call. --- eng/create-workload-drops.ps1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eng/create-workload-drops.ps1 b/eng/create-workload-drops.ps1 index 879850565..630fbfc49 100644 --- a/eng/create-workload-drops.ps1 +++ b/eng/create-workload-drops.ps1 @@ -38,7 +38,7 @@ Get-ChildItem -Path $workloadDropPath -Directory | ForEach-Object { { try { # Note: We're using ASCII because when testing between PS 5.1 and PS 7.5, this would result in the same hash. Other encodings arrived at different hashes. 
- $fileContent = Get-Content -Path $dropFile.FullName -Encoding ASCII -Raw + $fileContent = Get-Content -Path $dropFile.FullName -Encoding ASCII -Raw -ErrorAction Stop } catch { Write-Host "Error: $($_.Exception.Message)" Write-Host "Type: $($_.Exception.GetType().FullName)" From 4dcc2802035ce543b1aa6be96d3122131a577b21 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 3 Sep 2025 16:47:42 -0700 Subject: [PATCH 052/118] Added readme to explain the files within the .config folder. --- .config/README.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 .config/README.md diff --git a/.config/README.md b/.config/README.md new file mode 100644 index 000000000..ae09a2576 --- /dev/null +++ b/.config/README.md @@ -0,0 +1,13 @@ +## Files + +### CredScanSuppressions.json + +This file intentionally contains an empty suppressions list. Simply having this file works around a bug in 1ES PT when doing multi-repo checkout and running CredScan. It somehow adds a folder path to `-Sp`, which is the suppression file argument, which is not a valid value for this argument. Simply providing a suppression file (even without any suppressions like this) avoids this problem, as this is the default location for the file, so it uses this one automatically. + +For additional information, see: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/sdlanalysis/credscan + +### tsaoptions.json + +This file provides the basic information about our team internally within AzDO which the TSA task uses to automatically create work items when SDL tasks fail. Per the settings in the file, these work items will be filed within DevDiv (not DncEng). 
+ +For additional information, see: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/sdlanalysis/tsasupport \ No newline at end of file From 1fc8414b52cc948e908462097ebbd9a5751f72e5 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Thu, 4 Sep 2025 15:02:13 -0700 Subject: [PATCH 053/118] Removed autoflush. Made content write per line. --- eng/create-workload-drops.ps1 | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/eng/create-workload-drops.ps1 b/eng/create-workload-drops.ps1 index 630fbfc49..f07858a68 100644 --- a/eng/create-workload-drops.ps1 +++ b/eng/create-workload-drops.ps1 @@ -30,22 +30,22 @@ Get-ChildItem -Path $workloadDropPath -Directory | ForEach-Object { # See: https://learn.microsoft.com/powershell/module/microsoft.powershell.utility/get-filehash#example-4-compute-the-hash-of-a-string $contentStream = [System.IO.MemoryStream]::new() $writer = [System.IO.StreamWriter]::new($contentStream) - # Automatically flushes the buffer after every Write call (necessary for workloads such as MAUI with a large number of files). - # See: https://learn.microsoft.com/dotnet/api/system.io.streamwriter.autoflush - $writer.AutoFlush = $true + # # Automatically flushes the buffer after every Write call (necessary for workloads such as MAUI with a large number of files). + # # See: https://learn.microsoft.com/dotnet/api/system.io.streamwriter.autoflush + # $writer.AutoFlush = $true $dropFiles = Get-ChildItem -Path $dropDir | Sort-Object foreach ($dropFile in $dropFiles) { try { # Note: We're using ASCII because when testing between PS 5.1 and PS 7.5, this would result in the same hash. Other encodings arrived at different hashes. 
- $fileContent = Get-Content -Path $dropFile.FullName -Encoding ASCII -Raw -ErrorAction Stop + $fileContentLines = Get-Content -Path $dropFile.FullName -Encoding ASCII -ErrorAction Stop } catch { Write-Host "Error: $($_.Exception.Message)" Write-Host "Type: $($_.Exception.GetType().FullName)" Write-Host "File: $($dropFile.FullName)" continue } - $null = $writer.Write($fileContent) + $null = $fileContentLines | ForEach-Object { $writer.WriteLine($_) } } $writer.Flush() $contentStream.Position = 0 From 1c5a68131e12ecba406701b4ef24aaa06bc0795a Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Thu, 4 Sep 2025 15:38:40 -0700 Subject: [PATCH 054/118] Trying reading the file as bytes and writing the bytes to the stream. --- eng/create-workload-drops.ps1 | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/eng/create-workload-drops.ps1 b/eng/create-workload-drops.ps1 index f07858a68..4af414e53 100644 --- a/eng/create-workload-drops.ps1 +++ b/eng/create-workload-drops.ps1 @@ -38,14 +38,17 @@ Get-ChildItem -Path $workloadDropPath -Directory | ForEach-Object { { try { # Note: We're using ASCII because when testing between PS 5.1 and PS 7.5, this would result in the same hash. Other encodings arrived at different hashes. 
- $fileContentLines = Get-Content -Path $dropFile.FullName -Encoding ASCII -ErrorAction Stop + # $fileContentLines = Get-Content -Path $dropFile.FullName -Encoding ASCII -ErrorAction Stop + # $null = $fileContentLines | ForEach-Object { $writer.WriteLine($_) } + + $fileBytes = [System.IO.File]::ReadAllBytes($dropFile.FullName) + $null = $writer.BaseStream.Write($fileBytes, 0, $fileBytes.Length) } catch { Write-Host "Error: $($_.Exception.Message)" Write-Host "Type: $($_.Exception.GetType().FullName)" Write-Host "File: $($dropFile.FullName)" continue } - $null = $fileContentLines | ForEach-Object { $writer.WriteLine($_) } } $writer.Flush() $contentStream.Position = 0 @@ -85,7 +88,7 @@ Get-ChildItem -Path $workloadDropPath -Directory | ForEach-Object { } } - Write-Host '⚠︎ After upload, your workload drop will be available at:' + Write-Host '!!! After upload, your workload drop will be available at:' Write-Host "https://devdiv.visualstudio.com/_apps/hub/ms-vscs-artifact.build-tasks.drop-hub-group-explorer-hub?name=$vsDropName" } From e6a95d46f293981188caedeee8e1db44fa5f12ea Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Thu, 4 Sep 2025 16:06:10 -0700 Subject: [PATCH 055/118] Stopped using a stream writer. Instead, directly create the memory stream with it being initialized by the bytes. --- eng/create-workload-drops.ps1 | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/eng/create-workload-drops.ps1 b/eng/create-workload-drops.ps1 index 4af414e53..bd34543c3 100644 --- a/eng/create-workload-drops.ps1 +++ b/eng/create-workload-drops.ps1 @@ -28,8 +28,8 @@ Get-ChildItem -Path $workloadDropPath -Directory | ForEach-Object { # Hash the files within the drop folder to create a unique identifier that represents this workload drop. 
# Example: 1E3EA4FE202394037253F57436A6EAD5DE1359792B618B9072014A98563A30FB # See: https://learn.microsoft.com/powershell/module/microsoft.powershell.utility/get-filehash#example-4-compute-the-hash-of-a-string - $contentStream = [System.IO.MemoryStream]::new() - $writer = [System.IO.StreamWriter]::new($contentStream) + # $contentStream = [System.IO.MemoryStream]::new() + # $writer = [System.IO.StreamWriter]::new($contentStream) # # Automatically flushes the buffer after every Write call (necessary for workloads such as MAUI with a large number of files). # # See: https://learn.microsoft.com/dotnet/api/system.io.streamwriter.autoflush # $writer.AutoFlush = $true @@ -42,7 +42,8 @@ Get-ChildItem -Path $workloadDropPath -Directory | ForEach-Object { # $null = $fileContentLines | ForEach-Object { $writer.WriteLine($_) } $fileBytes = [System.IO.File]::ReadAllBytes($dropFile.FullName) - $null = $writer.BaseStream.Write($fileBytes, 0, $fileBytes.Length) + # $null = $writer.BaseStream.Write($fileBytes, 0, $fileBytes.Length) + $contentStream = [System.IO.MemoryStream]::new($fileBytes) } catch { Write-Host "Error: $($_.Exception.Message)" Write-Host "Type: $($_.Exception.GetType().FullName)" @@ -50,10 +51,10 @@ Get-ChildItem -Path $workloadDropPath -Directory | ForEach-Object { continue } } - $writer.Flush() + # $writer.Flush() $contentStream.Position = 0 $dropHash = (Get-FileHash -InputStream $contentStream).Hash - $writer.Close() + # $writer.Close() $vsDropName = "Products/dotnet/workloads/$assemblyName/$dropHash" # Reads the first line out of the .metadata file in the workload's output folder and sets it to the workload version. From 0371d5fbda40302dc417b56f68a1cb314c280d2e Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Thu, 4 Sep 2025 16:37:55 -0700 Subject: [PATCH 056/118] Creating individual file hashes. Then hashing the hashes to make the final hash. 
--- eng/create-workload-drops.ps1 | 45 +++++++++++++++++++---------------- 1 file changed, 24 insertions(+), 21 deletions(-) diff --git a/eng/create-workload-drops.ps1 b/eng/create-workload-drops.ps1 index bd34543c3..f1b6674a8 100644 --- a/eng/create-workload-drops.ps1 +++ b/eng/create-workload-drops.ps1 @@ -28,33 +28,36 @@ Get-ChildItem -Path $workloadDropPath -Directory | ForEach-Object { # Hash the files within the drop folder to create a unique identifier that represents this workload drop. # Example: 1E3EA4FE202394037253F57436A6EAD5DE1359792B618B9072014A98563A30FB # See: https://learn.microsoft.com/powershell/module/microsoft.powershell.utility/get-filehash#example-4-compute-the-hash-of-a-string - # $contentStream = [System.IO.MemoryStream]::new() - # $writer = [System.IO.StreamWriter]::new($contentStream) + $contentStream = [System.IO.MemoryStream]::new() + $writer = [System.IO.StreamWriter]::new($contentStream) # # Automatically flushes the buffer after every Write call (necessary for workloads such as MAUI with a large number of files). # # See: https://learn.microsoft.com/dotnet/api/system.io.streamwriter.autoflush # $writer.AutoFlush = $true - $dropFiles = Get-ChildItem -Path $dropDir | Sort-Object - foreach ($dropFile in $dropFiles) - { - try { - # Note: We're using ASCII because when testing between PS 5.1 and PS 7.5, this would result in the same hash. Other encodings arrived at different hashes. - # $fileContentLines = Get-Content -Path $dropFile.FullName -Encoding ASCII -ErrorAction Stop - # $null = $fileContentLines | ForEach-Object { $writer.WriteLine($_) } + $dropFilePaths = Get-ChildItem -Path $dropDir | Sort-Object | ForEach-Object { $_.FullName } + # Hash each file individually, then write the hashes to the stream to create a combined hash. 
+ $dropFileHashes = (Get-FileHash -Path $dropFilePaths).Hash + $null = $dropFileHashes | ForEach-Object { $writer.Write($_) } + # foreach ($dropFile in $dropFiles) + # { + # try { + # # Note: We're using ASCII because when testing between PS 5.1 and PS 7.5, this would result in the same hash. Other encodings arrived at different hashes. + # # $fileContentLines = Get-Content -Path $dropFile.FullName -Encoding ASCII -ErrorAction Stop + # # $null = $fileContentLines | ForEach-Object { $writer.WriteLine($_) } - $fileBytes = [System.IO.File]::ReadAllBytes($dropFile.FullName) - # $null = $writer.BaseStream.Write($fileBytes, 0, $fileBytes.Length) - $contentStream = [System.IO.MemoryStream]::new($fileBytes) - } catch { - Write-Host "Error: $($_.Exception.Message)" - Write-Host "Type: $($_.Exception.GetType().FullName)" - Write-Host "File: $($dropFile.FullName)" - continue - } - } - # $writer.Flush() + # $fileBytes = [System.IO.File]::ReadAllBytes($dropFile.FullName) + # # $null = $writer.BaseStream.Write($fileBytes, 0, $fileBytes.Length) + # $contentStream = [System.IO.MemoryStream]::new($fileBytes) + # } catch { + # Write-Host "Error: $($_.Exception.Message)" + # Write-Host "Type: $($_.Exception.GetType().FullName)" + # Write-Host "File: $($dropFile.FullName)" + # continue + # } + # } + $writer.Flush() $contentStream.Position = 0 $dropHash = (Get-FileHash -InputStream $contentStream).Hash - # $writer.Close() + $writer.Close() $vsDropName = "Products/dotnet/workloads/$assemblyName/$dropHash" # Reads the first line out of the .metadata file in the workload's output folder and sets it to the workload version. From 16e4e4e1d637e4e96f52eca70aa4b87b19f1ac8c Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Thu, 4 Sep 2025 17:00:12 -0700 Subject: [PATCH 057/118] Cleanup, since the hash of hashes solution seems to work without issue. 
--- eng/create-workload-drops.ps1 | 22 +--------------------- 1 file changed, 1 insertion(+), 21 deletions(-) diff --git a/eng/create-workload-drops.ps1 b/eng/create-workload-drops.ps1 index f1b6674a8..e00c595ac 100644 --- a/eng/create-workload-drops.ps1 +++ b/eng/create-workload-drops.ps1 @@ -30,30 +30,10 @@ Get-ChildItem -Path $workloadDropPath -Directory | ForEach-Object { # See: https://learn.microsoft.com/powershell/module/microsoft.powershell.utility/get-filehash#example-4-compute-the-hash-of-a-string $contentStream = [System.IO.MemoryStream]::new() $writer = [System.IO.StreamWriter]::new($contentStream) - # # Automatically flushes the buffer after every Write call (necessary for workloads such as MAUI with a large number of files). - # # See: https://learn.microsoft.com/dotnet/api/system.io.streamwriter.autoflush - # $writer.AutoFlush = $true - $dropFilePaths = Get-ChildItem -Path $dropDir | Sort-Object | ForEach-Object { $_.FullName } + $dropFilePaths = (Get-ChildItem -Path $dropDir | Sort-Object).FullName # Hash each file individually, then write the hashes to the stream to create a combined hash. $dropFileHashes = (Get-FileHash -Path $dropFilePaths).Hash $null = $dropFileHashes | ForEach-Object { $writer.Write($_) } - # foreach ($dropFile in $dropFiles) - # { - # try { - # # Note: We're using ASCII because when testing between PS 5.1 and PS 7.5, this would result in the same hash. Other encodings arrived at different hashes. 
- # # $fileContentLines = Get-Content -Path $dropFile.FullName -Encoding ASCII -ErrorAction Stop - # # $null = $fileContentLines | ForEach-Object { $writer.WriteLine($_) } - - # $fileBytes = [System.IO.File]::ReadAllBytes($dropFile.FullName) - # # $null = $writer.BaseStream.Write($fileBytes, 0, $fileBytes.Length) - # $contentStream = [System.IO.MemoryStream]::new($fileBytes) - # } catch { - # Write-Host "Error: $($_.Exception.Message)" - # Write-Host "Type: $($_.Exception.GetType().FullName)" - # Write-Host "File: $($dropFile.FullName)" - # continue - # } - # } $writer.Flush() $contentStream.Position = 0 $dropHash = (Get-FileHash -InputStream $contentStream).Hash From 59c272c1108bdc9ad72bdad8457e80e4aef9ac44 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 5 Sep 2025 16:09:12 -0700 Subject: [PATCH 058/118] Added logic to update the run name for official runs. Added some debug for resource variables. --- eng/pipelines/official.yml | 16 ++++++++++++++++ eng/pipelines/templates/jobs/workload-build.yml | 9 +++++++++ .../templates/steps/workload-checkout.yml | 8 ++++++++ 3 files changed, 33 insertions(+) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 0add76a6e..e859219b6 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -4,6 +4,10 @@ trigger: none pr: none +# Required to set a custom run name within workload-checkout.yml. 
+# See: https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/pipeline +appendCommitMessageToRunName: false + parameters: - name: sourceBranch displayName: 🚩 Source Branch 🚩 @@ -105,6 +109,18 @@ variables: # DotNet-DevDiv-Insertion-Workflow-Variables provides: dn-bot-devdiv-drop-rw-code-rw # https://dnceng.visualstudio.com/internal/_library?itemType=VariableGroups&view=VariableGroupView&variableGroupId=33&path=DotNet-DevDiv-Insertion-Workflow-Variables - group: DotNet-DevDiv-Insertion-Workflow-Variables +- name: ref + value: $[ resources.repositories.source.ref ] +- name: name + value: $[ resources.repositories.source.name ] +- name: id + value: $[ resources.repositories.source.id ] +- name: type + value: $[ resources.repositories.source.type ] +- name: url + value: $[ resources.repositories.source.url ] +- name: version + value: $[ resources.repositories.source.version ] resources: repositories: diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index a4de24fd7..c068d70a4 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -39,6 +39,15 @@ jobs: parameters: sourceBranch: source engBranch: self + useSourceCommitMessageRunName: true + - bash: | + echo "name = $(name)" + echo "ref = $(ref)" + echo "id = $(id)" + echo "type = $(type)" + echo "url = $(url)" + echo "version = $(version)" + displayName: 🟣 Display repository variables - ${{ if eq(parameters.createVSInsertion, true) }}: - task: AzureCLI@2 displayName: 🟣 Download workloads for VS insertion diff --git a/eng/pipelines/templates/steps/workload-checkout.yml b/eng/pipelines/templates/steps/workload-checkout.yml index b2f2fdd91..cc9f91b33 100644 --- a/eng/pipelines/templates/steps/workload-checkout.yml +++ b/eng/pipelines/templates/steps/workload-checkout.yml @@ -1,6 +1,7 @@ parameters: sourceBranch: self engBranch: eng + useSourceCommitMessageRunName: false steps: # For 
checkout mechanics, see: @@ -11,6 +12,13 @@ steps: path: source-branch workspaceRepo: true displayName: 🟣 Checkout source branch +# Sets the run name to use the source commit message. +# See: https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number +- ${{ if eq(parameters.useSourceCommitMessageRunName, true) }}: + - powershell: | + $commitMessage = "$(git log -1 --pretty=%s)".Trim() + Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" + displayName: 🟣 Set run name from source commit message # There is a warning when performing the second checkout, ##[warning]Unable move and reuse existing repository to required location. # This happens because both checkouts are of the same repository, thus have the same path of: D:\a\_work\1\s\workload-versions # The first checkout deletes this directory. To avoid the warning, simply create the directory beforehand. From f8d1ab6826433512bdd65c5fc4e94de55854bc58 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 5 Sep 2025 17:21:10 -0700 Subject: [PATCH 059/118] Updated PR branches as the dependent changes have been merged. --- eng/pipelines/public.yml | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/eng/pipelines/public.yml b/eng/pipelines/public.yml index dd6d8a0b9..8f1f70038 100644 --- a/eng/pipelines/public.yml +++ b/eng/pipelines/public.yml @@ -13,22 +13,19 @@ resources: - repository: main type: github name: dotnet/workload-versions - # TODO: Update when the changes are merged to the main branch. - ref: refs/heads/feature/centralized-ci + ref: refs/heads/main # https://dev.azure.com/dnceng-public/public/_settings/adminservices?resourceId=690f39b4-7746-42c2-be89-281bd7c78b9e endpoint: public - repository: release8 type: github name: dotnet/workload-versions - # TODO: Update when the changes are merged to the appropriate release branch. 
- ref: refs/heads/feature/centralized-ci-8 + ref: refs/heads/release/8.0.4xx # https://dev.azure.com/dnceng-public/public/_settings/adminservices?resourceId=690f39b4-7746-42c2-be89-281bd7c78b9e endpoint: public - repository: release9 type: github name: dotnet/workload-versions - # TODO: Update when the changes are merged to the appropriate release branch. - ref: refs/heads/feature/centralized-ci-9 + ref: refs/heads/release/9.0.1xx # https://dev.azure.com/dnceng-public/public/_settings/adminservices?resourceId=690f39b4-7746-42c2-be89-281bd7c78b9e endpoint: public From 6daa598f9973c1c9e17e2451924dccabfc47c1b2 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 5 Sep 2025 17:31:30 -0700 Subject: [PATCH 060/118] Add eng to PR triggers. --- eng/pipelines/public.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/eng/pipelines/public.yml b/eng/pipelines/public.yml index 8f1f70038..7dd0c0441 100644 --- a/eng/pipelines/public.yml +++ b/eng/pipelines/public.yml @@ -5,6 +5,7 @@ pr: include: - main - release/* + - eng trigger: none From a87cd362cf8f97441d52f6d59f304bb6e3d11127 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 5 Sep 2025 17:56:09 -0700 Subject: [PATCH 061/118] Clean commit message prior to setting to run name. Add listing environment variables for debugging. 
--- eng/pipelines/templates/jobs/workload-build.yml | 4 ++++ eng/pipelines/templates/steps/workload-checkout.yml | 5 +++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index c068d70a4..07b8ec54e 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -48,6 +48,10 @@ jobs: echo "url = $(url)" echo "version = $(version)" displayName: 🟣 Display repository variables + condition: always() + - powershell: 'Get-ChildItem env:' + displayName: 🟣 List Environment Variables + condition: always() - ${{ if eq(parameters.createVSInsertion, true) }}: - task: AzureCLI@2 displayName: 🟣 Download workloads for VS insertion diff --git a/eng/pipelines/templates/steps/workload-checkout.yml b/eng/pipelines/templates/steps/workload-checkout.yml index cc9f91b33..e357081fb 100644 --- a/eng/pipelines/templates/steps/workload-checkout.yml +++ b/eng/pipelines/templates/steps/workload-checkout.yml @@ -16,8 +16,9 @@ steps: # See: https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number - ${{ if eq(parameters.useSourceCommitMessageRunName, true) }}: - powershell: | - $commitMessage = "$(git log -1 --pretty=%s)".Trim() - Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" + # Remove all non-alphanumeric characters and truncate to 255 max characters (255 - 14 for number and preamble) to avoid run name issues. + $commitMessage = ("$(git log -1 --pretty=%s)".Trim() -replace '[^a-zA-Z0-9]', '').Substring(0, 241) + Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) - $commitMessage" displayName: 🟣 Set run name from source commit message # There is a warning when performing the second checkout, ##[warning]Unable move and reuse existing repository to required location. 
# This happens because both checkouts are of the same repository, thus have the same path of: D:\a\_work\1\s\workload-versions From 6b7ff7b6be99258dd2a4b83bdb286273e13b6b40 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 5 Sep 2025 18:20:46 -0700 Subject: [PATCH 062/118] Fix truncation script. --- eng/pipelines/templates/steps/workload-checkout.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/eng/pipelines/templates/steps/workload-checkout.yml b/eng/pipelines/templates/steps/workload-checkout.yml index e357081fb..25b604402 100644 --- a/eng/pipelines/templates/steps/workload-checkout.yml +++ b/eng/pipelines/templates/steps/workload-checkout.yml @@ -16,8 +16,8 @@ steps: # See: https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number - ${{ if eq(parameters.useSourceCommitMessageRunName, true) }}: - powershell: | - # Remove all non-alphanumeric characters and truncate to 255 max characters (255 - 14 for number and preamble) to avoid run name issues. - $commitMessage = ("$(git log -1 --pretty=%s)".Trim() -replace '[^a-zA-Z0-9]', '').Substring(0, 241) + # Keep only alphanumeric characters (and space) and truncate to 255 max characters (255 - 14 for build number and delimiter) to avoid run name issues. + $commitMessage = "$(git log -1 --pretty=%s)".Trim() -replace '[^a-zA-Z0-9 ]', '' | Select-Object -First 241 Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) - $commitMessage" displayName: 🟣 Set run name from source commit message # There is a warning when performing the second checkout, ##[warning]Unable move and reuse existing repository to required location. From faed562d7aae89c02252fa0fa0640358efb709b1 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 9 Sep 2025 13:23:59 -0700 Subject: [PATCH 063/118] Remove debug tasks. Moved the run number change to happen after the build. 
--- eng/pipelines/official.yml | 12 ---------- .../templates/jobs/workload-build.yml | 22 ++++++++----------- .../templates/steps/workload-checkout.yml | 9 -------- 3 files changed, 9 insertions(+), 34 deletions(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index e859219b6..bc77cc425 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -109,18 +109,6 @@ variables: # DotNet-DevDiv-Insertion-Workflow-Variables provides: dn-bot-devdiv-drop-rw-code-rw # https://dnceng.visualstudio.com/internal/_library?itemType=VariableGroups&view=VariableGroupView&variableGroupId=33&path=DotNet-DevDiv-Insertion-Workflow-Variables - group: DotNet-DevDiv-Insertion-Workflow-Variables -- name: ref - value: $[ resources.repositories.source.ref ] -- name: name - value: $[ resources.repositories.source.name ] -- name: id - value: $[ resources.repositories.source.id ] -- name: type - value: $[ resources.repositories.source.type ] -- name: url - value: $[ resources.repositories.source.url ] -- name: version - value: $[ resources.repositories.source.version ] resources: repositories: diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 07b8ec54e..56f1b183d 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -39,19 +39,6 @@ jobs: parameters: sourceBranch: source engBranch: self - useSourceCommitMessageRunName: true - - bash: | - echo "name = $(name)" - echo "ref = $(ref)" - echo "id = $(id)" - echo "type = $(type)" - echo "url = $(url)" - echo "version = $(version)" - displayName: 🟣 Display repository variables - condition: always() - - powershell: 'Get-ChildItem env:' - displayName: 🟣 List Environment Variables - condition: always() - ${{ if eq(parameters.createVSInsertion, true) }}: - task: AzureCLI@2 displayName: 🟣 Download workloads for VS insertion @@ -81,6 +68,15 @@ jobs: /p:OfficialBuildId=$(Build.BuildNumber) 
/p:StabilizePackageVersion=${{ parameters.stabilizePackageVersion }} displayName: 🟣 Build solution + # Sets the run name to use the source commit message. + # See: https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number + # Note: This must be performed AFTER the build, as Arcade requires the default run number for the OfficialBuildId property. + - powershell: | + # Keep only alphanumeric characters (and space) and truncate to 255 max characters (255 - 14 for build number and delimiter) to avoid run name issues. + $commitMessage = "$(git log -1 --pretty=%s)".Trim() -replace '[^a-zA-Z0-9 ]', '' | Select-Object -First 241 + Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" + displayName: 🟣 Set run name from source commit message + condition: always() - ${{ if eq(parameters.createVSInsertion, true) }}: # The variables comprised of workloadShortName and workloadType are set during create-workload-drops.ps1 in Microsoft.NET.Workloads.Vsman.csproj. diff --git a/eng/pipelines/templates/steps/workload-checkout.yml b/eng/pipelines/templates/steps/workload-checkout.yml index 25b604402..b2f2fdd91 100644 --- a/eng/pipelines/templates/steps/workload-checkout.yml +++ b/eng/pipelines/templates/steps/workload-checkout.yml @@ -1,7 +1,6 @@ parameters: sourceBranch: self engBranch: eng - useSourceCommitMessageRunName: false steps: # For checkout mechanics, see: @@ -12,14 +11,6 @@ steps: path: source-branch workspaceRepo: true displayName: 🟣 Checkout source branch -# Sets the run name to use the source commit message. -# See: https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number -- ${{ if eq(parameters.useSourceCommitMessageRunName, true) }}: - - powershell: | - # Keep only alphanumeric characters (and space) and truncate to 255 max characters (255 - 14 for build number and delimiter) to avoid run name issues. 
- $commitMessage = "$(git log -1 --pretty=%s)".Trim() -replace '[^a-zA-Z0-9 ]', '' | Select-Object -First 241 - Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) - $commitMessage" - displayName: 🟣 Set run name from source commit message # There is a warning when performing the second checkout, ##[warning]Unable move and reuse existing repository to required location. # This happens because both checkouts are of the same repository, thus have the same path of: D:\a\_work\1\s\workload-versions # The first checkout deletes this directory. To avoid the warning, simply create the directory beforehand. From dd0e4b2250bcd399c6e6059807a1425c1669c1f1 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 9 Sep 2025 13:50:11 -0700 Subject: [PATCH 064/118] Added back the previous way to set the run name except saving the Build.BuildNumber to a separate variable for the build. --- eng/pipelines/templates/jobs/workload-build.yml | 16 ++++++---------- .../templates/steps/workload-checkout.yml | 10 ++++++++++ 2 files changed, 16 insertions(+), 10 deletions(-) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 56f1b183d..6247738a2 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -34,11 +34,16 @@ jobs: name: $(DncEngInternalBuildPool) image: 1es-windows-2022 os: windows + variables: + # Build.BuildNumber is changed to set the run name to contain the source branch commit message. + # Therefore, Build.BuildNumber is stored to a variable since the original value is needed for Arcade as the OfficialBuildId property. 
+ OfficialBuildId: $(Build.BuildNumber) steps: - template: /eng/pipelines/templates/steps/workload-checkout.yml@self parameters: sourceBranch: source engBranch: self + useSourceCommitMessageRunName: true - ${{ if eq(parameters.createVSInsertion, true) }}: - task: AzureCLI@2 displayName: 🟣 Download workloads for VS insertion @@ -65,18 +70,9 @@ jobs: /p:DotNetSignType=$(_SignType) /p:TeamName=$(_TeamName) /p:DotNetPublishUsingPipelines=true - /p:OfficialBuildId=$(Build.BuildNumber) + /p:OfficialBuildId=$(OfficialBuildId) /p:StabilizePackageVersion=${{ parameters.stabilizePackageVersion }} displayName: 🟣 Build solution - # Sets the run name to use the source commit message. - # See: https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number - # Note: This must be performed AFTER the build, as Arcade requires the default run number for the OfficialBuildId property. - - powershell: | - # Keep only alphanumeric characters (and space) and truncate to 255 max characters (255 - 14 for build number and delimiter) to avoid run name issues. - $commitMessage = "$(git log -1 --pretty=%s)".Trim() -replace '[^a-zA-Z0-9 ]', '' | Select-Object -First 241 - Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" - displayName: 🟣 Set run name from source commit message - condition: always() - ${{ if eq(parameters.createVSInsertion, true) }}: # The variables comprised of workloadShortName and workloadType are set during create-workload-drops.ps1 in Microsoft.NET.Workloads.Vsman.csproj. 
diff --git a/eng/pipelines/templates/steps/workload-checkout.yml b/eng/pipelines/templates/steps/workload-checkout.yml index b2f2fdd91..8a5275589 100644 --- a/eng/pipelines/templates/steps/workload-checkout.yml +++ b/eng/pipelines/templates/steps/workload-checkout.yml @@ -1,6 +1,7 @@ parameters: sourceBranch: self engBranch: eng + useSourceCommitMessageRunName: false steps: # For checkout mechanics, see: @@ -11,6 +12,15 @@ steps: path: source-branch workspaceRepo: true displayName: 🟣 Checkout source branch +# Sets the run name to use the source branch commit message. +# See: https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number +- ${{ if eq(parameters.useSourceCommitMessageRunName, true) }}: + - powershell: | + # Keep only alphanumeric characters (and space) and truncate to 255 max characters (255 - 14 for build number and delimiter) to avoid run name issues. + $commitMessage = "$(git log -1 --pretty=%s)".Trim() -replace '[^a-zA-Z0-9 ]', '' | Select-Object -First 241 + Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" + displayName: 🟣 Set run name via source branch commit message + condition: always() # There is a warning when performing the second checkout, ##[warning]Unable move and reuse existing repository to required location. # This happens because both checkouts are of the same repository, thus have the same path of: D:\a\_work\1\s\workload-versions # The first checkout deletes this directory. To avoid the warning, simply create the directory beforehand. From 9bdb53de2985ddbb704c13359e859914f51fc749 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 9 Sep 2025 16:00:23 -0700 Subject: [PATCH 065/118] Fixed incorrect variable formatting for Arcade to handle. 
--- eng/pipelines/templates/jobs/workload-build.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 6247738a2..306810652 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -35,9 +35,10 @@ jobs: image: 1es-windows-2022 os: windows variables: - # Build.BuildNumber is changed to set the run name to contain the source branch commit message. - # Therefore, Build.BuildNumber is stored to a variable since the original value is needed for Arcade as the OfficialBuildId property. - OfficialBuildId: $(Build.BuildNumber) + # Build.BuildNumber is changed to set the run name to contain the source branch commit message. + # Therefore, Build.BuildNumber is stored to a variable since the original value is needed for Arcade as the OfficialBuildId property. + - name: OfficialBuildId + value: $(Build.BuildNumber) steps: - template: /eng/pipelines/templates/steps/workload-checkout.yml@self parameters: From 419ea591e5a25090c7968995ad1164620c39b553 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 10 Sep 2025 11:00:43 -0700 Subject: [PATCH 066/118] Trying to use runtime expression for the build number. --- eng/pipelines/templates/jobs/workload-build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 306810652..09605fec9 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -38,7 +38,7 @@ jobs: # Build.BuildNumber is changed to set the run name to contain the source branch commit message. # Therefore, Build.BuildNumber is stored to a variable since the original value is needed for Arcade as the OfficialBuildId property. 
- name: OfficialBuildId - value: $(Build.BuildNumber) + value: $[variables.Build.BuildNumber] steps: - template: /eng/pipelines/templates/steps/workload-checkout.yml@self parameters: From ae7ed44ad3664b8b6f05fc0804f54260db5fd705 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 10 Sep 2025 11:21:57 -0700 Subject: [PATCH 067/118] Set the working directory for setting the run name. --- eng/pipelines/templates/jobs/workload-build.yml | 1 + eng/pipelines/templates/steps/workload-checkout.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 09605fec9..dce456856 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -37,6 +37,7 @@ jobs: variables: # Build.BuildNumber is changed to set the run name to contain the source branch commit message. # Therefore, Build.BuildNumber is stored to a variable since the original value is needed for Arcade as the OfficialBuildId property. + # Using a runtime expression $[] allows this to be resolved prior to being changed. - name: OfficialBuildId value: $[variables.Build.BuildNumber] steps: diff --git a/eng/pipelines/templates/steps/workload-checkout.yml b/eng/pipelines/templates/steps/workload-checkout.yml index 8a5275589..ecbde0844 100644 --- a/eng/pipelines/templates/steps/workload-checkout.yml +++ b/eng/pipelines/templates/steps/workload-checkout.yml @@ -20,6 +20,7 @@ steps: $commitMessage = "$(git log -1 --pretty=%s)".Trim() -replace '[^a-zA-Z0-9 ]', '' | Select-Object -First 241 Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" displayName: 🟣 Set run name via source branch commit message + workingDirectory: $(Agent.BuildDirectory)\source-branch condition: always() # There is a warning when performing the second checkout, ##[warning]Unable move and reuse existing repository to required location. 
# This happens because both checkouts are of the same repository, thus have the same path of: D:\a\_work\1\s\workload-versions From e143af7ba05bf6ee56562c1d4546343cfab5f940 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 10 Sep 2025 11:32:36 -0700 Subject: [PATCH 068/118] Specifically set source branch in the git log command. --- eng/pipelines/templates/steps/workload-checkout.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/eng/pipelines/templates/steps/workload-checkout.yml b/eng/pipelines/templates/steps/workload-checkout.yml index ecbde0844..201521be8 100644 --- a/eng/pipelines/templates/steps/workload-checkout.yml +++ b/eng/pipelines/templates/steps/workload-checkout.yml @@ -17,10 +17,9 @@ steps: - ${{ if eq(parameters.useSourceCommitMessageRunName, true) }}: - powershell: | # Keep only alphanumeric characters (and space) and truncate to 255 max characters (255 - 14 for build number and delimiter) to avoid run name issues. - $commitMessage = "$(git log -1 --pretty=%s)".Trim() -replace '[^a-zA-Z0-9 ]', '' | Select-Object -First 241 + $commitMessage = "$(git log ${{ parameters.sourceBranch }} -1 --pretty=%s)".Trim() -replace '[^a-zA-Z0-9 ]', '' | Select-Object -First 241 Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" displayName: 🟣 Set run name via source branch commit message - workingDirectory: $(Agent.BuildDirectory)\source-branch condition: always() # There is a warning when performing the second checkout, ##[warning]Unable move and reuse existing repository to required location. # This happens because both checkouts are of the same repository, thus have the same path of: D:\a\_work\1\s\workload-versions From 8d67a5f79bd33dd30e0cc4e6ca283d011b34cccc Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 10 Sep 2025 11:46:14 -0700 Subject: [PATCH 069/118] Passed the source branch through to calling the git log command. 
--- eng/pipelines/official.yml | 1 + eng/pipelines/templates/jobs/workload-build.yml | 5 +++-- eng/pipelines/templates/steps/workload-checkout.yml | 9 +++++---- 3 files changed, 9 insertions(+), 6 deletions(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index bc77cc425..c1af12edb 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -157,6 +157,7 @@ extends: workloadDropNames: ${{ parameters.workloadDropNames }} primaryVsInsertionBranches: ${{ parameters.primaryVsInsertionBranches }} secondaryVsInsertionBranches: ${{ parameters.secondaryVsInsertionBranches }} + sourceBranch: ${{ parameters.sourceBranch }} - ${{ if or(eq(parameters.publishToAzDO, true), eq(parameters.publishToNuGet, true)) }}: - stage: Publish displayName: Publish diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index dce456856..08abf37f2 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -43,9 +43,10 @@ jobs: steps: - template: /eng/pipelines/templates/steps/workload-checkout.yml@self parameters: - sourceBranch: source - engBranch: self + sourceBranchAlias: source + engBranchAlias: self useSourceCommitMessageRunName: true + sourceBranch: ${{ parameters.sourceBranch }} - ${{ if eq(parameters.createVSInsertion, true) }}: - task: AzureCLI@2 displayName: 🟣 Download workloads for VS insertion diff --git a/eng/pipelines/templates/steps/workload-checkout.yml b/eng/pipelines/templates/steps/workload-checkout.yml index 201521be8..e3a33fa50 100644 --- a/eng/pipelines/templates/steps/workload-checkout.yml +++ b/eng/pipelines/templates/steps/workload-checkout.yml @@ -1,14 +1,15 @@ parameters: - sourceBranch: self - engBranch: eng + sourceBranchAlias: self + engBranchAlias: eng useSourceCommitMessageRunName: false + sourceBranch: main steps: # For checkout mechanics, see: # 
https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#build-variables-devops-services # https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/steps-checkout?view=azure-pipelines # https://learn.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#checkout-path -- checkout: ${{ parameters.sourceBranch }} +- checkout: ${{ parameters.sourceBranchAlias }} path: source-branch workspaceRepo: true displayName: 🟣 Checkout source branch @@ -29,7 +30,7 @@ steps: # To resolve this, we split on '/' and take the last element, which will always be the repository name only. - powershell: New-Item -Path "$(Agent.BuildDirectory)\s" -Name "$('$(Build.Repository.Name)' -Split '/' | Select-Object -Last 1)" -ItemType Directory displayName: 🟣 [Workaround] Create checkout directory -- checkout: ${{ parameters.engBranch }} +- checkout: ${{ parameters.engBranchAlias }} path: eng-branch displayName: 🟣 Checkout eng branch # The \* is required for the Exclude to work properly. From de4eeac27da0a413825ee341a2b256530adb4f99 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 10 Sep 2025 12:04:08 -0700 Subject: [PATCH 070/118] Adding debug for current branch name. --- eng/pipelines/templates/steps/workload-checkout.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/eng/pipelines/templates/steps/workload-checkout.yml b/eng/pipelines/templates/steps/workload-checkout.yml index e3a33fa50..a5de0ccdb 100644 --- a/eng/pipelines/templates/steps/workload-checkout.yml +++ b/eng/pipelines/templates/steps/workload-checkout.yml @@ -18,6 +18,7 @@ steps: - ${{ if eq(parameters.useSourceCommitMessageRunName, true) }}: - powershell: | # Keep only alphanumeric characters (and space) and truncate to 255 max characters (255 - 14 for build number and delimiter) to avoid run name issues. 
+ Write-Host "$(git branch --show-current)" $commitMessage = "$(git log ${{ parameters.sourceBranch }} -1 --pretty=%s)".Trim() -replace '[^a-zA-Z0-9 ]', '' | Select-Object -First 241 Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" displayName: 🟣 Set run name via source branch commit message From 2049ac74860a1013012699dffb98f8d4a33d10a6 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 10 Sep 2025 14:27:00 -0700 Subject: [PATCH 071/118] Removed all the extra work I was doing as I was confused about which commit message it should be showing. Trying compile-time syntax for the OfficialBuildId. --- eng/pipelines/official.yml | 1 - eng/pipelines/templates/jobs/workload-build.yml | 12 +++++++++--- eng/pipelines/templates/steps/workload-checkout.yml | 12 ------------ 3 files changed, 9 insertions(+), 16 deletions(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index c1af12edb..bc77cc425 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -157,7 +157,6 @@ extends: workloadDropNames: ${{ parameters.workloadDropNames }} primaryVsInsertionBranches: ${{ parameters.primaryVsInsertionBranches }} secondaryVsInsertionBranches: ${{ parameters.secondaryVsInsertionBranches }} - sourceBranch: ${{ parameters.sourceBranch }} - ${{ if or(eq(parameters.publishToAzDO, true), eq(parameters.publishToNuGet, true)) }}: - stage: Publish displayName: Publish diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 08abf37f2..c4559cb66 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -39,14 +39,20 @@ jobs: # Therefore, Build.BuildNumber is stored to a variable since the original value is needed for Arcade as the OfficialBuildId property. # Using a runtime expression $[] allows this to be resolved prior to being changed. 
- name: OfficialBuildId - value: $[variables.Build.BuildNumber] + # value: $[variables.Build.BuildNumber] + value: ${{ variables.Build.BuildNumber }} steps: - template: /eng/pipelines/templates/steps/workload-checkout.yml@self parameters: sourceBranchAlias: source engBranchAlias: self - useSourceCommitMessageRunName: true - sourceBranch: ${{ parameters.sourceBranch }} + # Sets the run name to use the source branch commit message. + # See: https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number + - powershell: | + # Keep only alphanumeric characters (and space) and truncate to 255 max characters (255 - 14 for build number and delimiter) to avoid run name issues. + $commitMessage = "$(git log -1 --pretty=%s)".Trim() -replace '[^a-zA-Z0-9 ]', '' | Select-Object -First 241 + Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" + displayName: 🟣 Set run name via source branch commit message - ${{ if eq(parameters.createVSInsertion, true) }}: - task: AzureCLI@2 displayName: 🟣 Download workloads for VS insertion diff --git a/eng/pipelines/templates/steps/workload-checkout.yml b/eng/pipelines/templates/steps/workload-checkout.yml index a5de0ccdb..353268a42 100644 --- a/eng/pipelines/templates/steps/workload-checkout.yml +++ b/eng/pipelines/templates/steps/workload-checkout.yml @@ -1,8 +1,6 @@ parameters: sourceBranchAlias: self engBranchAlias: eng - useSourceCommitMessageRunName: false - sourceBranch: main steps: # For checkout mechanics, see: @@ -13,16 +11,6 @@ steps: path: source-branch workspaceRepo: true displayName: 🟣 Checkout source branch -# Sets the run name to use the source branch commit message. -# See: https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number -- ${{ if eq(parameters.useSourceCommitMessageRunName, true) }}: - - powershell: | - # Keep only alphanumeric characters (and space) and truncate to 255 max characters (255 - 14 for build number and delimiter) to avoid run name issues. 
- Write-Host "$(git branch --show-current)" - $commitMessage = "$(git log ${{ parameters.sourceBranch }} -1 --pretty=%s)".Trim() -replace '[^a-zA-Z0-9 ]', '' | Select-Object -First 241 - Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" - displayName: 🟣 Set run name via source branch commit message - condition: always() # There is a warning when performing the second checkout, ##[warning]Unable move and reuse existing repository to required location. # This happens because both checkouts are of the same repository, thus have the same path of: D:\a\_work\1\s\workload-versions # The first checkout deletes this directory. To avoid the warning, simply create the directory beforehand. From 8e59cd95ae10644d6fed3c347215e5accfad883c Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 10 Sep 2025 14:40:57 -0700 Subject: [PATCH 072/118] Testing if OfficialBuildId resolves properly. --- eng/pipelines/templates/jobs/workload-build.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index c4559cb66..d8281577e 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -39,8 +39,7 @@ jobs: # Therefore, Build.BuildNumber is stored to a variable since the original value is needed for Arcade as the OfficialBuildId property. # Using a runtime expression $[] allows this to be resolved prior to being changed. 
- name: OfficialBuildId - # value: $[variables.Build.BuildNumber] - value: ${{ variables.Build.BuildNumber }} + value: $[ variables.Build.BuildNumber ] steps: - template: /eng/pipelines/templates/steps/workload-checkout.yml@self parameters: @@ -50,6 +49,7 @@ jobs: # See: https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number - powershell: | # Keep only alphanumeric characters (and space) and truncate to 255 max characters (255 - 14 for build number and delimiter) to avoid run name issues. + Write-Host "OfficialBuildId: $(OfficialBuildId)" $commitMessage = "$(git log -1 --pretty=%s)".Trim() -replace '[^a-zA-Z0-9 ]', '' | Select-Object -First 241 Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" displayName: 🟣 Set run name via source branch commit message From 14376b6c991bc1563a59b863ef854ddadd0b0a46 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 10 Sep 2025 15:15:49 -0700 Subject: [PATCH 073/118] Hopefully, this sets OfficialBuildId now. --- eng/pipelines/templates/jobs/workload-build.yml | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index d8281577e..236e9e4a0 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -34,25 +34,23 @@ jobs: name: $(DncEngInternalBuildPool) image: 1es-windows-2022 os: windows - variables: - # Build.BuildNumber is changed to set the run name to contain the source branch commit message. - # Therefore, Build.BuildNumber is stored to a variable since the original value is needed for Arcade as the OfficialBuildId property. - # Using a runtime expression $[] allows this to be resolved prior to being changed. 
- - name: OfficialBuildId - value: $[ variables.Build.BuildNumber ] steps: - template: /eng/pipelines/templates/steps/workload-checkout.yml@self parameters: sourceBranchAlias: source engBranchAlias: self # Sets the run name to use the source branch commit message. + # Also, sets the OfficialBuildId variable to the original Build.BuildNumber for use in Arcade. # See: https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number - powershell: | # Keep only alphanumeric characters (and space) and truncate to 255 max characters (255 - 14 for build number and delimiter) to avoid run name issues. - Write-Host "OfficialBuildId: $(OfficialBuildId)" + Write-Host "##vso[task.setvariable variable=OfficialBuildId]$(Build.BuildNumber)" $commitMessage = "$(git log -1 --pretty=%s)".Trim() -replace '[^a-zA-Z0-9 ]', '' | Select-Object -First 241 Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" displayName: 🟣 Set run name via source branch commit message + - powershell: | + Write-Host "OfficialBuildId: $(OfficialBuildId)" + displayName: 🟣 OfficialBuildId - ${{ if eq(parameters.createVSInsertion, true) }}: - task: AzureCLI@2 displayName: 🟣 Download workloads for VS insertion From c4522395195405eca635d61be979e927cf94c614 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 10 Sep 2025 15:39:11 -0700 Subject: [PATCH 074/118] Remove debug message. --- eng/pipelines/templates/jobs/workload-build.yml | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 236e9e4a0..bc822f098 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -43,14 +43,11 @@ jobs: # Also, sets the OfficialBuildId variable to the original Build.BuildNumber for use in Arcade. 
# See: https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number - powershell: | - # Keep only alphanumeric characters (and space) and truncate to 255 max characters (255 - 14 for build number and delimiter) to avoid run name issues. Write-Host "##vso[task.setvariable variable=OfficialBuildId]$(Build.BuildNumber)" + # Keep only alphanumeric characters (and space) and truncate to 255 max characters (255 - 14 for build number and delimiter) to avoid run name issues. $commitMessage = "$(git log -1 --pretty=%s)".Trim() -replace '[^a-zA-Z0-9 ]', '' | Select-Object -First 241 Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" displayName: 🟣 Set run name via source branch commit message - - powershell: | - Write-Host "OfficialBuildId: $(OfficialBuildId)" - displayName: 🟣 OfficialBuildId - ${{ if eq(parameters.createVSInsertion, true) }}: - task: AzureCLI@2 displayName: 🟣 Download workloads for VS insertion From 2a463f690ee4ce2fe45d11b14dbbc0b29ecb72fb Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 10 Sep 2025 15:43:33 -0700 Subject: [PATCH 075/118] Update comment text. --- eng/pipelines/official.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index bc77cc425..2157bb186 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -4,7 +4,7 @@ trigger: none pr: none -# Required to set a custom run name within workload-checkout.yml. +# Required to set a custom run name within workload-build.yml. # See: https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/pipeline appendCommitMessageToRunName: false From 01968475a8e7dd4b4726396ea0828c0e05777eb5 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 10 Sep 2025 16:38:05 -0700 Subject: [PATCH 076/118] Whoops. When updating the parameter names, forgot to update them for this branch's PR build. 
--- eng/pipelines/public.yml | 12 ++++++------ .../templates/stages/workload-public-build.yml | 18 +++++++++--------- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/eng/pipelines/public.yml b/eng/pipelines/public.yml index 7dd0c0441..0575c4309 100644 --- a/eng/pipelines/public.yml +++ b/eng/pipelines/public.yml @@ -38,13 +38,13 @@ variables: stages: - template: /eng/pipelines/templates/stages/workload-public-build.yml@self parameters: - sourceBranch: main - engBranch: self + sourceBranchAlias: main + engBranchAlias: self - template: /eng/pipelines/templates/stages/workload-public-build.yml@self parameters: - sourceBranch: release8 - engBranch: self + sourceBranchAlias: release8 + engBranchAlias: self - template: /eng/pipelines/templates/stages/workload-public-build.yml@self parameters: - sourceBranch: release9 - engBranch: self \ No newline at end of file + sourceBranchAlias: release9 + engBranchAlias: self \ No newline at end of file diff --git a/eng/pipelines/templates/stages/workload-public-build.yml b/eng/pipelines/templates/stages/workload-public-build.yml index 5053372ac..df5e17336 100644 --- a/eng/pipelines/templates/stages/workload-public-build.yml +++ b/eng/pipelines/templates/stages/workload-public-build.yml @@ -1,13 +1,13 @@ parameters: - sourceBranch: self - engBranch: eng + sourceBranchAlias: self + engBranchAlias: eng stages: -- stage: Build_${{ parameters.sourceBranch }} - displayName: Build ${{ parameters.sourceBranch }} +- stage: Build_${{ parameters.sourceBranchAlias }} + displayName: Build ${{ parameters.sourceBranchAlias }} dependsOn: [] jobs: - - template: /eng/common/templates/job/job.yml@${{ parameters.sourceBranch }} + - template: /eng/common/templates/job/job.yml@${{ parameters.sourceBranchAlias }} parameters: name: buildRepo displayName: Build Repo @@ -17,12 +17,12 @@ stages: artifacts: publish: logs: - name: Logs_${{ parameters.sourceBranch }} + name: Logs_${{ parameters.sourceBranchAlias }} steps: - - template: 
/eng/pipelines/templates/steps/workload-checkout.yml@${{ parameters.engBranch }} + - template: /eng/pipelines/templates/steps/workload-checkout.yml@${{ parameters.engBranchAlias }} parameters: - sourceBranch: ${{ parameters.sourceBranch }} - engBranch: ${{ parameters.engBranch }} + sourceBranchAlias: ${{ parameters.sourceBranchAlias }} + engBranchAlias: ${{ parameters.engBranchAlias }} - powershell: >- eng/common/build.ps1 -restore -build -pack -ci -msbuildEngine vs From 2367f784cc4fcac3118bedfe204afd09b3d1c09a Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 10 Sep 2025 16:55:41 -0700 Subject: [PATCH 077/118] Adding debug output for workloadDropNames. --- eng/pipelines/templates/jobs/workload-build.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index a4de24fd7..96e150f2b 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -39,6 +39,10 @@ jobs: parameters: sourceBranch: source engBranch: self + - powershell: | + Write-Host 'workloadDropNames:' + Write-Host '${{ convertToJson(parameters.workloadDropNames) }}' + displayName: 🟣 workloadDropNames - ${{ if eq(parameters.createVSInsertion, true) }}: - task: AzureCLI@2 displayName: 🟣 Download workloads for VS insertion From 863c0f9c0c18eb63f41b9115b83ca3dace70fc07 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 10 Sep 2025 17:35:21 -0700 Subject: [PATCH 078/118] Trying to make the workloadDropNames into a single line. 
--- eng/pipelines/templates/jobs/workload-build.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 96e150f2b..fb0d8c86c 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -42,6 +42,13 @@ jobs: - powershell: | Write-Host 'workloadDropNames:' Write-Host '${{ convertToJson(parameters.workloadDropNames) }}' + + $workloadDropNamesRaw = @' + ${{ convertToJson(parameters.workloadDropNames) }} + '@ + $workloadDropNames = $workloadDropNamesRaw -replace '\r?\n', '' + Write-Host "workloadDropNames: $workloadDropNames" + Write-Host "##vso[task.setvariable variable=WorkloadDropNames]$workloadDropNames" displayName: 🟣 workloadDropNames - ${{ if eq(parameters.createVSInsertion, true) }}: - task: AzureCLI@2 From 063cff83bb3932ca42521b4a5b31a9d25eba92e0 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Thu, 11 Sep 2025 10:38:16 -0700 Subject: [PATCH 079/118] Update regex to properly avoid invalid characters. Updated pipeline parameter display name casing to be consistent. --- eng/pipelines/official.yml | 6 +++--- eng/pipelines/templates/jobs/workload-build.yml | 6 ++++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 2157bb186..10f123ecd 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -10,7 +10,7 @@ appendCommitMessageToRunName: false parameters: - name: sourceBranch - displayName: 🚩 Source Branch 🚩 + displayName: 🚩 Source branch 🚩 type: string - name: dividerAzDO @@ -35,7 +35,7 @@ parameters: type: boolean default: false - name: vsTopicBranch - displayName: 'VS Topic Branch [default: temp/{team}/{target}/yyyy-MM]' + displayName: 'VS topic branch [default: temp/{team}/{target}/yyyy-MM]' type: string default: '|default|' # TODO: This needs fixed for single-entry values. 
@@ -78,7 +78,7 @@ parameters: type: boolean default: false - name: usePreComponentsForVSInsertion - displayName: Use Preview Components for VS insertion + displayName: Use preview components for VS insertion type: boolean default: false - name: includeNonShippingWorkloads diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index bc822f098..8b71aedb1 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -44,8 +44,10 @@ jobs: # See: https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number - powershell: | Write-Host "##vso[task.setvariable variable=OfficialBuildId]$(Build.BuildNumber)" - # Keep only alphanumeric characters (and space) and truncate to 255 max characters (255 - 14 for build number and delimiter) to avoid run name issues. - $commitMessage = "$(git log -1 --pretty=%s)".Trim() -replace '[^a-zA-Z0-9 ]', '' | Select-Object -First 241 + # Keep only valid characters. Invalid characters include: " / : < > \ | ? @ * + # Also, strip any trailing '.' characters as those are invalid too. + # Lastly, truncate to 255 max characters: 241 = 255 - 14 (for build number and delimiter, ex: 20250910.13 • ) + $commitMessage = ("$(git log -1 --pretty=%s)".Trim() -replace '["\/:<>\\|?@*]|\.{1,}$', '').Substring(0, 241) Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" displayName: 🟣 Set run name via source branch commit message - ${{ if eq(parameters.createVSInsertion, true) }}: From bf7bc55bb42edd96d5284cf56b8a709e3c42f9ec Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Thu, 11 Sep 2025 11:02:39 -0700 Subject: [PATCH 080/118] Remove debug info. Stripped whitespace from single line json. 
--- .../templates/jobs/workload-build.yml | 22 +++++++++---------- 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index fb0d8c86c..3d78f2df4 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -39,18 +39,16 @@ jobs: parameters: sourceBranch: source engBranch: self - - powershell: | - Write-Host 'workloadDropNames:' - Write-Host '${{ convertToJson(parameters.workloadDropNames) }}' - - $workloadDropNamesRaw = @' - ${{ convertToJson(parameters.workloadDropNames) }} - '@ - $workloadDropNames = $workloadDropNamesRaw -replace '\r?\n', '' - Write-Host "workloadDropNames: $workloadDropNames" - Write-Host "##vso[task.setvariable variable=WorkloadDropNames]$workloadDropNames" - displayName: 🟣 workloadDropNames - ${{ if eq(parameters.createVSInsertion, true) }}: + # The convertToJson expression in AzDO creates "pretty" JSON with line breaks and indentation. + # To simplify passing this JSON to scripts, we collapse it to a single line. 
+ - powershell: | + $workloadDropNames = @' + ${{ convertToJson(parameters.workloadDropNames) }} + '@ + $workloadListJson = $workloadDropNames -replace '\r?\n\s*', '' + Write-Host "##vso[task.setvariable variable=WorkloadListJson]$workloadListJson" + displayName: 🟣 Set WorkloadListJson variable - task: AzureCLI@2 displayName: 🟣 Download workloads for VS insertion inputs: @@ -62,7 +60,7 @@ jobs: -workloadPath '$(System.DefaultWorkingDirectory)/artifacts/workloads' -gitHubPat (ConvertTo-SecureString -String '$(BotAccount-dotnet-bot-repo-PAT)' -AsPlainText -Force) -azDOPat (ConvertTo-SecureString -String '$(dn-bot-all-drop-rw-code-rw-release-all)' -AsPlainText -Force) - -workloadListJson '${{ convertToJson(parameters.workloadDropNames) }}' + -workloadListJson '$(WorkloadListJson)' -usePreComponents:$${{ parameters.usePreComponentsForVSInsertion }} -includeNonShipping:$${{ parameters.includeNonShippingWorkloads }} From 2f033c2761411825489439f8bce606ec33704883 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Thu, 11 Sep 2025 11:19:15 -0700 Subject: [PATCH 081/118] Remove todo comment. --- eng/pipelines/official.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 0add76a6e..6d1cefb39 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -34,7 +34,6 @@ parameters: displayName: 'VS Topic Branch [default: temp/{team}/{target}/yyyy-MM]' type: string default: '|default|' -# TODO: This needs fixed for single-entry values. - name: workloadDropNames displayName: Workload drop names type: object From f2f0297b77934a4c16351829fb0ed369c035a4cf Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Thu, 11 Sep 2025 11:26:06 -0700 Subject: [PATCH 082/118] Adjust script to account for single workload drop entry. 
--- eng/download-workloads.ps1 | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/eng/download-workloads.ps1 b/eng/download-workloads.ps1 index 8a2a018e2..33aba436a 100644 --- a/eng/download-workloads.ps1 +++ b/eng/download-workloads.ps1 @@ -8,7 +8,7 @@ # $azDOPat: The Azure DevOps PAT to use for DARC (CI build only). See workload-build.yml for converting the PAT to SecureString. # $workloadListJson: The JSON string of the list of workload drop names to download. If not provided, all workloads found in Version.Details.xml will be downloaded. # - See the workloadDropNames parameter in official.yml for the list generally passed to this script. -# - Example Value: '{["emsdk","mono"]}' +# - Example Value: '["emsdk","mono"]' # $usePreComponents: # - If $true, includes *pre.components.zip drops and excludes *components.zip drops. # - If $false, excludes *pre.components.zip drops and includes *components.zip drops. @@ -51,7 +51,8 @@ $versionDetails = $versionDetailsXml.Dependencies.ProductDependencies.Dependency $workloadFilter = '' if ($workloadListJson) { $workloadList = ConvertFrom-Json -InputObject $workloadListJson - if ($workloadList.Count -ne 0) { + # Using Length accounts for arrays (multiple workloads provided) and strings (single workload provided). + if ($workloadList.Length -ne 0) { $workloadFilter = "($($workloadList | Join-String -Separator '|'))" } } From 15b6681cbbe4743814932c6740f42e85ba30f1c0 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Thu, 11 Sep 2025 13:35:53 -0700 Subject: [PATCH 083/118] Separated out the VS insertion logic into a job. Then, created a separate stage and make sure parameters are properly sent between build and insertion. 
--- eng/pipelines/official.yml | 21 ++++-- .../templates/jobs/workload-build.yml | 61 +--------------- .../templates/jobs/workload-insertion-job.yml | 73 +++++++++++++++++++ 3 files changed, 91 insertions(+), 64 deletions(-) create mode 100644 eng/pipelines/templates/jobs/workload-insertion-job.yml diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 0add76a6e..c224cd0ca 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -149,17 +149,27 @@ extends: createVSInsertion: ${{ parameters.createVSInsertion }} usePreComponentsForVSInsertion: ${{ parameters.usePreComponentsForVSInsertion }} includeNonShippingWorkloads: ${{ parameters.includeNonShippingWorkloads }} - vsTopicBranch: ${{ parameters.vsTopicBranch }} workloadDropNames: ${{ parameters.workloadDropNames }} - primaryVsInsertionBranches: ${{ parameters.primaryVsInsertionBranches }} - secondaryVsInsertionBranches: ${{ parameters.secondaryVsInsertionBranches }} + + - ${{ if eq(parameters.createVSInsertion, true) }}: + - stage: Insertion + displayName: Insertion + dependsOn: Build + jobs: + - template: /eng/pipelines/templates/jobs/workload-insertion-job.yml@self + parameters: + vsTopicBranch: ${{ parameters.vsTopicBranch }} + workloadDropNames: ${{ parameters.workloadDropNames }} + primaryVsInsertionBranches: ${{ parameters.primaryVsInsertionBranches }} + secondaryVsInsertionBranches: ${{ parameters.secondaryVsInsertionBranches }} + - ${{ if or(eq(parameters.publishToAzDO, true), eq(parameters.publishToNuGet, true)) }}: - stage: Publish displayName: Publish dependsOn: Build jobs: - - deployment: PublishFeed - displayName: Publish to feed + - deployment: PublishFeeds + displayName: Publish to feed(s) # Environment: https://dnceng.visualstudio.com/internal/_environments/42 environment: DotNet-SDK-Workloads pool: @@ -173,6 +183,7 @@ extends: runOnce: deploy: steps: + # 1ES docs: 
https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/inputs - task: 1ES.DownloadPipelineArtifact@1 displayName: 🟣 Download build artifacts inputs: diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index a4de24fd7..c4bc95270 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -1,13 +1,3 @@ -parameters: - # These types match the types handled in create-workload-drops.ps1. There is a RegEx in that script that parses the workload type out of the workload path. - # If this list needs to be updated, the RegEx in the script also needs to be updated. - # The hardcoded list here is necessary as these types need to be available during compilation of the pipeline. - # The workload types become separate variables in the create-workload-drops.ps1 and this list is looped over to create multiple drop tasks. - workloadDropTypes: - - components - - packs - - precomponents - jobs: # Note: jobs.yml automatically includes the Publish Assets job for pushing the artifacts to DARC. - template: /eng/common/templates-official/jobs/jobs.yml@source @@ -27,7 +17,7 @@ jobs: name: Logs manifests: true jobs: - - job: buildRepo + - job: BuildRepo displayName: Build Repo timeoutInMinutes: 120 pool: @@ -67,51 +57,4 @@ jobs: /p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(Build.BuildNumber) /p:StabilizePackageVersion=${{ parameters.stabilizePackageVersion }} - displayName: 🟣 Build solution - - - ${{ if eq(parameters.createVSInsertion, true) }}: - # The variables comprised of workloadShortName and workloadType are set during create-workload-drops.ps1 in Microsoft.NET.Workloads.Vsman.csproj. - - ${{ each workloadShortName in parameters.workloadDropNames }}: - - ${{ each workloadType in parameters.workloadDropTypes }}: - # Only create the drop if the workload drop directory exists. 
- # Sets the PublishWorkloadDrop variable. See: https://stackoverflow.com/a/60630739/294804 - - pwsh: | - # If the drop folder doesn't exist (not downloaded via DARC), the drop cannot be published. - if (-not (Test-Path -Path '$(${{ workloadShortName }}_${{ workloadType }}_dir)')) { - Write-Host "##vso[task.setvariable variable=PublishWorkloadDrop]False" - Write-Host 'Drop "${{ workloadShortName }}_${{ workloadType }}" was not downloaded via DARC. Skipping VS drop publish...' - return - } - Write-Host "PublishWorkloadDrop: True" - Write-Host "##vso[task.setvariable variable=PublishWorkloadDrop]True" - displayName: 🟣 Check if ${{ workloadShortName }}_${{ workloadType }} needs published - - # Docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/outputs/microbuild-vsts-drop - # YAML reference (original task): https://dev.azure.com/devdiv/Engineering/_git/MicroBuild?path=/src/Tasks/UploadDrop/task.json - - task: 1ES.MicroBuildVstsDrop@1 - displayName: '🟣 Publish VS insertion drop: ${{ workloadShortName }}_${{ workloadType }}' - inputs: - dropFolder: $(${{ workloadShortName }}_${{ workloadType }}_dir) - dropName: $(${{ workloadShortName }}_${{ workloadType }}_name) - # See: https://dev.azure.com/devdiv/DevDiv/_wiki/wikis/DevDiv.wiki/35351/Retain-Drops - dropRetentionDays: 183 - accessToken: $(dn-bot-devdiv-drop-rw-code-rw) - skipUploadIfExists: true - condition: eq(variables['PublishWorkloadDrop'], 'True') - - - ${{ each primaryInsertionBranch in parameters.primaryVsInsertionBranches }}: - # One PR is created per branch defined at the top of this file in the primaryVsInsertionBranches parameter. - - template: /eng/pipelines/templates/steps/workload-insertion.yml@self - parameters: - targetBranch: ${{ primaryInsertionBranch }} - topicBranch: ${{ parameters.vsTopicBranch }} - # PrimaryVSComponentJsonValues variable is set during create-workload-drops.ps1 in Microsoft.NET.Workloads.Vsman.csproj. 
- componentJsonValues: $(PrimaryVSComponentJsonValues) - - ${{ each secondaryInsertionBranch in parameters.secondaryVsInsertionBranches }}: - # One PR is created per branch defined at the top of this file in the secondaryVsInsertionBranches parameter. - - template: /eng/pipelines/templates/steps/workload-insertion.yml@self - parameters: - targetBranch: ${{ secondaryInsertionBranch }} - topicBranch: ${{ parameters.vsTopicBranch }} - # SecondaryVSComponentJsonValues variable is set during create-workload-drops.ps1 in Microsoft.NET.Workloads.Vsman.csproj. - componentJsonValues: $(SecondaryVSComponentJsonValues) \ No newline at end of file + displayName: 🟣 Build solution \ No newline at end of file diff --git a/eng/pipelines/templates/jobs/workload-insertion-job.yml b/eng/pipelines/templates/jobs/workload-insertion-job.yml new file mode 100644 index 000000000..a9e4c2191 --- /dev/null +++ b/eng/pipelines/templates/jobs/workload-insertion-job.yml @@ -0,0 +1,73 @@ +parameters: + # These types match the types handled in create-workload-drops.ps1. There is a RegEx in that script that parses the workload type out of the workload path. + # If this list needs to be updated, the RegEx in the script also needs to be updated. + # The hardcoded list here is necessary as these types need to be available during compilation of the pipeline. + # The workload types become separate variables in the create-workload-drops.ps1 and this list is looped over to create multiple drop tasks. 
+ workloadDropTypes: + - components + - packs + - precomponents + +jobs: +- job: VsInsertion + displayName: VS Insertion + timeoutInMinutes: 120 + pool: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022 + os: windows + templateContext: + type: releaseJob + isProduction: true + steps: + # 1ES docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/inputs + - task: 1ES.DownloadPipelineArtifact@1 + displayName: 🟣 Download build artifacts + inputs: + artifactName: Artifacts + targetPath: $(System.DefaultWorkingDirectory)/artifacts + # The variables comprised of workloadShortName and workloadType are set during create-workload-drops.ps1 in Microsoft.NET.Workloads.Vsman.csproj. + - ${{ each workloadShortName in parameters.workloadDropNames }}: + - ${{ each workloadType in parameters.workloadDropTypes }}: + # Only create the drop if the workload drop directory exists. + # Sets the PublishWorkloadDrop variable. See: https://stackoverflow.com/a/60630739/294804 + - pwsh: | + # If the drop folder doesn't exist (not downloaded via DARC), the drop cannot be published. + if (-not (Test-Path -Path '$(${{ workloadShortName }}_${{ workloadType }}_dir)')) { + Write-Host "##vso[task.setvariable variable=PublishWorkloadDrop]False" + Write-Host 'Drop "${{ workloadShortName }}_${{ workloadType }}" was not downloaded via DARC. Skipping VS drop publish...' 
+ return + } + Write-Host "PublishWorkloadDrop: True" + Write-Host "##vso[task.setvariable variable=PublishWorkloadDrop]True" + displayName: 🟣 Check if ${{ workloadShortName }}_${{ workloadType }} needs published + + # Docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/outputs/microbuild-vsts-drop + # YAML reference (original task): https://dev.azure.com/devdiv/Engineering/_git/MicroBuild?path=/src/Tasks/UploadDrop/task.json + - task: 1ES.MicroBuildVstsDrop@1 + displayName: '🟣 Publish VS insertion drop: ${{ workloadShortName }}_${{ workloadType }}' + inputs: + dropFolder: $(${{ workloadShortName }}_${{ workloadType }}_dir) + dropName: $(${{ workloadShortName }}_${{ workloadType }}_name) + # See: https://dev.azure.com/devdiv/DevDiv/_wiki/wikis/DevDiv.wiki/35351/Retain-Drops + dropRetentionDays: 183 + accessToken: $(dn-bot-devdiv-drop-rw-code-rw) + skipUploadIfExists: true + condition: eq(variables['PublishWorkloadDrop'], 'True') + + - ${{ each primaryInsertionBranch in parameters.primaryVsInsertionBranches }}: + # One PR is created per branch defined at the top of this file in the primaryVsInsertionBranches parameter. + - template: /eng/pipelines/templates/steps/workload-insertion.yml@self + parameters: + targetBranch: ${{ primaryInsertionBranch }} + topicBranch: ${{ parameters.vsTopicBranch }} + # PrimaryVSComponentJsonValues variable is set during create-workload-drops.ps1 in Microsoft.NET.Workloads.Vsman.csproj. + componentJsonValues: $(PrimaryVSComponentJsonValues) + - ${{ each secondaryInsertionBranch in parameters.secondaryVsInsertionBranches }}: + # One PR is created per branch defined at the top of this file in the secondaryVsInsertionBranches parameter. 
+ - template: /eng/pipelines/templates/steps/workload-insertion.yml@self + parameters: + targetBranch: ${{ secondaryInsertionBranch }} + topicBranch: ${{ parameters.vsTopicBranch }} + # SecondaryVSComponentJsonValues variable is set during create-workload-drops.ps1 in Microsoft.NET.Workloads.Vsman.csproj. + componentJsonValues: $(SecondaryVSComponentJsonValues) \ No newline at end of file From af40554d7c2a0a2bc7df0022e979fdad381602b2 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Thu, 11 Sep 2025 14:52:57 -0700 Subject: [PATCH 084/118] Created an artifactJob to allow the artifacts to be accessed by the VS insertion job. --- .../templates/jobs/workload-insertion-job.yml | 29 ++++++++++++++----- 1 file changed, 21 insertions(+), 8 deletions(-) diff --git a/eng/pipelines/templates/jobs/workload-insertion-job.yml b/eng/pipelines/templates/jobs/workload-insertion-job.yml index a9e4c2191..4eb0bbaac 100644 --- a/eng/pipelines/templates/jobs/workload-insertion-job.yml +++ b/eng/pipelines/templates/jobs/workload-insertion-job.yml @@ -9,23 +9,36 @@ parameters: - precomponents jobs: +- job: PrepareArtifacts + displayName: Prepare Artifacts + timeoutInMinutes: 120 + pool: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022 + os: windows + templateContext: + # 1ES docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/artifactjob + type: artifactJob + # 1ES docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/inputs + inputs: + - input: pipelineArtifact + artifactName: Artifacts + targetPath: $(System.DefaultWorkingDirectory)/artifacts + outputs: + - output: pipelineArtifact + artifactName: PreparedArtifacts + targetPath: $(System.DefaultWorkingDirectory)/artifacts - job: VsInsertion displayName: VS Insertion + dependsOn: PrepareArtifacts timeoutInMinutes: 120 pool: name: $(DncEngInternalBuildPool) image: 1es-windows-2022 os: windows 
templateContext: - type: releaseJob - isProduction: true + type: buildJob steps: - # 1ES docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/inputs - - task: 1ES.DownloadPipelineArtifact@1 - displayName: 🟣 Download build artifacts - inputs: - artifactName: Artifacts - targetPath: $(System.DefaultWorkingDirectory)/artifacts # The variables comprised of workloadShortName and workloadType are set during create-workload-drops.ps1 in Microsoft.NET.Workloads.Vsman.csproj. - ${{ each workloadShortName in parameters.workloadDropNames }}: - ${{ each workloadType in parameters.workloadDropTypes }}: From 24f07f4dbe9bd31db943e4ba8316403bab3b9b7e Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Thu, 11 Sep 2025 14:55:08 -0700 Subject: [PATCH 085/118] Wrong indentation for inputs and outputs. --- .../templates/jobs/workload-insertion-job.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/eng/pipelines/templates/jobs/workload-insertion-job.yml b/eng/pipelines/templates/jobs/workload-insertion-job.yml index 4eb0bbaac..50456fa40 100644 --- a/eng/pipelines/templates/jobs/workload-insertion-job.yml +++ b/eng/pipelines/templates/jobs/workload-insertion-job.yml @@ -19,15 +19,15 @@ jobs: templateContext: # 1ES docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/artifactjob type: artifactJob - # 1ES docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/inputs - inputs: - - input: pipelineArtifact - artifactName: Artifacts - targetPath: $(System.DefaultWorkingDirectory)/artifacts - outputs: - - output: pipelineArtifact - artifactName: PreparedArtifacts - targetPath: $(System.DefaultWorkingDirectory)/artifacts + # 1ES docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/inputs + inputs: + - 
input: pipelineArtifact + artifactName: Artifacts + targetPath: $(System.DefaultWorkingDirectory)/artifacts + outputs: + - output: pipelineArtifact + artifactName: PreparedArtifacts + targetPath: $(System.DefaultWorkingDirectory)/artifacts - job: VsInsertion displayName: VS Insertion dependsOn: PrepareArtifacts From 3b1e629fb15aa94114bbf4713852a50873a8fb14 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Thu, 11 Sep 2025 15:55:48 -0700 Subject: [PATCH 086/118] Reverted separate artifactsJob. Added feature flag to allow vsts drops in a release job. --- eng/pipelines/official.yml | 2 ++ .../templates/jobs/workload-insertion-job.yml | 29 +++++-------------- 2 files changed, 10 insertions(+), 21 deletions(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index c224cd0ca..3845b95ce 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -120,6 +120,8 @@ resources: extends: template: v1/1ES.Official.PipelineTemplate.yml@1esPipelines parameters: + featureFlags: + allowMicroBuildVstsDropInReleaseJob: true sdl: # Docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/sdlanalysis/sourceanalysisstage#my-pipeline-uses-multiple-repositories-how-to-ensure-that-sdl-sources-stage-is-injected-for-all-the-repositories sourceRepositoriesToScan: diff --git a/eng/pipelines/templates/jobs/workload-insertion-job.yml b/eng/pipelines/templates/jobs/workload-insertion-job.yml index 50456fa40..a9e4c2191 100644 --- a/eng/pipelines/templates/jobs/workload-insertion-job.yml +++ b/eng/pipelines/templates/jobs/workload-insertion-job.yml @@ -9,36 +9,23 @@ parameters: - precomponents jobs: -- job: PrepareArtifacts - displayName: Prepare Artifacts - timeoutInMinutes: 120 - pool: - name: $(DncEngInternalBuildPool) - image: 1es-windows-2022 - os: windows - templateContext: - # 1ES docs: 
https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/artifactjob - type: artifactJob - # 1ES docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/inputs - inputs: - - input: pipelineArtifact - artifactName: Artifacts - targetPath: $(System.DefaultWorkingDirectory)/artifacts - outputs: - - output: pipelineArtifact - artifactName: PreparedArtifacts - targetPath: $(System.DefaultWorkingDirectory)/artifacts - job: VsInsertion displayName: VS Insertion - dependsOn: PrepareArtifacts timeoutInMinutes: 120 pool: name: $(DncEngInternalBuildPool) image: 1es-windows-2022 os: windows templateContext: - type: buildJob + type: releaseJob + isProduction: true steps: + # 1ES docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/inputs + - task: 1ES.DownloadPipelineArtifact@1 + displayName: 🟣 Download build artifacts + inputs: + artifactName: Artifacts + targetPath: $(System.DefaultWorkingDirectory)/artifacts # The variables comprised of workloadShortName and workloadType are set during create-workload-drops.ps1 in Microsoft.NET.Workloads.Vsman.csproj. - ${{ each workloadShortName in parameters.workloadDropNames }}: - ${{ each workloadType in parameters.workloadDropTypes }}: From 2e26447c4f96c6b243df8c47a26c062d214540f9 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Thu, 11 Sep 2025 16:19:57 -0700 Subject: [PATCH 087/118] Trying secondary solution with insertion being a buildJob and not using 1ES variant for downloading. 
--- eng/pipelines/official.yml | 2 -- eng/pipelines/templates/jobs/workload-insertion-job.yml | 6 +++--- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 3845b95ce..c224cd0ca 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -120,8 +120,6 @@ resources: extends: template: v1/1ES.Official.PipelineTemplate.yml@1esPipelines parameters: - featureFlags: - allowMicroBuildVstsDropInReleaseJob: true sdl: # Docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/sdlanalysis/sourceanalysisstage#my-pipeline-uses-multiple-repositories-how-to-ensure-that-sdl-sources-stage-is-injected-for-all-the-repositories sourceRepositoriesToScan: diff --git a/eng/pipelines/templates/jobs/workload-insertion-job.yml b/eng/pipelines/templates/jobs/workload-insertion-job.yml index a9e4c2191..695eca533 100644 --- a/eng/pipelines/templates/jobs/workload-insertion-job.yml +++ b/eng/pipelines/templates/jobs/workload-insertion-job.yml @@ -17,11 +17,11 @@ jobs: image: 1es-windows-2022 os: windows templateContext: - type: releaseJob - isProduction: true + type: buildJob steps: + # This task, without the '1ES.' prefix, is not blocked on a buildJob type context. 
# 1ES docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/inputs - - task: 1ES.DownloadPipelineArtifact@1 + - task: DownloadPipelineArtifact@2 displayName: 🟣 Download build artifacts inputs: artifactName: Artifacts From 538fab221ba4fc3fd837cb2748bb66da33bd8abe Mon Sep 17 00:00:00 2001 From: GitOps Date: Fri, 12 Sep 2025 04:22:53 +0000 Subject: [PATCH 088/118] Inventory drift --- es-metadata.yml | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 es-metadata.yml diff --git a/es-metadata.yml b/es-metadata.yml new file mode 100644 index 000000000..af486ad81 --- /dev/null +++ b/es-metadata.yml @@ -0,0 +1,8 @@ +schemaVersion: 0.0.1 +isProduction: true +accountableOwners: + service: 30f635d8-2918-48af-8ddf-d9bc854b7584 +routing: + defaultAreaPath: + org: devdiv + path: DevDiv\NET Tools\SDK From fb5a64a42d646c567e1b137f51cfb9eb6c14b124 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 12 Sep 2025 11:26:28 -0700 Subject: [PATCH 089/118] Made the variables during the build accessible as stage variables. Made the workloads published as artifacts. 
--- eng/create-workload-drops.ps1 | 10 ++++---- .../templates/jobs/workload-build.yml | 11 ++++++++- .../templates/jobs/workload-insertion-job.yml | 23 ++++++++++--------- 3 files changed, 27 insertions(+), 17 deletions(-) diff --git a/eng/create-workload-drops.ps1 b/eng/create-workload-drops.ps1 index e00c595ac..c42d950d2 100644 --- a/eng/create-workload-drops.ps1 +++ b/eng/create-workload-drops.ps1 @@ -57,9 +57,9 @@ Get-ChildItem -Path $workloadDropPath -Directory | ForEach-Object { $dropType = $Matches.type.Replace('.', '') $dropUrl = "https://vsdrop.microsoft.com/file/v1/$vsDropName;$assemblyName.vsman" - Write-Host "##vso[task.setvariable variable=$($shortName)_$($dropType)_name]$vsDropName" - Write-Host "##vso[task.setvariable variable=$($shortName)_$($dropType)_dir]$dropDir" - Write-Host "##vso[task.setvariable variable=$($shortName)_$($dropType)_url]$dropUrl" + Write-Host "##vso[task.setvariable variable=$($shortName)_$($dropType)_name;isoutput=true]$vsDropName" + Write-Host "##vso[task.setvariable variable=$($shortName)_$($dropType)_dir;isoutput=true]$dropDir" + Write-Host "##vso[task.setvariable variable=$($shortName)_$($dropType)_url;isoutput=true]$dropUrl" # Each vsman file is comma-separated. First .vsman is destination and the second is source. $vsComponentValue = "$assemblyName.vsman{$workloadVersion}=$dropUrl," @@ -83,10 +83,10 @@ $null = Get-ChildItem -Path $workloadDropPath -Include *.json, *.vsmand, files.t if ($primaryVSComponentJsonValues) { # Remove the trailing comma. $primaryVSComponentJsonValues = $primaryVSComponentJsonValues -replace '.$' - Write-Host "##vso[task.setvariable variable=PrimaryVSComponentJsonValues]$primaryVSComponentJsonValues" + Write-Host "##vso[task.setvariable variable=PrimaryVSComponentJsonValues;isoutput=true]$primaryVSComponentJsonValues" } if ($secondaryVSComponentJsonValues) { # Remove the trailing comma. 
$secondaryVSComponentJsonValues = $secondaryVSComponentJsonValues -replace '.$' - Write-Host "##vso[task.setvariable variable=SecondaryVSComponentJsonValues]$secondaryVSComponentJsonValues" + Write-Host "##vso[task.setvariable variable=SecondaryVSComponentJsonValues;isoutput=true]$secondaryVSComponentJsonValues" } \ No newline at end of file diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index c4bc95270..6ed519f07 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -57,4 +57,13 @@ jobs: /p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(Build.BuildNumber) /p:StabilizePackageVersion=${{ parameters.stabilizePackageVersion }} - displayName: 🟣 Build solution \ No newline at end of file + displayName: 🟣 Build solution + # Name is required to reference the variables created within this build step in other stages. + name: BuildSolution + + - ${{ if eq(parameters.createVSInsertion, true) }}: + - task: 1ES.PublishPipelineArtifact@1 + displayName: 🟣 Publish workload artifacts + inputs: + artifact: Workloads + path: $(System.DefaultWorkingDirectory)/artifacts/workloads \ No newline at end of file diff --git a/eng/pipelines/templates/jobs/workload-insertion-job.yml b/eng/pipelines/templates/jobs/workload-insertion-job.yml index 695eca533..1abde0774 100644 --- a/eng/pipelines/templates/jobs/workload-insertion-job.yml +++ b/eng/pipelines/templates/jobs/workload-insertion-job.yml @@ -20,12 +20,12 @@ jobs: type: buildJob steps: # This task, without the '1ES.' prefix, is not blocked on a buildJob type context. 
- # 1ES docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/inputs + # 1ES docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/inputs/pipeline-artifact - task: DownloadPipelineArtifact@2 - displayName: 🟣 Download build artifacts + displayName: 🟣 Download workload artifacts inputs: - artifactName: Artifacts - targetPath: $(System.DefaultWorkingDirectory)/artifacts + artifactName: Workloads + targetPath: $(System.DefaultWorkingDirectory)/artifacts/workloads # The variables comprised of workloadShortName and workloadType are set during create-workload-drops.ps1 in Microsoft.NET.Workloads.Vsman.csproj. - ${{ each workloadShortName in parameters.workloadDropNames }}: - ${{ each workloadType in parameters.workloadDropTypes }}: @@ -33,7 +33,8 @@ jobs: # Sets the PublishWorkloadDrop variable. See: https://stackoverflow.com/a/60630739/294804 - pwsh: | # If the drop folder doesn't exist (not downloaded via DARC), the drop cannot be published. - if (-not (Test-Path -Path '$(${{ workloadShortName }}_${{ workloadType }}_dir)')) { + $workloadDirectory = "$[ stageDependencies.Build.BuildRepo.outputs['BuildSolution.${{ workloadShortName }}_${{ workloadType }}_dir'] ]" + if (-not (Test-Path -Path $workloadDirectory)) { Write-Host "##vso[task.setvariable variable=PublishWorkloadDrop]False" Write-Host 'Drop "${{ workloadShortName }}_${{ workloadType }}" was not downloaded via DARC. Skipping VS drop publish...' 
return @@ -47,8 +48,8 @@ jobs: - task: 1ES.MicroBuildVstsDrop@1 displayName: '🟣 Publish VS insertion drop: ${{ workloadShortName }}_${{ workloadType }}' inputs: - dropFolder: $(${{ workloadShortName }}_${{ workloadType }}_dir) - dropName: $(${{ workloadShortName }}_${{ workloadType }}_name) + dropFolder: $[ stageDependencies.Build.BuildRepo.outputs['BuildSolution.${{ workloadShortName }}_${{ workloadType }}_dir'] ] + dropName: $[ stageDependencies.Build.BuildRepo.outputs['BuildSolution.${{ workloadShortName }}_${{ workloadType }}_name'] ] # See: https://dev.azure.com/devdiv/DevDiv/_wiki/wikis/DevDiv.wiki/35351/Retain-Drops dropRetentionDays: 183 accessToken: $(dn-bot-devdiv-drop-rw-code-rw) @@ -56,18 +57,18 @@ jobs: condition: eq(variables['PublishWorkloadDrop'], 'True') - ${{ each primaryInsertionBranch in parameters.primaryVsInsertionBranches }}: - # One PR is created per branch defined at the top of this file in the primaryVsInsertionBranches parameter. + # One PR is created per branch defined at the top of official.yml in the primaryVsInsertionBranches parameter. - template: /eng/pipelines/templates/steps/workload-insertion.yml@self parameters: targetBranch: ${{ primaryInsertionBranch }} topicBranch: ${{ parameters.vsTopicBranch }} # PrimaryVSComponentJsonValues variable is set during create-workload-drops.ps1 in Microsoft.NET.Workloads.Vsman.csproj. - componentJsonValues: $(PrimaryVSComponentJsonValues) + componentJsonValues: $[ stageDependencies.Build.BuildRepo.outputs['BuildSolution.PrimaryVSComponentJsonValues'] ] - ${{ each secondaryInsertionBranch in parameters.secondaryVsInsertionBranches }}: - # One PR is created per branch defined at the top of this file in the secondaryVsInsertionBranches parameter. + # One PR is created per branch defined at the top of official.yml in the secondaryVsInsertionBranches parameter. 
- template: /eng/pipelines/templates/steps/workload-insertion.yml@self parameters: targetBranch: ${{ secondaryInsertionBranch }} topicBranch: ${{ parameters.vsTopicBranch }} # SecondaryVSComponentJsonValues variable is set during create-workload-drops.ps1 in Microsoft.NET.Workloads.Vsman.csproj. - componentJsonValues: $(SecondaryVSComponentJsonValues) \ No newline at end of file + componentJsonValues: $[ stageDependencies.Build.BuildRepo.outputs['BuildSolution.SecondaryVSComponentJsonValues'] ] \ No newline at end of file From 6a048675754aa2d5a5638b00ab5be6825f2762b3 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 12 Sep 2025 13:12:29 -0700 Subject: [PATCH 090/118] Moved drop hub URL to be part of workload upload check. Set stage variables to local variables for the insertion stage. --- eng/create-workload-drops.ps1 | 6 +--- .../templates/jobs/workload-insertion-job.yml | 31 ++++++++++++++----- .../templates/steps/workload-insertion.yml | 2 +- 3 files changed, 25 insertions(+), 14 deletions(-) diff --git a/eng/create-workload-drops.ps1 b/eng/create-workload-drops.ps1 index c42d950d2..9e2ed256c 100644 --- a/eng/create-workload-drops.ps1 +++ b/eng/create-workload-drops.ps1 @@ -55,12 +55,11 @@ Get-ChildItem -Path $workloadDropPath -Directory | ForEach-Object { $shortName = "$($Matches.short)" # Remove the '.' from 'pre.components' $dropType = $Matches.type.Replace('.', '') - $dropUrl = "https://vsdrop.microsoft.com/file/v1/$vsDropName;$assemblyName.vsman" Write-Host "##vso[task.setvariable variable=$($shortName)_$($dropType)_name;isoutput=true]$vsDropName" Write-Host "##vso[task.setvariable variable=$($shortName)_$($dropType)_dir;isoutput=true]$dropDir" - Write-Host "##vso[task.setvariable variable=$($shortName)_$($dropType)_url;isoutput=true]$dropUrl" + $dropUrl = "https://vsdrop.microsoft.com/file/v1/$vsDropName;$assemblyName.vsman" # Each vsman file is comma-separated. First .vsman is destination and the second is source. 
$vsComponentValue = "$assemblyName.vsman{$workloadVersion}=$dropUrl," # All VS components are added to the primary VS component JSON string. @@ -71,9 +70,6 @@ Get-ChildItem -Path $workloadDropPath -Directory | ForEach-Object { $secondaryVSComponentJsonValues += $vsComponentValue } } - - Write-Host '!!! After upload, your workload drop will be available at:' - Write-Host "https://devdiv.visualstudio.com/_apps/hub/ms-vscs-artifact.build-tasks.drop-hub-group-explorer-hub?name=$vsDropName" } # Clean up intermediate build files in the workload drop folders. diff --git a/eng/pipelines/templates/jobs/workload-insertion-job.yml b/eng/pipelines/templates/jobs/workload-insertion-job.yml index 1abde0774..087ba0c5f 100644 --- a/eng/pipelines/templates/jobs/workload-insertion-job.yml +++ b/eng/pipelines/templates/jobs/workload-insertion-job.yml @@ -18,6 +18,17 @@ jobs: os: windows templateContext: type: buildJob + variables: + - name: PrimaryVSComponentJsonValues + value: $[ stageDependencies.Build.BuildRepo.outputs['BuildSolution.PrimaryVSComponentJsonValues'] ] + - name: SecondaryVSComponentJsonValues + value: $[ stageDependencies.Build.BuildRepo.outputs['BuildSolution.SecondaryVSComponentJsonValues'] ] + - ${{ each workloadShortName in parameters.workloadDropNames }}: + - ${{ each workloadType in parameters.workloadDropTypes }}: + - name: ${{ workloadShortName }}_${{ workloadType }}_dir + value: $[ stageDependencies.Build.BuildRepo.outputs['BuildSolution.${{ workloadShortName }}_${{ workloadType }}_dir'] ] + - name: ${{ workloadShortName }}_${{ workloadType }}_name + value: $[ stageDependencies.Build.BuildRepo.outputs['BuildSolution.${{ workloadShortName }}_${{ workloadType }}_name'] ] steps: # This task, without the '1ES.' prefix, is not blocked on a buildJob type context. 
# 1ES docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/inputs/pipeline-artifact @@ -33,23 +44,27 @@ jobs: # Sets the PublishWorkloadDrop variable. See: https://stackoverflow.com/a/60630739/294804 - pwsh: | # If the drop folder doesn't exist (not downloaded via DARC), the drop cannot be published. - $workloadDirectory = "$[ stageDependencies.Build.BuildRepo.outputs['BuildSolution.${{ workloadShortName }}_${{ workloadType }}_dir'] ]" + $workloadDirectory = '$(${{ workloadShortName }}_${{ workloadType }}_dir)' + Write-Host "Checking for workload directory: $workloadDirectory" if (-not (Test-Path -Path $workloadDirectory)) { Write-Host "##vso[task.setvariable variable=PublishWorkloadDrop]False" Write-Host 'Drop "${{ workloadShortName }}_${{ workloadType }}" was not downloaded via DARC. Skipping VS drop publish...' return } - Write-Host "PublishWorkloadDrop: True" + Write-Host 'Drop "${{ workloadShortName }}_${{ workloadType }}" exists. Proceeding with VS drop publish...' 
+ $vsDropName = '$(${{ workloadShortName }}_${{ workloadType }}_name)' + Write-Host 'After publishing, your workload drop will be available at:' + Write-Host "https://devdiv.visualstudio.com/_apps/hub/ms-vscs-artifact.build-tasks.drop-hub-group-explorer-hub?name=$vsDropName" Write-Host "##vso[task.setvariable variable=PublishWorkloadDrop]True" - displayName: 🟣 Check if ${{ workloadShortName }}_${{ workloadType }} needs published + displayName: '🟣 Check drop: ${{ workloadShortName }}_${{ workloadType }}' # Docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/outputs/microbuild-vsts-drop # YAML reference (original task): https://dev.azure.com/devdiv/Engineering/_git/MicroBuild?path=/src/Tasks/UploadDrop/task.json - task: 1ES.MicroBuildVstsDrop@1 - displayName: '🟣 Publish VS insertion drop: ${{ workloadShortName }}_${{ workloadType }}' + displayName: '🟣 Publish drop: ${{ workloadShortName }}_${{ workloadType }}' inputs: - dropFolder: $[ stageDependencies.Build.BuildRepo.outputs['BuildSolution.${{ workloadShortName }}_${{ workloadType }}_dir'] ] - dropName: $[ stageDependencies.Build.BuildRepo.outputs['BuildSolution.${{ workloadShortName }}_${{ workloadType }}_name'] ] + dropFolder: $(${{ workloadShortName }}_${{ workloadType }}_dir) + dropName: $(${{ workloadShortName }}_${{ workloadType }}_name) # See: https://dev.azure.com/devdiv/DevDiv/_wiki/wikis/DevDiv.wiki/35351/Retain-Drops dropRetentionDays: 183 accessToken: $(dn-bot-devdiv-drop-rw-code-rw) @@ -63,7 +78,7 @@ jobs: targetBranch: ${{ primaryInsertionBranch }} topicBranch: ${{ parameters.vsTopicBranch }} # PrimaryVSComponentJsonValues variable is set during create-workload-drops.ps1 in Microsoft.NET.Workloads.Vsman.csproj. 
- componentJsonValues: $[ stageDependencies.Build.BuildRepo.outputs['BuildSolution.PrimaryVSComponentJsonValues'] ] + componentJsonValues: $(PrimaryVSComponentJsonValues) - ${{ each secondaryInsertionBranch in parameters.secondaryVsInsertionBranches }}: # One PR is created per branch defined at the top of official.yml in the secondaryVsInsertionBranches parameter. - template: /eng/pipelines/templates/steps/workload-insertion.yml@self @@ -71,4 +86,4 @@ jobs: targetBranch: ${{ secondaryInsertionBranch }} topicBranch: ${{ parameters.vsTopicBranch }} # SecondaryVSComponentJsonValues variable is set during create-workload-drops.ps1 in Microsoft.NET.Workloads.Vsman.csproj. - componentJsonValues: $[ stageDependencies.Build.BuildRepo.outputs['BuildSolution.SecondaryVSComponentJsonValues'] ] \ No newline at end of file + componentJsonValues: $(SecondaryVSComponentJsonValues) \ No newline at end of file diff --git a/eng/pipelines/templates/steps/workload-insertion.yml b/eng/pipelines/templates/steps/workload-insertion.yml index 4876a777a..d974467ae 100644 --- a/eng/pipelines/templates/steps/workload-insertion.yml +++ b/eng/pipelines/templates/steps/workload-insertion.yml @@ -25,7 +25,7 @@ steps: # Documentation: https://devdiv.visualstudio.com/DevDiv/_wiki/wikis/DevDiv.wiki/635/Overview?anchor=**build-pipeline** # YAML reference: https://dev.azure.com/devdiv/Engineering/_git/MicroBuild?path=/src/Tasks/InsertVsPayload/task.json - task: MicroBuildInsertVsPayload@5 - displayName: '🟣 Create VS insertion: ${{ parameters.targetBranch }}' + displayName: '🟣 Create insertion: ${{ parameters.targetBranch }}' inputs: # Connection: https://dnceng.visualstudio.com/internal/_settings/adminservices?resourceId=5708f1c1-8fb7-4722-a643-020cf89ee26f # Docs: https://devdiv.visualstudio.com/DevDiv/_wiki/wikis/DevDiv.wiki/40477/Insert-with-a-WIF-Service-Connection From bc3a7f3b7e4326588d7e5e11bc49a2e656bc1a14 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 12 Sep 2025 13:15:22 -0700 
Subject: [PATCH 091/118] Rename the workload insertion steps file to make the name more accurate. --- .../{workload-insertion.yml => workload-insertion-steps.yml} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename eng/pipelines/templates/steps/{workload-insertion.yml => workload-insertion-steps.yml} (100%) diff --git a/eng/pipelines/templates/steps/workload-insertion.yml b/eng/pipelines/templates/steps/workload-insertion-steps.yml similarity index 100% rename from eng/pipelines/templates/steps/workload-insertion.yml rename to eng/pipelines/templates/steps/workload-insertion-steps.yml From 1054083ef510ebdadf7378baee9b92793fc70f92 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 12 Sep 2025 13:16:35 -0700 Subject: [PATCH 092/118] Forgot to update usage of the steps template. --- eng/pipelines/templates/jobs/workload-insertion-job.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/eng/pipelines/templates/jobs/workload-insertion-job.yml b/eng/pipelines/templates/jobs/workload-insertion-job.yml index 087ba0c5f..e610557fc 100644 --- a/eng/pipelines/templates/jobs/workload-insertion-job.yml +++ b/eng/pipelines/templates/jobs/workload-insertion-job.yml @@ -73,7 +73,7 @@ jobs: - ${{ each primaryInsertionBranch in parameters.primaryVsInsertionBranches }}: # One PR is created per branch defined at the top of official.yml in the primaryVsInsertionBranches parameter. - - template: /eng/pipelines/templates/steps/workload-insertion.yml@self + - template: /eng/pipelines/templates/steps/workload-insertion-steps.yml@self parameters: targetBranch: ${{ primaryInsertionBranch }} topicBranch: ${{ parameters.vsTopicBranch }} @@ -81,7 +81,7 @@ jobs: componentJsonValues: $(PrimaryVSComponentJsonValues) - ${{ each secondaryInsertionBranch in parameters.secondaryVsInsertionBranches }}: # One PR is created per branch defined at the top of official.yml in the secondaryVsInsertionBranches parameter. 
- - template: /eng/pipelines/templates/steps/workload-insertion.yml@self + - template: /eng/pipelines/templates/steps/workload-insertion-steps.yml@self parameters: targetBranch: ${{ secondaryInsertionBranch }} topicBranch: ${{ parameters.vsTopicBranch }} From 8d556881e15be25b1c956c10abb09d54862b5df7 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 12 Sep 2025 14:54:21 -0700 Subject: [PATCH 093/118] Added checkout for insertion so the working directory matches the drop paths. Added different error message when drop type is not being acquired. --- .../templates/jobs/workload-insertion-job.yml | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/eng/pipelines/templates/jobs/workload-insertion-job.yml b/eng/pipelines/templates/jobs/workload-insertion-job.yml index e610557fc..f65dbd4cc 100644 --- a/eng/pipelines/templates/jobs/workload-insertion-job.yml +++ b/eng/pipelines/templates/jobs/workload-insertion-job.yml @@ -20,16 +20,22 @@ jobs: type: buildJob variables: - name: PrimaryVSComponentJsonValues - value: $[ stageDependencies.Build.BuildRepo.outputs['BuildSolution.PrimaryVSComponentJsonValues'] ] + value: $[stageDependencies.Build.BuildRepo.outputs['BuildSolution.PrimaryVSComponentJsonValues']] - name: SecondaryVSComponentJsonValues - value: $[ stageDependencies.Build.BuildRepo.outputs['BuildSolution.SecondaryVSComponentJsonValues'] ] + value: $[stageDependencies.Build.BuildRepo.outputs['BuildSolution.SecondaryVSComponentJsonValues']] - ${{ each workloadShortName in parameters.workloadDropNames }}: - ${{ each workloadType in parameters.workloadDropTypes }}: - name: ${{ workloadShortName }}_${{ workloadType }}_dir - value: $[ stageDependencies.Build.BuildRepo.outputs['BuildSolution.${{ workloadShortName }}_${{ workloadType }}_dir'] ] + value: $[stageDependencies.Build.BuildRepo.outputs['BuildSolution.${{ workloadShortName }}_${{ workloadType }}_dir']] - name: ${{ workloadShortName }}_${{ workloadType }}_name - value: $[ 
stageDependencies.Build.BuildRepo.outputs['BuildSolution.${{ workloadShortName }}_${{ workloadType }}_name'] ] + value: $[stageDependencies.Build.BuildRepo.outputs['BuildSolution.${{ workloadShortName }}_${{ workloadType }}_name']] steps: + # Note: Checkout isn't necessary for the files it acquires, but for setting the working directory to match the build job (eg. D:\a\_work\1\source-branch). + # Otherwise, the workload drop directory path variables won't match the path on disk. + - template: /eng/pipelines/templates/steps/workload-checkout.yml@self + parameters: + sourceBranch: source + engBranch: self # This task, without the '1ES.' prefix, is not blocked on a buildJob type context. # 1ES docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/inputs/pipeline-artifact - task: DownloadPipelineArtifact@2 @@ -45,6 +51,11 @@ jobs: - pwsh: | # If the drop folder doesn't exist (not downloaded via DARC), the drop cannot be published. $workloadDirectory = '$(${{ workloadShortName }}_${{ workloadType }}_dir)' + if (-not $workloadDirectory) { + Write-Host "##vso[task.setvariable variable=PublishWorkloadDrop]False" + Write-Host 'Drop type "${{ workloadType }}" is not being processed. Skipping VS drop publish...' + return + } Write-Host "Checking for workload directory: $workloadDirectory" if (-not (Test-Path -Path $workloadDirectory)) { Write-Host "##vso[task.setvariable variable=PublishWorkloadDrop]False" From b3412560285af984aeb1ce776910f0e2db5796d3 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 17 Sep 2025 18:04:28 -0700 Subject: [PATCH 094/118] In-progress of creating version parameter logic. 
--- eng/pipelines/official.yml | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 0ca420a6d..d3d62180a 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -81,6 +81,27 @@ parameters: type: boolean default: false +- name: dividerSetVersion + displayName: '[ ######## SET VERSION ######## ]' + type: boolean + default: false +- name: setVersionMajor + displayName: Set version major + type: string + default: '|default|' +- name: setVersionMinor + displayName: Set version minor (one digit) + type: string + default: '|default|' +- name: setVersionFeature + displayName: Set version feature (two digits) + type: string + default: '|default|' +- name: setVersionPatch + displayName: Set version patch + type: string + default: '|default|' + variables: # Variables used: DncEngInternalBuildPool - template: /eng/common/templates-official/variables/pool-providers.yml@source From d1437b7795c3d78132bbed3eab1c660b94e311c2 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Thu, 18 Sep 2025 16:17:09 -0700 Subject: [PATCH 095/118] Began adding scripting logic for generating the overrides props file. --- eng/create-version-override.ps1 | 54 +++++++++++++++++++ eng/pipelines/official.yml | 35 +++++++++--- .../templates/jobs/workload-build.yml | 3 +- 3 files changed, 84 insertions(+), 8 deletions(-) create mode 100644 eng/create-version-override.ps1 diff --git a/eng/create-version-override.ps1 b/eng/create-version-override.ps1 new file mode 100644 index 000000000..86a44aad6 --- /dev/null +++ b/eng/create-version-override.ps1 @@ -0,0 +1,54 @@ +# Using the downloaded workloads, this creates the VS drops to upload for VS insertion. +# It builds the Microsoft.NET.Workloads.Vsman.vsmanproj per workload ZIP, which creates the appropriate VSMAN file. 
+ +# $workloadPath: The path to the directory containing the workload ZIPs, usually the output path used by DARC in the download-workloads.ps1 script. +# - Example Value: "$(RepoRoot)artifacts\workloads" +# $msBuildToolsPath: The path to the MSBuild tools directory, generally $(MSBuildToolsPath) in MSBuild. +# - Example Value: 'C:\Program Files\Microsoft Visual Studio\2022\Preview\MSBuild\Current\Bin' + +param ([bool] $createTestWorkloadSet = $false, [string] $sdkVersionMinor = '|default|', [string] $versionFeature = '|default|', [string] $versionPatch = '|default|', [string] $preReleaseVersionLabel = '|default|', [string] $preReleaseVersionIteration = '|default|') + +$containsNonDefault = ($sdkVersionMinor, $versionFeature, $versionPatch, $preReleaseVersionLabel, $preReleaseVersionIteration | Where-Object { $_ -ne '|default|' }) -ne $null + +if (-not $containsNonDefault -and -not $createTestWorkloadSet) { + Write-Host "No version overrides to apply." + exit 0 +} + +$xmlDoc = New-Object System.Xml.XmlDocument +$projectElement = $xmlDoc.CreateElement("Project") +$xmlDoc.AppendChild($rootElement) + +$propertyGroup1Element = $xmlDoc.CreateElement("PropertyGroup") +$projectElement.AppendChild($propertyGroup1Element) + +$propertyGroup2Element = $xmlDoc.CreateElement("PropertyGroup") +$projectElement.AppendChild($propertyGroup2Element) + + +$settingElement.SetAttribute("Name", "LogLevel") +$settingElement.InnerText = "Debug" + +$xmlDoc.Save("D:\Workspace\TestMe.xml") + + + +# +# +# 3 +# 05 +# 0 +# rc +# 1 +# +# +# $(VersionMajor).$(VersionSDKMinor)$(VersionFeature).$(VersionPatch) +# $(VersionMajor).$(VersionMinor).$(VersionSDKMinor)$(VersionFeature) +# $(WorkloadsVersion).$(VersionPatch) +# $(VersionMajor).$(VersionMinor).$(VersionSDKMinor)00 +# $(SdkFeatureBand)-$(PreReleaseVersionLabel).$(PreReleaseVersionIteration) +# +# $(PreReleaseVersionIteration).0 +# 0 +# +# \ No newline at end of file diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 
d3d62180a..d45236f44 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -85,12 +85,16 @@ parameters: displayName: '[ ######## SET VERSION ######## ]' type: boolean default: false -- name: setVersionMajor - displayName: Set version major - type: string - default: '|default|' -- name: setVersionMinor - displayName: Set version minor (one digit) +# - name: setVersionMajor +# displayName: Set version major +# type: string +# default: '|default|' +# - name: setVersionMinor +# displayName: Set version minor +# type: string +# default: '|default|' +- name: setSdkVersionMinor + displayName: Set SDK version minor (one digit) type: string default: '|default|' - name: setVersionFeature @@ -101,6 +105,25 @@ parameters: displayName: Set version patch type: string default: '|default|' +- name: setPreReleaseVersionLabel + displayName: Set pre-release version label + type: string + default: '|default|' + values: + - '|default|' + - servicing + - preview + - rc + - alpha + - rtm +- name: setPreReleaseVersionIteration + displayName: Set pre-release version iteration + type: string + default: '|default|' +- name: createTestWorkloadSet + displayName: Create a test workload set + type: boolean + default: false variables: # Variables used: DncEngInternalBuildPool diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 1f0847f3d..3e36fe0c6 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -53,7 +53,7 @@ jobs: -workloadListJson '$(WorkloadListJson)' -usePreComponents:$${{ parameters.usePreComponentsForVSInsertion }} -includeNonShipping:$${{ parameters.includeNonShippingWorkloads }} - + - powershell: 'Insert script here' # https://github.com/dotnet/arcade/blob/ccae251ef033746eb0213329953f5e3c1687693b/Documentation/CorePackages/Publishing.md#basic-onboarding-scenario-for-new-repositories-to-the-current-publishing-version-v3 - powershell: 
>- eng/common/build.ps1 @@ -69,7 +69,6 @@ jobs: displayName: 🟣 Build solution # Name is required to reference the variables created within this build step in other stages. name: BuildSolution - - ${{ if eq(parameters.createVSInsertion, true) }}: - task: 1ES.PublishPipelineArtifact@1 displayName: 🟣 Publish workload artifacts From 93078cad47fad723e51d5d957a1b94ac4f7a6b3f Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 19 Sep 2025 15:03:35 -0700 Subject: [PATCH 096/118] Completed the creation of the version override script. --- eng/create-version-override.ps1 | 144 ++++++++++++++++++++++---------- eng/pipelines/official.yml | 4 +- 2 files changed, 101 insertions(+), 47 deletions(-) diff --git a/eng/create-version-override.ps1 b/eng/create-version-override.ps1 index 86a44aad6..36628fd57 100644 --- a/eng/create-version-override.ps1 +++ b/eng/create-version-override.ps1 @@ -1,54 +1,108 @@ -# Using the downloaded workloads, this creates the VS drops to upload for VS insertion. -# It builds the Microsoft.NET.Workloads.Vsman.vsmanproj per workload ZIP, which creates the appropriate VSMAN file. +# Creates a Version.Overrides.props file to override version components for the workload set creation. -# $workloadPath: The path to the directory containing the workload ZIPs, usually the output path used by DARC in the download-workloads.ps1 script. -# - Example Value: "$(RepoRoot)artifacts\workloads" -# $msBuildToolsPath: The path to the MSBuild tools directory, generally $(MSBuildToolsPath) in MSBuild. -# - Example Value: 'C:\Program Files\Microsoft Visual Studio\2022\Preview\MSBuild\Current\Bin' +# $createTestWorkloadSet: +# - If $true, adds PreReleaseVersionIteration overrides for creating a test workload set. +# - If $false, does not add any PreReleaseVersionIteration overrides. +# $versionSdkMinor: Adds the VersionSdkMinor property to the Version.Overrides.props file with the provided value. 
+# - Example Value: '2'
+# $versionFeature: Adds the VersionFeature property to the Version.Overrides.props file with the provided value.
+# - Example Value: '01'
+# $versionPatch: Adds the VersionPatch property to the Version.Overrides.props file with the provided value.
+# - Example Value: '4'
+# $preReleaseVersionLabel: Adds the PreReleaseVersionLabel property to the Version.Overrides.props file with the provided value.
+# - Example Value: 'preview'
+# $preReleaseVersionIteration: Adds the PreReleaseVersionIteration property to the Version.Overrides.props file with the provided value.
+# - Example Value: '1'

-param ([bool] $createTestWorkloadSet = $false, [string] $sdkVersionMinor = '|default|', [string] $versionFeature = '|default|', [string] $versionPatch = '|default|', [string] $preReleaseVersionLabel = '|default|', [string] $preReleaseVersionIteration = '|default|') +param ([bool] $createTestWorkloadSet = $false, [string] $versionSdkMinor = '|default|', [string] $versionFeature = '|default|', [string] $versionPatch = '|default|', [string] $preReleaseVersionLabel = '|default|', [string] $preReleaseVersionIteration = '|default|')

-$containsNonDefault = ($sdkVersionMinor, $versionFeature, $versionPatch, $preReleaseVersionLabel, $preReleaseVersionIteration | Where-Object { $_ -ne '|default|' }) -ne $null
+$containsNonDefault = ($versionSdkMinor, $versionFeature, $versionPatch, $preReleaseVersionLabel, $preReleaseVersionIteration | Where-Object { $_ -ne '|default|' }) -ne $null

if (-not $containsNonDefault -and -not $createTestWorkloadSet) { - Write-Host "No version overrides to apply." - exit 0 + Write-Host 'No version overrides to apply.' 
+ exit 0 } $xmlDoc = New-Object System.Xml.XmlDocument -$projectElement = $xmlDoc.CreateElement("Project") -$xmlDoc.AppendChild($rootElement) - -$propertyGroup1Element = $xmlDoc.CreateElement("PropertyGroup") -$projectElement.AppendChild($propertyGroup1Element) - -$propertyGroup2Element = $xmlDoc.CreateElement("PropertyGroup") -$projectElement.AppendChild($propertyGroup2Element) - - -$settingElement.SetAttribute("Name", "LogLevel") -$settingElement.InnerText = "Debug" - -$xmlDoc.Save("D:\Workspace\TestMe.xml") - - - -# -# -# 3 -# 05 -# 0 -# rc -# 1 -# -# -# $(VersionMajor).$(VersionSDKMinor)$(VersionFeature).$(VersionPatch) -# $(VersionMajor).$(VersionMinor).$(VersionSDKMinor)$(VersionFeature) -# $(WorkloadsVersion).$(VersionPatch) -# $(VersionMajor).$(VersionMinor).$(VersionSDKMinor)00 -# $(SdkFeatureBand)-$(PreReleaseVersionLabel).$(PreReleaseVersionIteration) -# -# $(PreReleaseVersionIteration).0 -# 0 -# -# \ No newline at end of file +$project = $xmlDoc.CreateElement('Project') +$propertyGroup1 = $xmlDoc.CreateElement('PropertyGroup') + +if ($versionSdkMinor -ne '|default|') { + $versionSdkMinorElem = $xmlDoc.CreateElement('VersionSdkMinor') + $versionSdkMinorElem.InnerText = $versionSdkMinor + $null = $propertyGroup1.AppendChild($versionSdkMinorElem) + Write-Host "Setting VersionSdkMinor to $versionSdkMinor." +} + +if ($versionFeature -ne '|default|') { + $versionFeatureElem = $xmlDoc.CreateElement('VersionFeature') + $versionFeatureElem.InnerText = $versionFeature + $null = $propertyGroup1.AppendChild($versionFeatureElem) + Write-Host "Setting VersionFeature to $versionFeature." +} + +if ($versionPatch -ne '|default|') { + $versionPatchElem = $xmlDoc.CreateElement('VersionPatch') + $versionPatchElem.InnerText = $versionPatch + $null = $propertyGroup1.AppendChild($versionPatchElem) + Write-Host "Setting VersionPatch to $versionPatch." 
+} + +if ($preReleaseVersionLabel -ne '|default|') { + $preReleaseVersionLabelElem = $xmlDoc.CreateElement('PreReleaseVersionLabel') + $preReleaseVersionLabelElem.InnerText = $preReleaseVersionLabel + $null = $propertyGroup1.AppendChild($preReleaseVersionLabelElem) + Write-Host "Setting PreReleaseVersionLabel to $preReleaseVersionLabel." +} + +if ($preReleaseVersionIteration -ne '|default|') { + $preReleaseVersionIterationElem = $xmlDoc.CreateElement('PreReleaseVersionIteration') + $null = $preReleaseVersionIterationElem.SetAttribute('Condition', "'`$(StabilizePackageVersion)' != 'true'") + $preReleaseVersionIterationElem.InnerText = $preReleaseVersionIteration + $null = $propertyGroup1.AppendChild($preReleaseVersionIterationElem) + Write-Host "Setting PreReleaseVersionIteration to $preReleaseVersionIteration." +} + +$null = $project.AppendChild($propertyGroup1) +$propertyGroup2 = $xmlDoc.CreateElement('PropertyGroup') + +$versionPrefix = $xmlDoc.CreateElement('VersionPrefix') +$versionPrefix.InnerText = '$(VersionMajor).$(VersionSdkMinor)$(VersionFeature).$(VersionPatch)' +$null = $propertyGroup2.AppendChild($versionPrefix) + +$workloadsVersion1 = $xmlDoc.CreateElement('WorkloadsVersion') +$workloadsVersion1.InnerText = '$(VersionMajor).$(VersionMinor).$(VersionSdkMinor)$(VersionFeature)' +$null = $propertyGroup2.AppendChild($workloadsVersion1) + +$workloadsVersion2 = $xmlDoc.CreateElement('WorkloadsVersion') +$null = $workloadsVersion2.SetAttribute('Condition', "'`$(StabilizePackageVersion)' == 'true' and '`$(VersionPatch)' != '0'") +$workloadsVersion2.InnerText = '$(WorkloadsVersion).$(VersionPatch)' +$null = $propertyGroup2.AppendChild($workloadsVersion2) + +$sdkFeatureBand1 = $xmlDoc.CreateElement('SdkFeatureBand') +$sdkFeatureBand1.InnerText = '$(VersionMajor).$(VersionMinor).$(VersionSdkMinor)00' +$null = $propertyGroup2.AppendChild($sdkFeatureBand1) + +$sdkFeatureBand2 = $xmlDoc.CreateElement('SdkFeatureBand') +$null = 
$sdkFeatureBand2.SetAttribute('Condition', "'`$(StabilizePackageVersion)' != 'true' and '`$(PreReleaseVersionLabel)' != 'servicing'") +$sdkFeatureBand2.InnerText = '$(SdkFeatureBand)-$(PreReleaseVersionLabel).$(PreReleaseVersionIteration)' +$null = $propertyGroup2.AppendChild($sdkFeatureBand2) + +if ($createTestWorkloadSet) { + $preReleaseVersionIteration1 = $xmlDoc.CreateElement('PreReleaseVersionIteration') + $null = $preReleaseVersionIteration1.SetAttribute('Condition', "'`$(PreReleaseVersionLabel)' != 'servicing'") + $preReleaseVersionIteration1.InnerText = '$(PreReleaseVersionIteration).0' + $null = $propertyGroup2.AppendChild($preReleaseVersionIteration1) + + $preReleaseVersionIteration2 = $xmlDoc.CreateElement('PreReleaseVersionIteration') + $null = $preReleaseVersionIteration2.SetAttribute('Condition', "'`$(PreReleaseVersionLabel)' == 'servicing'") + $preReleaseVersionIteration2.InnerText = '0' + $null = $propertyGroup2.AppendChild($preReleaseVersionIteration2) + Write-Host 'Setting PreReleaseVersionIteration for test workload set.' 
+} + +$null = $project.AppendChild($propertyGroup2) +$null = $xmlDoc.AppendChild($project) + +$versionOverridesPath = Join-Path -Path $PSScriptRoot -ChildPath 'Version.Overrides.props' +$null = $xmlDoc.Save($versionOverridesPath) \ No newline at end of file diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index d45236f44..f4ef89744 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -93,8 +93,8 @@ parameters: # displayName: Set version minor # type: string # default: '|default|' -- name: setSdkVersionMinor - displayName: Set SDK version minor (one digit) +- name: setVersionSdkMinor + displayName: Set version SDK minor (one digit) type: string default: '|default|' - name: setVersionFeature From b46d7134c0e7a597625feab802aa4acf2a5cfac8 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Sat, 20 Sep 2025 07:07:05 -0700 Subject: [PATCH 097/118] Added parameter passing to call the version override script. --- eng/pipelines/official.yml | 14 ++++++-------- eng/pipelines/templates/jobs/workload-build.yml | 9 ++++++++- 2 files changed, 14 insertions(+), 9 deletions(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index f4ef89744..ea6dd4259 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -85,14 +85,6 @@ parameters: displayName: '[ ######## SET VERSION ######## ]' type: boolean default: false -# - name: setVersionMajor -# displayName: Set version major -# type: string -# default: '|default|' -# - name: setVersionMinor -# displayName: Set version minor -# type: string -# default: '|default|' - name: setVersionSdkMinor displayName: Set version SDK minor (one digit) type: string @@ -193,6 +185,12 @@ extends: usePreComponentsForVSInsertion: ${{ parameters.usePreComponentsForVSInsertion }} includeNonShippingWorkloads: ${{ parameters.includeNonShippingWorkloads }} workloadDropNames: ${{ parameters.workloadDropNames }} + createTestWorkloadSet: ${{ parameters.createTestWorkloadSet }} + 
setVersionSdkMinor: ${{ parameters.setVersionSdkMinor }} + setVersionFeature: ${{ parameters.setVersionFeature }} + setVersionPatch: ${{ parameters.setVersionPatch }} + setPreReleaseVersionLabel: ${{ parameters.setPreReleaseVersionLabel }} + setPreReleaseVersionIteration: ${{ parameters.setPreReleaseVersionIteration }} - ${{ if eq(parameters.createVSInsertion, true) }}: - stage: Insertion diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 3e36fe0c6..2f8edfa8f 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -53,7 +53,14 @@ jobs: -workloadListJson '$(WorkloadListJson)' -usePreComponents:$${{ parameters.usePreComponentsForVSInsertion }} -includeNonShipping:$${{ parameters.includeNonShippingWorkloads }} - - powershell: 'Insert script here' + - powershell: eng/create-version-override.ps1 + -createTestWorkloadSet:$${{ parameters.createTestWorkloadSet }} + -versionSdkMinor '${{ parameters.setVersionSdkMinor }}' + -versionFeature '${{ parameters.setVersionFeature }}' + -versionPatch '${{ parameters.setVersionPatch }}' + -preReleaseVersionLabel '${{ parameters.setPreReleaseVersionLabel }}' + -preReleaseVersionIteration '${{ parameters.setPreReleaseVersionIteration }}' + displayName: 🟣 Create Version.Overrides.props # https://github.com/dotnet/arcade/blob/ccae251ef033746eb0213329953f5e3c1687693b/Documentation/CorePackages/Publishing.md#basic-onboarding-scenario-for-new-repositories-to-the-current-publishing-version-v3 - powershell: >- eng/common/build.ps1 From ed5f94555fdc4e07cf1d0928378d4c7b5040ed4a Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 30 Sep 2025 15:10:22 -0700 Subject: [PATCH 098/118] Fix incorrect checkout parameter names + string truncation script (#580) * Trying to add "self" as a repo to scan. * Trying to specify eng directly for scanning. * Had changed the alias names and forgot to update when the PR merged. 
* Include fix for commit message truncation. --- eng/pipelines/templates/jobs/workload-build.yml | 3 ++- eng/pipelines/templates/jobs/workload-insertion-job.yml | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 9014f1a83..c8a7f0aea 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -36,8 +36,9 @@ jobs: Write-Host "##vso[task.setvariable variable=OfficialBuildId]$(Build.BuildNumber)" # Keep only valid characters. Invalid characters include: " / : < > \ | ? @ * # Also, strip any trailing '.' characters as those are invalid too. + $commitMessage = "$(git log -1 --pretty=%s)".Trim() -replace '["\/:<>\\|?@*]|\.{1,}$', '' # Lastly, truncate to 255 max characters: 241 = 255 - 14 (for build number and delimiter, ex: 20250910.13 • ) - $commitMessage = ("$(git log -1 --pretty=%s)".Trim() -replace '["\/:<>\\|?@*]|\.{1,}$', '').Substring(0, 241) + $commitMessage = $commitMessage.Substring(0, [Math]::Min($commitMessage.Length, 241)) Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" displayName: 🟣 Set run name via source branch commit message - ${{ if eq(parameters.createVSInsertion, true) }}: diff --git a/eng/pipelines/templates/jobs/workload-insertion-job.yml b/eng/pipelines/templates/jobs/workload-insertion-job.yml index f65dbd4cc..59bf03ebb 100644 --- a/eng/pipelines/templates/jobs/workload-insertion-job.yml +++ b/eng/pipelines/templates/jobs/workload-insertion-job.yml @@ -34,8 +34,8 @@ jobs: # Otherwise, the workload drop directory path variables won't match the path on disk. - template: /eng/pipelines/templates/steps/workload-checkout.yml@self parameters: - sourceBranch: source - engBranch: self + sourceBranchAlias: source + engBranchAlias: self # This task, without the '1ES.' prefix, is not blocked on a buildJob type context. 
# 1ES docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/inputs/pipeline-artifact - task: DownloadPipelineArtifact@2 From 3bb19383c99e9599ce8dbe947cd8517cb3fe4b28 Mon Sep 17 00:00:00 2001 From: Marc Paine Date: Tue, 30 Sep 2025 15:10:26 -0700 Subject: [PATCH 099/118] Update workload manifests with version conditions for 9/10 (#579) --- src/Microsoft.NET.Workloads/workloads.props | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/Microsoft.NET.Workloads/workloads.props b/src/Microsoft.NET.Workloads/workloads.props index 05d89a84d..d83325943 100644 --- a/src/Microsoft.NET.Workloads/workloads.props +++ b/src/Microsoft.NET.Workloads/workloads.props @@ -4,8 +4,8 @@ - - + + @@ -21,8 +21,8 @@ - - + + From aa78a80afbbe8f139e27803be616974817318942 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 30 Sep 2025 15:33:58 -0700 Subject: [PATCH 100/118] Moved test workload set parameter up. --- eng/pipelines/official.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index f75171f5e..0a9f965e3 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -12,6 +12,10 @@ parameters: - name: sourceBranch displayName: 🚩 Source branch 🚩 type: string +- name: createTestWorkloadSet + displayName: ⭐ Create a test workload set + type: boolean + default: false - name: dividerAzDO displayName: '[ ######## AZURE DEVOPS ######## ]' @@ -116,10 +120,6 @@ parameters: displayName: Set pre-release version iteration type: string default: '|default|' -- name: createTestWorkloadSet - displayName: Create a test workload set - type: boolean - default: false variables: # Variables used: DncEngInternalBuildPool From 1da5e5d46fe05ab4580e50aa4ccb93898ee10baf Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 30 Sep 2025 15:38:42 -0700 Subject: [PATCH 101/118] Forgot to add the usage of officialBuildId when 
the Arcade flow occurred. --- eng/pipelines/templates/jobs/workload-build.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index c8a7f0aea..66d03b426 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -9,6 +9,7 @@ jobs: publishAssetsImmediately: true enableSbom: true repositoryAlias: source + officialBuildId: $(Build.BuildNumber) artifacts: publish: artifacts: From 99a71a6945f7fba2b3c5413243ec7f4276a61fe5 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 30 Sep 2025 15:44:15 -0700 Subject: [PATCH 102/118] Use explicit build number format since the original variable will be changed. --- eng/pipelines/templates/jobs/workload-build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 66d03b426..6c2b439d8 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -9,7 +9,7 @@ jobs: publishAssetsImmediately: true enableSbom: true repositoryAlias: source - officialBuildId: $(Build.BuildNumber) + officialBuildId: $(Date:yyyyMMdd).$(Rev:r) artifacts: publish: artifacts: From 0ecf3e36d8be3eeca2bab23e38264cc1dc1a993e Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 30 Sep 2025 15:46:43 -0700 Subject: [PATCH 103/118] Trying the explicitly created OfficialBuildId. 
--- eng/pipelines/templates/jobs/workload-build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 6c2b439d8..59b43f142 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -9,7 +9,7 @@ jobs: publishAssetsImmediately: true enableSbom: true repositoryAlias: source - officialBuildId: $(Date:yyyyMMdd).$(Rev:r) + officialBuildId: $(OfficialBuildId) artifacts: publish: artifacts: From a17bcd69c73d3106f7f5241e754f8d9a0f788741 Mon Sep 17 00:00:00 2001 From: Marc Paine Date: Tue, 30 Sep 2025 16:45:50 -0700 Subject: [PATCH 104/118] Add Aspire workload manifest for version <= 9 (#581) --- src/Microsoft.NET.Workloads/workloads.props | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/Microsoft.NET.Workloads/workloads.props b/src/Microsoft.NET.Workloads/workloads.props index d83325943..782e5b400 100644 --- a/src/Microsoft.NET.Workloads/workloads.props +++ b/src/Microsoft.NET.Workloads/workloads.props @@ -25,4 +25,7 @@ + + + From 7494a03687019e0a128ac881af6af588c63ead71 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 30 Sep 2025 16:54:10 -0700 Subject: [PATCH 105/118] Try using expression syntax with OfficialBuildId. 
--- eng/pipelines/templates/jobs/workload-build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 59b43f142..687cc9224 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -9,7 +9,7 @@ jobs: publishAssetsImmediately: true enableSbom: true repositoryAlias: source - officialBuildId: $(OfficialBuildId) + officialBuildId: $[variables.OfficialBuildId] artifacts: publish: artifacts: From 0a7a3ffae5cfae00f391ef8752da0df97bd8b77a Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 30 Sep 2025 16:55:38 -0700 Subject: [PATCH 106/118] Try using expression syntax with Build.BuildNumber. --- eng/pipelines/templates/jobs/workload-build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 687cc9224..1bf04720b 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -9,7 +9,7 @@ jobs: publishAssetsImmediately: true enableSbom: true repositoryAlias: source - officialBuildId: $[variables.OfficialBuildId] + officialBuildId: $[variables.Build.BuildNumber] artifacts: publish: artifacts: From feb432bf1ee4ed2f783d4eae5bdc8df193500cc1 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 30 Sep 2025 17:00:03 -0700 Subject: [PATCH 107/118] Specified the variable format wrong for Build.BuildNumber. 
--- eng/pipelines/templates/jobs/workload-build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 1bf04720b..5ac7fd317 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -9,7 +9,7 @@ jobs: publishAssetsImmediately: true enableSbom: true repositoryAlias: source - officialBuildId: $[variables.Build.BuildNumber] + officialBuildId: $[variables['Build.BuildNumber']] artifacts: publish: artifacts: From a6cbb6fd8d420db7a45bb8a185ab1ec67e265dc0 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 30 Sep 2025 17:02:50 -0700 Subject: [PATCH 108/118] Specified the variable format wrong for OfficialBuildId. --- eng/pipelines/templates/jobs/workload-build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 5ac7fd317..158cf7089 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -9,7 +9,7 @@ jobs: publishAssetsImmediately: true enableSbom: true repositoryAlias: source - officialBuildId: $[variables['Build.BuildNumber']] + officialBuildId: $[variables['OfficialBuildId']] artifacts: publish: artifacts: From d6d736cfea99fb9ac42445b5e12702a1f85ab324 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 30 Sep 2025 17:11:53 -0700 Subject: [PATCH 109/118] Set a new OfficialBuildIdFromJob as a stage variable and access that as a runtime expression. 
--- eng/pipelines/templates/jobs/workload-build.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 158cf7089..903f7a0e6 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -9,7 +9,7 @@ jobs: publishAssetsImmediately: true enableSbom: true repositoryAlias: source - officialBuildId: $[variables['OfficialBuildId']] + officialBuildId: $[stageDependencies.Build.BuildRepo.outputs['SetRunName.OfficialBuildIdFromJob']] artifacts: publish: artifacts: @@ -35,6 +35,7 @@ jobs: # See: https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number - powershell: | Write-Host "##vso[task.setvariable variable=OfficialBuildId]$(Build.BuildNumber)" + Write-Host "##vso[task.setvariable variable=OfficialBuildIdFromJob;isoutput=true]$(Build.BuildNumber)" # Keep only valid characters. Invalid characters include: " / : < > \ | ? @ * # Also, strip any trailing '.' characters as those are invalid too. $commitMessage = "$(git log -1 --pretty=%s)".Trim() -replace '["\/:<>\\|?@*]|\.{1,}$', '' @@ -42,6 +43,8 @@ jobs: $commitMessage = $commitMessage.Substring(0, [Math]::Min($commitMessage.Length, 241)) Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" displayName: 🟣 Set run name via source branch commit message + # Name is required to reference the variables created within this build step in other stages. + name: SetRunName - ${{ if eq(parameters.createVSInsertion, true) }}: # The convertToJson expression in AzDO creates "pretty" JSON with line breaks and indentation. # To simplify passing this JSON to scripts, we collapse it to a single line. From 29ba3265ab4ee1a268d9bc7561ada44c974a8e6c Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 30 Sep 2025 18:17:26 -0700 Subject: [PATCH 110/118] After running multiple builds, only this solution is required. 
--- eng/pipelines/templates/jobs/workload-build.yml | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 903f7a0e6..f294e1df2 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -9,7 +9,9 @@ jobs: publishAssetsImmediately: true enableSbom: true repositoryAlias: source - officialBuildId: $[stageDependencies.Build.BuildRepo.outputs['SetRunName.OfficialBuildIdFromJob']] + # This variable is evaluated when this job template is executed. + # Therefore, the value will not be affected by the updatebuildnumber script below. + officialBuildId: $[variables['Build.BuildNumber']] artifacts: publish: artifacts: @@ -35,7 +37,6 @@ jobs: # See: https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number - powershell: | Write-Host "##vso[task.setvariable variable=OfficialBuildId]$(Build.BuildNumber)" - Write-Host "##vso[task.setvariable variable=OfficialBuildIdFromJob;isoutput=true]$(Build.BuildNumber)" # Keep only valid characters. Invalid characters include: " / : < > \ | ? @ * # Also, strip any trailing '.' characters as those are invalid too. $commitMessage = "$(git log -1 --pretty=%s)".Trim() -replace '["\/:<>\\|?@*]|\.{1,}$', '' @@ -43,8 +44,6 @@ jobs: $commitMessage = $commitMessage.Substring(0, [Math]::Min($commitMessage.Length, 241)) Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" displayName: 🟣 Set run name via source branch commit message - # Name is required to reference the variables created within this build step in other stages. - name: SetRunName - ${{ if eq(parameters.createVSInsertion, true) }}: # The convertToJson expression in AzDO creates "pretty" JSON with line breaks and indentation. # To simplify passing this JSON to scripts, we collapse it to a single line. 
From 993cb4cb3400a520372336122418f3fc3febfa76 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 1 Oct 2025 12:16:24 -0700 Subject: [PATCH 111/118] Removed the build number custom logic. --- .../templates/jobs/workload-build.yml | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index f294e1df2..45e2169d2 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -9,9 +9,9 @@ jobs: publishAssetsImmediately: true enableSbom: true repositoryAlias: source - # This variable is evaluated when this job template is executed. - # Therefore, the value will not be affected by the updatebuildnumber script below. - officialBuildId: $[variables['Build.BuildNumber']] + # # This variable is evaluated when this job template is executed. + # # Therefore, the value will not be affected by the updatebuildnumber script below. + # officialBuildId: $[variables['Build.BuildNumber']] artifacts: publish: artifacts: @@ -32,18 +32,18 @@ jobs: parameters: sourceBranchAlias: source engBranchAlias: self - # Sets the run name to use the source branch commit message. - # Also, sets the OfficialBuildId variable to the original Build.BuildNumber for use in Arcade. - # See: https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number - - powershell: | - Write-Host "##vso[task.setvariable variable=OfficialBuildId]$(Build.BuildNumber)" - # Keep only valid characters. Invalid characters include: " / : < > \ | ? @ * - # Also, strip any trailing '.' characters as those are invalid too. 
- $commitMessage = "$(git log -1 --pretty=%s)".Trim() -replace '["\/:<>\\|?@*]|\.{1,}$', '' - # Lastly, truncate to 255 max characters: 241 = 255 - 14 (for build number and delimiter, ex: 20250910.13 • ) - $commitMessage = $commitMessage.Substring(0, [Math]::Min($commitMessage.Length, 241)) - Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" - displayName: 🟣 Set run name via source branch commit message + # # Sets the run name to use the source branch commit message. + # # Also, sets the OfficialBuildId variable to the original Build.BuildNumber for use in Arcade. + # # See: https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number + # - powershell: | + # Write-Host "##vso[task.setvariable variable=OfficialBuildId]$(Build.BuildNumber)" + # # Keep only valid characters. Invalid characters include: " / : < > \ | ? @ * + # # Also, strip any trailing '.' characters as those are invalid too. + # $commitMessage = "$(git log -1 --pretty=%s)".Trim() -replace '["\/:<>\\|?@*]|\.{1,}$', '' + # # Lastly, truncate to 255 max characters: 241 = 255 - 14 (for build number and delimiter, ex: 20250910.13 • ) + # $commitMessage = $commitMessage.Substring(0, [Math]::Min($commitMessage.Length, 241)) + # Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" + # displayName: 🟣 Set run name via source branch commit message - ${{ if eq(parameters.createVSInsertion, true) }}: # The convertToJson expression in AzDO creates "pretty" JSON with line breaks and indentation. # To simplify passing this JSON to scripts, we collapse it to a single line. 
@@ -79,7 +79,7 @@ jobs: /p:DotNetSignType=$(_SignType) /p:TeamName=$(_TeamName) /p:DotNetPublishUsingPipelines=true - /p:OfficialBuildId=$(OfficialBuildId) + /p:OfficialBuildId=$(Build.BuildNumber) /p:StabilizePackageVersion=${{ parameters.stabilizePackageVersion }} displayName: 🟣 Build solution # Name is required to reference the variables created within this build step in other stages. From 60ace7303044303b5d705b97c80267f2ddf1af39 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 1 Oct 2025 12:24:37 -0700 Subject: [PATCH 112/118] Trying to export OfficialBuildIdFromJob and use that in insertion. --- .../templates/jobs/workload-build.yml | 35 ++++++++++--------- .../templates/jobs/workload-insertion-job.yml | 6 +++- .../steps/workload-insertion-steps.yml | 5 +-- 3 files changed, 27 insertions(+), 19 deletions(-) diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index 45e2169d2..f9f9b2a48 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -9,9 +9,9 @@ jobs: publishAssetsImmediately: true enableSbom: true repositoryAlias: source - # # This variable is evaluated when this job template is executed. - # # Therefore, the value will not be affected by the updatebuildnumber script below. - # officialBuildId: $[variables['Build.BuildNumber']] + # This variable is evaluated when this job template is executed. + # Therefore, the value will not be affected by the updatebuildnumber script below. + officialBuildId: $[variables['Build.BuildNumber']] artifacts: publish: artifacts: @@ -32,18 +32,21 @@ jobs: parameters: sourceBranchAlias: source engBranchAlias: self - # # Sets the run name to use the source branch commit message. - # # Also, sets the OfficialBuildId variable to the original Build.BuildNumber for use in Arcade. 
- # # See: https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number - # - powershell: | - # Write-Host "##vso[task.setvariable variable=OfficialBuildId]$(Build.BuildNumber)" - # # Keep only valid characters. Invalid characters include: " / : < > \ | ? @ * - # # Also, strip any trailing '.' characters as those are invalid too. - # $commitMessage = "$(git log -1 --pretty=%s)".Trim() -replace '["\/:<>\\|?@*]|\.{1,}$', '' - # # Lastly, truncate to 255 max characters: 241 = 255 - 14 (for build number and delimiter, ex: 20250910.13 • ) - # $commitMessage = $commitMessage.Substring(0, [Math]::Min($commitMessage.Length, 241)) - # Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" - # displayName: 🟣 Set run name via source branch commit message + # Sets the run name to use the source branch commit message. + # Also, sets the OfficialBuildId variable to the original Build.BuildNumber for use in Arcade. + # See: https://learn.microsoft.com/en-us/azure/devops/pipelines/process/run-number + - powershell: | + Write-Host "##vso[task.setvariable variable=OfficialBuildId]$(Build.BuildNumber)" + Write-Host "##vso[task.setvariable variable=OfficialBuildIdFromJob;isoutput=true]$(Build.BuildNumber)" + # Keep only valid characters. Invalid characters include: " / : < > \ | ? @ * + # Also, strip any trailing '.' characters as those are invalid too. + $commitMessage = "$(git log -1 --pretty=%s)".Trim() -replace '["\/:<>\\|?@*]|\.{1,}$', '' + # Lastly, truncate to 255 max characters: 241 = 255 - 14 (for build number and delimiter, ex: 20250910.13 • ) + $commitMessage = $commitMessage.Substring(0, [Math]::Min($commitMessage.Length, 241)) + Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" + displayName: 🟣 Set run name via source branch commit message + # Name is required to reference the variables created within this build step in other stages. 
+ name: SetRunName - ${{ if eq(parameters.createVSInsertion, true) }}: # The convertToJson expression in AzDO creates "pretty" JSON with line breaks and indentation. # To simplify passing this JSON to scripts, we collapse it to a single line. @@ -79,7 +82,7 @@ jobs: /p:DotNetSignType=$(_SignType) /p:TeamName=$(_TeamName) /p:DotNetPublishUsingPipelines=true - /p:OfficialBuildId=$(Build.BuildNumber) + /p:OfficialBuildId=$(OfficialBuildId) /p:StabilizePackageVersion=${{ parameters.stabilizePackageVersion }} displayName: 🟣 Build solution # Name is required to reference the variables created within this build step in other stages. diff --git a/eng/pipelines/templates/jobs/workload-insertion-job.yml b/eng/pipelines/templates/jobs/workload-insertion-job.yml index 59bf03ebb..93698794e 100644 --- a/eng/pipelines/templates/jobs/workload-insertion-job.yml +++ b/eng/pipelines/templates/jobs/workload-insertion-job.yml @@ -19,6 +19,8 @@ jobs: templateContext: type: buildJob variables: + - name: OfficialBuildIdFromJob + value: $[stageDependencies.Build.BuildRepo.outputs['SetRunName.OfficialBuildIdFromJob']] - name: PrimaryVSComponentJsonValues value: $[stageDependencies.Build.BuildRepo.outputs['BuildSolution.PrimaryVSComponentJsonValues']] - name: SecondaryVSComponentJsonValues @@ -90,6 +92,7 @@ jobs: topicBranch: ${{ parameters.vsTopicBranch }} # PrimaryVSComponentJsonValues variable is set during create-workload-drops.ps1 in Microsoft.NET.Workloads.Vsman.csproj. componentJsonValues: $(PrimaryVSComponentJsonValues) + officialBuildId: $(OfficialBuildIdFromJob) - ${{ each secondaryInsertionBranch in parameters.secondaryVsInsertionBranches }}: # One PR is created per branch defined at the top of official.yml in the secondaryVsInsertionBranches parameter. 
- template: /eng/pipelines/templates/steps/workload-insertion-steps.yml@self @@ -97,4 +100,5 @@ jobs: targetBranch: ${{ secondaryInsertionBranch }} topicBranch: ${{ parameters.vsTopicBranch }} # SecondaryVSComponentJsonValues variable is set during create-workload-drops.ps1 in Microsoft.NET.Workloads.Vsman.csproj. - componentJsonValues: $(SecondaryVSComponentJsonValues) \ No newline at end of file + componentJsonValues: $(SecondaryVSComponentJsonValues) + officialBuildId: $(OfficialBuildIdFromJob) \ No newline at end of file diff --git a/eng/pipelines/templates/steps/workload-insertion-steps.yml b/eng/pipelines/templates/steps/workload-insertion-steps.yml index d974467ae..372234ae7 100644 --- a/eng/pipelines/templates/steps/workload-insertion-steps.yml +++ b/eng/pipelines/templates/steps/workload-insertion-steps.yml @@ -2,6 +2,7 @@ parameters: targetBranch: main topicBranch: '' componentJsonValues: '' + officialBuildId: '' steps: # This allows setting the InsertionTopicBranch variable dynamically. 
@@ -16,7 +17,7 @@ steps: # Loosely based on: # https://devdiv.visualstudio.com/Engineering/_git/MicroBuild?path=/src/Tasks/InsertVsPayload/plugin.ps1&version=GCf10314b240d5f3d0899e80eb2feb5dc33b5f8c20&line=276&lineEnd=280&lineStartColumn=1&lineEndColumn=1&lineStyle=plain&_a=contents if ($topicBranch -eq '|temp|') { - $topicBranch = 'temp/$(_TeamName)/${{ parameters.targetBranch }}/$(Build.BuildNumber)-$(System.JobAttempt)' + $topicBranch = 'temp/$(_TeamName)/${{ parameters.targetBranch }}/${{ parameters.officialBuildId }}-$(System.JobAttempt)' } Write-Host "InsertionTopicBranch: $topicBranch" Write-Host "##vso[task.setvariable variable=InsertionTopicBranch]$topicBranch" @@ -34,7 +35,7 @@ steps: InsertionTopicBranch: $(InsertionTopicBranch) TeamName: $(_TeamName) TeamEmail: dotnetdevexcli@microsoft.com - InsertionPayloadName: 'DotNet-SDK-Workloads ($(Build.SourceBranchName):$(Build.BuildNumber))' + InsertionPayloadName: 'DotNet-SDK-Workloads ($(Build.SourceBranchName):${{ parameters.officialBuildId }})' ComponentJsonValues: ${{ parameters.componentJsonValues }} AllowTopicBranchUpdate: true # This is the name of our DevDiv alias. From 279af9ce0e393afde57a6c48131f0b932a667cdf Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Wed, 1 Oct 2025 14:16:02 -0700 Subject: [PATCH 113/118] Keeping all previous changes but commenting out the actual build number update lines. --- eng/pipelines/official.yml | 4 +++- eng/pipelines/templates/jobs/workload-build.yml | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 1fa8a5b79..700e80704 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -6,7 +6,9 @@ pr: none # Required to set a custom run name within workload-build.yml. 
# See: https://learn.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/pipeline -appendCommitMessageToRunName: false +# ========================== +# TODO: Temporarily commenting out updating the build number. +# appendCommitMessageToRunName: false parameters: - name: sourceBranch diff --git a/eng/pipelines/templates/jobs/workload-build.yml b/eng/pipelines/templates/jobs/workload-build.yml index f9f9b2a48..172d4ff20 100644 --- a/eng/pipelines/templates/jobs/workload-build.yml +++ b/eng/pipelines/templates/jobs/workload-build.yml @@ -43,7 +43,9 @@ jobs: $commitMessage = "$(git log -1 --pretty=%s)".Trim() -replace '["\/:<>\\|?@*]|\.{1,}$', '' # Lastly, truncate to 255 max characters: 241 = 255 - 14 (for build number and delimiter, ex: 20250910.13 • ) $commitMessage = $commitMessage.Substring(0, [Math]::Min($commitMessage.Length, 241)) - Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" + # ========================== + # TODO: Temporarily commenting out updating the build number. + # Write-Host "##vso[build.updatebuildnumber]$(Build.BuildNumber) • $commitMessage" displayName: 🟣 Set run name via source branch commit message # Name is required to reference the variables created within this build step in other stages. name: SetRunName From f67da129b6e1eda0419d662cbeccff5f819c0e52 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Fri, 3 Oct 2025 14:16:48 -0700 Subject: [PATCH 114/118] Add test as a pre-release label value. 
--- eng/pipelines/official.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 0a9f965e3..4d37b3a34 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -116,6 +116,7 @@ parameters: - rc - alpha - rtm + - test - name: setPreReleaseVersionIteration displayName: Set pre-release version iteration type: string From 710636d9f4e5ab3aeac86c8a26376cfe1c9ad6dc Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 14 Oct 2025 14:01:18 -0700 Subject: [PATCH 115/118] Changed networkIsolationPolicy to Preferred. --- eng/pipelines/official.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 42c96f3bc..fbac11dca 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -162,6 +162,10 @@ resources: extends: template: v1/1ES.Official.PipelineTemplate.yml@1esPipelines parameters: + settings: + # Default is 'Preferred,CFSClean' which blocks NuGet.org for publishing. + # Docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-build/cloudbuild/security/1espt-network-isolation + networkIsolationPolicy: Preferred sdl: # Docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/sdlanalysis/sourceanalysisstage#my-pipeline-uses-multiple-repositories-how-to-ensure-that-sdl-sources-stage-is-injected-for-all-the-repositories sourceRepositoriesToScan: From f6c1bedfb9488c02fda3ad1c4cbe339cf5da1c98 Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 14 Oct 2025 15:06:34 -0700 Subject: [PATCH 116/118] Added comment for release job type. 
--- eng/pipelines/official.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index fbac11dca..383a378eb 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -229,6 +229,7 @@ extends: image: 1es-windows-2022 os: windows templateContext: + # Docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/releasepipelines/overview type: releaseJob isProduction: true strategy: From e94bbc2991e5b465c5eee6cd99257c3e1197ff5b Mon Sep 17 00:00:00 2001 From: Michael Yanni Date: Tue, 14 Oct 2025 15:44:29 -0700 Subject: [PATCH 117/118] Accidentally set this to Preferred (and the branch name) instead of Permissive. --- eng/pipelines/official.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/eng/pipelines/official.yml b/eng/pipelines/official.yml index 383a378eb..d621f9644 100644 --- a/eng/pipelines/official.yml +++ b/eng/pipelines/official.yml @@ -163,9 +163,9 @@ extends: template: v1/1ES.Official.PipelineTemplate.yml@1esPipelines parameters: settings: - # Default is 'Preferred,CFSClean' which blocks NuGet.org for publishing. + # Default is 'Permissive,CFSClean' which blocks NuGet.org for publishing. 
# Docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-build/cloudbuild/security/1espt-network-isolation - networkIsolationPolicy: Preferred + networkIsolationPolicy: Permissive sdl: # Docs: https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/sdlanalysis/sourceanalysisstage#my-pipeline-uses-multiple-repositories-how-to-ensure-that-sdl-sources-stage-is-injected-for-all-the-repositories sourceRepositoriesToScan: From 0aabde85a1162dedf67743b57b780f187ba83956 Mon Sep 17 00:00:00 2001 From: Marc Paine Date: Fri, 24 Oct 2025 09:01:29 -0700 Subject: [PATCH 118/118] hardcode the .111 build so unblock runtime insertion --- eng/download-workloads.ps1 | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/eng/download-workloads.ps1 b/eng/download-workloads.ps1 index 33aba436a..e35ecfda2 100644 --- a/eng/download-workloads.ps1 +++ b/eng/download-workloads.ps1 @@ -77,10 +77,8 @@ $versionDetails | ForEach-Object { 'gather-drop' '--asset-filter' $assetFilter - '--repo' - $_.Uri - '--commit' - $_.Sha + '--id' + '288422' '--output-dir' $workloadPath '--include-released'