diff --git a/tools/pipelines/build-api-markdown-documenter.yml b/tools/pipelines/build-api-markdown-documenter.yml index f961216f9331..7a33609a2014 100644 --- a/tools/pipelines/build-api-markdown-documenter.yml +++ b/tools/pipelines/build-api-markdown-documenter.yml @@ -85,7 +85,7 @@ extends: publish: ${{ variables.publish }} publishOverride: ${{ parameters.publishOverride }} releaseBuildOverride: ${{ parameters.releaseBuildOverride }} - buildDirectory: tools/api-markdown-documenter + buildDirectory: ${{ variables.FluidFrameworkDirectory }}/tools/api-markdown-documenter packageManagerInstallCommand: 'pnpm i --frozen-lockfile' packageManager: pnpm testCoverage: ${{ variables.testCoverage }} diff --git a/tools/pipelines/build-benchmark-tool.yml b/tools/pipelines/build-benchmark-tool.yml index 680676b8e588..cf44544f2d2b 100644 --- a/tools/pipelines/build-benchmark-tool.yml +++ b/tools/pipelines/build-benchmark-tool.yml @@ -91,7 +91,7 @@ extends: packageManagerInstallCommand: 'pnpm i --frozen-lockfile' testCoverage: ${{ variables.testCoverage }} packageManager: pnpm - buildDirectory: tools/benchmark + buildDirectory: ${{ variables.FluidFrameworkDirectory }}/tools/benchmark tagName: benchmark taskBuild: build taskBuildDocs: true diff --git a/tools/pipelines/build-build-common.yml b/tools/pipelines/build-build-common.yml index 32e057f61f8e..c377048710f4 100644 --- a/tools/pipelines/build-build-common.yml +++ b/tools/pipelines/build-build-common.yml @@ -88,7 +88,7 @@ extends: publishOverride: ${{ parameters.publishOverride }} releaseBuildOverride: ${{ parameters.releaseBuildOverride }} buildToolsVersionToInstall: ${{ parameters.buildToolsVersionToInstall }} - buildDirectory: common/build/build-common + buildDirectory: ${{ variables.FluidFrameworkDirectory }}/common/build/build-common tagName: build-common packageManagerInstallCommand: 'pnpm i --frozen-lockfile' packageManager: pnpm diff --git a/tools/pipelines/build-build-tools.yml b/tools/pipelines/build-build-tools.yml 
index b40c7db03f71..15b7dcb38d97 100644 --- a/tools/pipelines/build-build-tools.yml +++ b/tools/pipelines/build-build-tools.yml @@ -105,7 +105,7 @@ extends: packageManagerInstallCommand: 'pnpm i --frozen-lockfile' packageManager: pnpm testCoverage: ${{ variables.testCoverage }} - buildDirectory: build-tools + buildDirectory: ${{ variables.FluidFrameworkDirectory }}/build-tools tagName: build-tools isReleaseGroup: true taskBuild: build diff --git a/tools/pipelines/build-bundle-size-artifacts.yml b/tools/pipelines/build-bundle-size-artifacts.yml index 0e61aec12beb..361dd74ef813 100644 --- a/tools/pipelines/build-bundle-size-artifacts.yml +++ b/tools/pipelines/build-bundle-size-artifacts.yml @@ -30,7 +30,7 @@ extends: testCoverage: false packageManagerInstallCommand: 'pnpm i --frozen-lockfile' packageManager: pnpm - buildDirectory: . + buildDirectory: ${{ variables.FluidFrameworkDirectory }} tagName: bundle-and-code-coverage-artifacts # This pipeline doesn't generate production artifacts but the build-npm-package template is too intertwined with # that scenario, which requires that the pipeline runs in the 1ES pool. diff --git a/tools/pipelines/build-client.yml b/tools/pipelines/build-client.yml index 1b10e07d3db9..eb7dd4b00387 100644 --- a/tools/pipelines/build-client.yml +++ b/tools/pipelines/build-client.yml @@ -192,7 +192,7 @@ extends: packageManager: pnpm testCoverage: ${{ eq(variables['System.TeamProject'], 'public' ) }} # disabling code coverage for internal project since we don't use it reportCodeCoverageComparison: ${{ eq(variables['Build.Reason'], 'PullRequest') }} - buildDirectory: . 
+ buildDirectory: ${{ variables.FluidFrameworkDirectory }} tagName: client isReleaseGroup: true poolBuild: Large-eastus2 diff --git a/tools/pipelines/build-common-utils.yml b/tools/pipelines/build-common-utils.yml index 6ff03c9ca7ad..554d014080f2 100644 --- a/tools/pipelines/build-common-utils.yml +++ b/tools/pipelines/build-common-utils.yml @@ -96,7 +96,7 @@ extends: publishOverride: ${{ parameters.publishOverride }} releaseBuildOverride: ${{ parameters.releaseBuildOverride }} buildToolsVersionToInstall: ${{ parameters.buildToolsVersionToInstall }} - buildDirectory: common/lib/common-utils + buildDirectory: ${{ variables.FluidFrameworkDirectory }}/common/lib/common-utils tagName: common-utils packageManagerInstallCommand: 'pnpm i --frozen-lockfile' packageManager: pnpm diff --git a/tools/pipelines/build-docs.yml b/tools/pipelines/build-docs.yml index 502d1b8bad24..adc7a0f4135b 100644 --- a/tools/pipelines/build-docs.yml +++ b/tools/pipelines/build-docs.yml @@ -97,12 +97,13 @@ stages: displayName: 'Check Version Deployment Condition' steps: - checkout: self + path: $(FluidFrameworkDirectory) submodules: false clean: true - template: /tools/pipelines/templates/include-install-build-tools.yml@self parameters: - buildDirectory: $(Build.SourcesDirectory) + buildDirectory: $(Pipeline.Workspace)/$(FluidFrameworkDirectory) - task: Bash@3 name: SetVersion displayName: 'Set Build Version' @@ -110,7 +111,7 @@ stages: VERSION_BUILDNUMBER: $(Build.BuildNumber) inputs: targetType: 'inline' - workingDirectory: $(Build.SourcesDirectory) + workingDirectory: $(Pipeline.Workspace)/$(FluidFrameworkDirectory) script: | # Generate the build version. Sets the environment variables version, codeVersion, and isLatest. 
flub generate buildVersion @@ -119,7 +120,7 @@ stages: displayName: 'Check Version Deployment Condition' inputs: targetType: 'inline' - workingDirectory: $(Build.SourcesDirectory) + workingDirectory: $(Pipeline.Workspace)/$(FluidFrameworkDirectory) script: | # Check if the version is the latest minor of its corresponding major version series # Sets variable shouldDeploy to true if the version is the latest minor and false otherwise @@ -172,14 +173,14 @@ stages: - template: /tools/pipelines/templates/include-install-pnpm.yml@self parameters: - buildDirectory: $(Build.SourcesDirectory)/docs + buildDirectory: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs - task: Bash@3 displayName: Install dependencies retryCountOnTaskFailure: 4 inputs: targetType: 'inline' - workingDirectory: $(Build.SourcesDirectory)/docs + workingDirectory: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs script: | set -eu -o pipefail pnpm i --frozen-lockfile @@ -188,7 +189,7 @@ stages: displayName: npm run build inputs: command: 'custom' - workingDir: $(Build.SourcesDirectory)/docs + workingDir: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs customCommand: 'run build' # Run the tests @@ -196,20 +197,20 @@ stages: displayName: Run tests inputs: command: 'custom' - workingDir: $(Build.SourcesDirectory)/docs + workingDir: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs customCommand: 'run test' - task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0 displayName: 'Generate SBOM' inputs: - BuildDropPath: $(Build.SourcesDirectory)/docs/build + BuildDropPath: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs/build PackageName: 'fluidframework-docs' PackageVersion: '$(Build.BuildId)' - task: PublishPipelineArtifact@1 displayName: 'Publish site build artifact' inputs: - targetPath: '$(Build.SourcesDirectory)/docs/build' + targetPath: '$(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs/build' artifactName: 'fluidframework-docs' 
publishLocation: 'pipeline' @@ -262,7 +263,7 @@ stages: inputs: GdnBreakPolicyMinSev: Warning GdnBreakAllTools: true - GdnBreakBaselineFiles: '$(Build.SourcesDirectory)/docs/.gdnbaselines' + GdnBreakBaselineFiles: '$(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs/.gdnbaselines' GdnBreakGdnToolESLint: true GdnBreakGdnToolESLintSeverity: Warning GdnBreakPolicy: M365 @@ -289,6 +290,7 @@ stages: continueOnError: true steps: - checkout: self + path: $(FluidFrameworkDirectory) submodules: false clean: true @@ -296,14 +298,14 @@ stages: - template: /tools/pipelines/templates/include-install-pnpm.yml@self parameters: - buildDirectory: $(Build.SourcesDirectory)/docs + buildDirectory: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs - task: Bash@3 displayName: Install dependencies retryCountOnTaskFailure: 4 inputs: targetType: 'inline' - workingDirectory: $(Build.SourcesDirectory)/docs + workingDirectory: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs script: | set -eu -o pipefail # Ensure it's easy to tell which workspace this is running in by inspecting the logs @@ -314,14 +316,14 @@ stages: displayName: Build inputs: command: 'custom' - workingDir: $(Build.SourcesDirectory)/docs + workingDir: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs customCommand: 'run build' - task: Npm@1 displayName: Validate links inputs: command: 'custom' - workingDir: $(Build.SourcesDirectory)/docs + workingDir: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs customCommand: 'run ci:check-links' - stage: deploy @@ -349,14 +351,14 @@ stages: inputs: source: current artifact: fluidframework-docs - path: '$(Build.SourcesDirectory)/docs/build' + path: '$(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs/build' - task: AzureStaticWebApp@0 displayName: 'Deploy website to ASWA' inputs: skip_app_build: true # site was built in previous stage skip_api_build: true # api is written in js, no build needed - cwd: $(Build.SourcesDirectory) + cwd: 
$(Pipeline.Workspace)/$(FluidFrameworkDirectory) app_location: 'docs/build' api_location: 'docs/api' output_location: '' diff --git a/tools/pipelines/build-eslint-config-fluid.yml b/tools/pipelines/build-eslint-config-fluid.yml index 56b529c94e0e..af8d1eb21e02 100644 --- a/tools/pipelines/build-eslint-config-fluid.yml +++ b/tools/pipelines/build-eslint-config-fluid.yml @@ -90,7 +90,7 @@ extends: publishOverride: ${{ parameters.publishOverride }} releaseBuildOverride: ${{ parameters.releaseBuildOverride }} buildToolsVersionToInstall: ${{ parameters.buildToolsVersionToInstall }} - buildDirectory: common/build/eslint-config-fluid + buildDirectory: ${{ variables.FluidFrameworkDirectory }}/common/build/eslint-config-fluid tagName: eslint-config-fluid packageManagerInstallCommand: 'pnpm i --frozen-lockfile' packageManager: pnpm diff --git a/tools/pipelines/build-eslint-plugin-fluid.yml b/tools/pipelines/build-eslint-plugin-fluid.yml index 9ea362f5f53e..7eefc754e0a6 100644 --- a/tools/pipelines/build-eslint-plugin-fluid.yml +++ b/tools/pipelines/build-eslint-plugin-fluid.yml @@ -90,7 +90,7 @@ extends: publishOverride: ${{ parameters.publishOverride }} releaseBuildOverride: ${{ parameters.releaseBuildOverride }} buildToolsVersionToInstall: ${{ parameters.buildToolsVersionToInstall }} - buildDirectory: common/build/eslint-plugin-fluid + buildDirectory: ${{ variables.FluidFrameworkDirectory }}/common/build/eslint-plugin-fluid tagName: eslint-plugin-fluid packageManagerInstallCommand: 'pnpm i --frozen-lockfile' packageManager: pnpm diff --git a/tools/pipelines/build-protocol-definitions.yml b/tools/pipelines/build-protocol-definitions.yml index 24b29c034bdb..e77e8b960a0d 100644 --- a/tools/pipelines/build-protocol-definitions.yml +++ b/tools/pipelines/build-protocol-definitions.yml @@ -97,7 +97,7 @@ extends: publishOverride: ${{ parameters.publishOverride }} releaseBuildOverride: ${{ parameters.releaseBuildOverride }} buildToolsVersionToInstall: ${{ 
parameters.buildToolsVersionToInstall }} - buildDirectory: common/lib/protocol-definitions + buildDirectory: ${{ variables.FluidFrameworkDirectory }}/common/lib/protocol-definitions packageManagerInstallCommand: 'pnpm i --frozen-lockfile' packageManager: pnpm tagName: protocol-definitions diff --git a/tools/pipelines/build-test-tools.yml b/tools/pipelines/build-test-tools.yml index a96f3f4a990d..eedbad940421 100644 --- a/tools/pipelines/build-test-tools.yml +++ b/tools/pipelines/build-test-tools.yml @@ -84,7 +84,7 @@ extends: publishOverride: ${{ parameters.publishOverride }} releaseBuildOverride: ${{ parameters.releaseBuildOverride }} buildToolsVersionToInstall: ${{ parameters.buildToolsVersionToInstall }} - buildDirectory: tools/test-tools + buildDirectory: ${{ variables.FluidFrameworkDirectory }}/tools/test-tools packageManagerInstallCommand: 'pnpm i --frozen-lockfile' packageManager: pnpm testCoverage: ${{ variables.testCoverage }} diff --git a/tools/pipelines/deploy-website.yml b/tools/pipelines/deploy-website.yml index e50fb545117b..8787a0c33ea6 100644 --- a/tools/pipelines/deploy-website.yml +++ b/tools/pipelines/deploy-website.yml @@ -109,6 +109,7 @@ stages: pool: Large-eastus2 steps: - checkout: self + path: $(FluidFrameworkDirectory) submodules: false clean: true @@ -116,14 +117,14 @@ stages: - template: /tools/pipelines/templates/include-install-pnpm.yml@self parameters: - buildDirectory: $(Build.SourcesDirectory)/docs + buildDirectory: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs - task: Bash@3 displayName: Install dependencies retryCountOnTaskFailure: 4 inputs: targetType: 'inline' - workingDirectory: $(Build.SourcesDirectory)/docs + workingDirectory: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs script: | set -eu -o pipefail # Ensure it's easy to tell which workspace this is running in by inspecting the logs @@ -134,7 +135,7 @@ stages: displayName: npm run build inputs: command: 'custom' - workingDir: $(Build.SourcesDirectory)/docs + 
workingDir: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs customCommand: 'run build' env: INSTRUMENTATION_KEY: $(INSTRUMENTATION_KEY) @@ -145,28 +146,28 @@ stages: displayName: 'Check inline script hashes correctness' inputs: targetType: 'filePath' - workingDirectory: $(Build.SourcesDirectory)/docs - filePath: '$(Build.SourcesDirectory)/docs/validateHashes.sh' + workingDirectory: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs + filePath: '$(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs/validateHashes.sh' # Run the tests - task: Npm@1 displayName: Run tests inputs: command: 'custom' - workingDir: $(Build.SourcesDirectory)/docs + workingDir: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs customCommand: 'run test' - task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0 displayName: 'Generate SBOM' inputs: - BuildDropPath: $(Build.SourcesDirectory)/docs/build + BuildDropPath: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs/build PackageName: 'fluidframework-docs' PackageVersion: '$(Build.BuildId)' - task: PublishPipelineArtifact@1 displayName: 'Publish site build artifact' inputs: - targetPath: '$(Build.SourcesDirectory)/docs/build' + targetPath: '$(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs/build' artifactName: 'fluidframework-docs' publishLocation: 'pipeline' @@ -219,7 +220,7 @@ stages: inputs: GdnBreakPolicyMinSev: Warning GdnBreakAllTools: true - GdnBreakBaselineFiles: '$(Build.SourcesDirectory)/docs/.gdnbaselines' + GdnBreakBaselineFiles: '$(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs/.gdnbaselines' GdnBreakGdnToolESLint: true GdnBreakGdnToolESLintSeverity: Warning GdnBreakPolicy: M365 @@ -246,6 +247,7 @@ stages: continueOnError: true steps: - checkout: self + path: $(FluidFrameworkDirectory) submodules: false clean: true @@ -253,14 +255,14 @@ stages: - template: /tools/pipelines/templates/include-install-pnpm.yml@self parameters: - buildDirectory: 
$(Build.SourcesDirectory)/docs + buildDirectory: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs - task: Bash@3 displayName: Install dependencies retryCountOnTaskFailure: 4 inputs: targetType: 'inline' - workingDirectory: $(Build.SourcesDirectory)/docs + workingDirectory: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs script: | set -eu -o pipefail # Ensure it's easy to tell which workspace this is running in by inspecting the logs @@ -271,14 +273,14 @@ stages: displayName: Build inputs: command: 'custom' - workingDir: $(Build.SourcesDirectory)/docs + workingDir: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs customCommand: 'run build' - task: Npm@1 displayName: Validate links inputs: command: 'custom' - workingDir: $(Build.SourcesDirectory)/docs + workingDir: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs customCommand: 'run ci:check-links' - stage: deploy @@ -298,6 +300,7 @@ stages: displayName: 'Deploy website' steps: - checkout: self + path: $(FluidFrameworkDirectory) submodules: false clean: true @@ -306,14 +309,14 @@ stages: inputs: source: current artifact: fluidframework-docs - path: '$(Build.SourcesDirectory)/docs/build' + path: '$(Pipeline.Workspace)/$(FluidFrameworkDirectory)/docs/build' - task: AzureStaticWebApp@0 displayName: 'Deploy website to ASWA' inputs: skip_app_build: true # site was built in previous stage skip_api_build: true # api is written in js, no build needed - cwd: $(Build.SourcesDirectory) + cwd: $(Pipeline.Workspace)/$(FluidFrameworkDirectory) app_location: 'docs/build' api_location: 'docs/api' output_location: '' diff --git a/tools/pipelines/publish-api-model-artifact.yml b/tools/pipelines/publish-api-model-artifact.yml index 26d8d77f4eb3..60d8c5989e67 100644 --- a/tools/pipelines/publish-api-model-artifact.yml +++ b/tools/pipelines/publish-api-model-artifact.yml @@ -100,12 +100,13 @@ stages: displayName: 'Check Version Deployment Condition' steps: - checkout: self + path: $(FluidFrameworkDirectory) 
submodules: false clean: true - template: /tools/pipelines/templates/include-install-build-tools.yml@self parameters: - buildDirectory: $(Build.SourcesDirectory) + buildDirectory: $(Pipeline.Workspace)/$(FluidFrameworkDirectory) - task: Bash@3 name: SetVersion displayName: 'Set Build Version' @@ -113,7 +114,7 @@ stages: VERSION_BUILDNUMBER: $(Build.BuildNumber) inputs: targetType: 'inline' - workingDirectory: $(Build.SourcesDirectory) + workingDirectory: $(Pipeline.Workspace)/$(FluidFrameworkDirectory) script: | # Generate the build version. Sets the environment variables version, codeVersion, and isLatest. flub generate buildVersion @@ -122,7 +123,7 @@ stages: displayName: 'Check Version Deployment Condition' inputs: targetType: 'inline' - workingDirectory: $(Build.SourcesDirectory) + workingDirectory: $(Pipeline.Workspace)/$(FluidFrameworkDirectory) script: | # Extract version without build number suffix VERSION_TRIMMED=$(echo $(SetVersion.version) | sed 's/-[0-9]*//') @@ -174,7 +175,7 @@ stages: inputs: SourceFolder: $(Pipeline.Workspace) Contents: '**/*.api.json' - TargetFolder: '$(Build.SourcesDirectory)/_api-extractor-temp' + TargetFolder: '$(Pipeline.Workspace)/$(FluidFrameworkDirectory)/_api-extractor-temp' OverWrite: false flattenFolders: true CleanTargetFolder: true @@ -182,13 +183,13 @@ stages: - task: PublishPipelineArtifact@1 displayName: 'Publish api-extractor JSON' inputs: - targetPath: '$(Build.SourcesDirectory)/_api-extractor-temp' + targetPath: '$(Pipeline.Workspace)/$(FluidFrameworkDirectory)/_api-extractor-temp' artifactName: 'api-extractor-combined' publishLocation: 'pipeline' - task: ArchiveFiles@2 inputs: - rootFolderOrFile: '$(Build.SourcesDirectory)/_api-extractor-temp' + rootFolderOrFile: '$(Pipeline.Workspace)/$(FluidFrameworkDirectory)/_api-extractor-temp' includeRootFolder: false archiveType: 'tar' # Options: zip, 7z, tar, wim tarCompression: 'gz' # Optional. 
Options: gz, bz2, xz, none diff --git a/tools/pipelines/repo-policy-check.yml b/tools/pipelines/repo-policy-check.yml index 94266f648220..00b023653844 100644 --- a/tools/pipelines/repo-policy-check.yml +++ b/tools/pipelines/repo-policy-check.yml @@ -19,6 +19,18 @@ variables: - name: pnpmStorePath value: $(Pipeline.Workspace)/.pnpm-store - group: ado-feeds +# ADO changes the value of Build.SourcesDirectory depending on whether 1 vs. many repositories are checked out. +# Using 's' is consistent with the behavior when multiple repositories are checked out and allows for more +# consistent build scripts. See this documentation for more details: +# https://learn.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#checkout-path +# A relative path is required in some places (like "checkout" steps). To get the absolute path, concatenate it +# with the predefined variable "Pipeline.Workspace", e.g.: '$(Pipeline.Workspace)/${{ variables.FluidFrameworkDirectory }}' +- name: consistentSourcesDirectory + value: 's' +- name: FluidFrameworkDirectory + value: '${{ variables.consistentSourcesDirectory }}/FluidFramework' +- name: FFPipelineHostDirectory + value: '${{ variables.consistentSourcesDirectory }}/ff_pipeline_host' # The `resources` specify the location and version of the 1ES PT. 
resources: @@ -62,16 +74,31 @@ extends: jobs: - job: run_policy_check steps: + - task: Bash@3 + displayName: Variables + inputs: + targetType: 'inline' + workingDirectory: $(Pipeline.Workspace) + script: | + set -eu -o pipefail + echo " + Variables: + consistentSourcesDirectory=${{ variables.consistentSourcesDirectory }} + FluidFrameworkDirectory=${{ variables.FluidFrameworkDirectory }} + FFPipelineHostDirectory=${{ variables.FFPipelineHostDirectory }} + " + - checkout: self + path: ${{ variables.FluidFrameworkDirectory }} - template: /tools/pipelines/templates/include-use-node-version.yml@self - template: /tools/pipelines/templates/include-install-pnpm.yml@self parameters: - buildDirectory: . + buildDirectory: ${{ variables.FluidFrameworkDirectory }} - task: Bash@3 displayName: Install root dependencies inputs: targetType: 'inline' - workingDirectory: . + workingDirectory: $(Pipeline.Workspace)/${{ variables.FluidFrameworkDirectory }} script: | set -eu -o pipefail # We only need to install the root dependencies diff --git a/tools/pipelines/server-gitrest.yml b/tools/pipelines/server-gitrest.yml index 38b657b11f2b..19bf396c61de 100644 --- a/tools/pipelines/server-gitrest.yml +++ b/tools/pipelines/server-gitrest.yml @@ -107,9 +107,9 @@ extends: buildToolsVersionToInstall: ${{ parameters.buildToolsVersionToInstall }} dockerBuildBumpsVersion: true packageManagerInstallCommand: 'pnpm config set recursive-install false ; pnpm i ; pnpm config set recursive-install true' - additionalBuildArguments: --build-context root=$(Build.SourcesDirectory) + additionalBuildArguments: --build-context root=$(Pipeline.Workspace)/${{ variables.FluidFrameworkDirectory }} packageManager: pnpm - buildDirectory: server/gitrest + buildDirectory: ${{ variables.FluidFrameworkDirectory }}/server/gitrest containerName: fluidframework/routerlicious/gitrest test: test tagName: gitrest diff --git a/tools/pipelines/server-gitssh.yml b/tools/pipelines/server-gitssh.yml index 8d65af634525..b2256c5e055f 
100644 --- a/tools/pipelines/server-gitssh.yml +++ b/tools/pipelines/server-gitssh.yml @@ -86,7 +86,7 @@ extends: shouldReleaseDockerImage: ${{ variables.releaseImage }} shouldPublishNpmPackages: ${{ variables.publish }} buildToolsVersionToInstall: ${{ parameters.buildToolsVersionToInstall }} - buildDirectory: server/gitssh + buildDirectory: ${{ variables.FluidFrameworkDirectory }}/server/gitssh containerName: fluidframework/routerlicious/gitssh setVersion: false enableDockerImagePull: false diff --git a/tools/pipelines/server-historian.yml b/tools/pipelines/server-historian.yml index 39257573bf8e..55c4c555f87a 100644 --- a/tools/pipelines/server-historian.yml +++ b/tools/pipelines/server-historian.yml @@ -104,13 +104,13 @@ extends: shouldPublishNpmPackages: ${{ variables.publish }} releaseKind: ${{ parameters.releaseKind }} buildToolsVersionToInstall: ${{ parameters.buildToolsVersionToInstall }} - buildDirectory: server/historian + buildDirectory: ${{ variables.FluidFrameworkDirectory }}/server/historian containerName: fluidframework/routerlicious/historian dockerBuildBumpsVersion: true # We need to install only the root dependencies; historian has native deps that don't install in CI since we use # Docker to do the actual build in CI. We need the root dependencies so setting package versions works. 
packageManagerInstallCommand: 'pnpm install --workspace-root' - additionalBuildArguments: --build-context root=$(Build.SourcesDirectory) + additionalBuildArguments: --build-context root=$(Pipeline.Workspace)/${{ variables.FluidFrameworkDirectory }} packageManager: pnpm test: test tagName: historian diff --git a/tools/pipelines/server-routerlicious.yml b/tools/pipelines/server-routerlicious.yml index 59da4e24566e..d8ad4b647156 100644 --- a/tools/pipelines/server-routerlicious.yml +++ b/tools/pipelines/server-routerlicious.yml @@ -123,7 +123,7 @@ extends: releaseKind: ${{ parameters.releaseKind }} buildToolsVersionToInstall: ${{ parameters.buildToolsVersionToInstall }} interdependencyRange: ${{ parameters.interdependencyRange }} - buildDirectory: server/routerlicious + buildDirectory: ${{ variables.FluidFrameworkDirectory }}/server/routerlicious containerName: fluidframework/routerlicious/server buildNumberInPatch: false dockerBuildBumpsVersion: true @@ -143,5 +143,5 @@ extends: - prettier - check:versions - generate:packageList - additionalBuildArguments: --build-context root=$(Build.SourcesDirectory) + additionalBuildArguments: --build-context root=$(Pipeline.Workspace)/${{ variables.FluidFrameworkDirectory }} enableDockerImagePull: false diff --git a/tools/pipelines/templates/build-docker-service.yml b/tools/pipelines/templates/build-docker-service.yml index 07708549cc77..436bf99e8e14 100644 --- a/tools/pipelines/templates/build-docker-service.yml +++ b/tools/pipelines/templates/build-docker-service.yml @@ -191,6 +191,7 @@ extends: steps: # Setup - checkout: self + path: $(FluidFrameworkDirectory) clean: true lfs: false submodules: false @@ -199,7 +200,7 @@ extends: displayName: Parameters inputs: targetType: 'inline' - workingDirectory: ${{ parameters.buildDirectory }} + workingDirectory: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} script: | # Note: deliberately not using `set -eu -o pipefail` because this script leverages the return code of grep # 
even in an error case @@ -241,6 +242,9 @@ extends: containerTagSuffix=$(containerTagSuffix) release=$(release) shouldPublish=$(shouldPublish) + consistentSourcesDirectory=$(consistentSourcesDirectory) + FluidFrameworkDirectory=$(FluidFrameworkDirectory) + FFPipelineHostDirectory=$(FFPipelineHostDirectory) " if [[ "$(release)" == "release" ]]; then @@ -302,7 +306,7 @@ extends: retryCountOnTaskFailure: 4 inputs: targetType: 'inline' - workingDirectory: ${{ parameters.buildDirectory }} + workingDirectory: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} script: | set -eu -o pipefail ${{ parameters.packageManagerInstallCommand }} @@ -354,10 +358,10 @@ extends: displayName: Copy CredScan suppressions file for container builds inputs: targetType: 'inline' - workingDirectory: $(Build.SourcesDirectory) + workingDirectory: $(Pipeline.Workspace)/$(FluidFrameworkDirectory) script: | set -eu -o pipefail - cp ./CredScanSuppressions.json ${{ parameters.buildDirectory }} + cp ./CredScanSuppressions.json $(Pipeline.Workspace)/${{ parameters.buildDirectory }} # The GitSSH Dockerfile does not have a 'base' target nor does it run pack/lint/test/docs tasks, so skip # all that for it. 
This feels hacky but it's a simple solution for now to the issue of trying to use variables @@ -366,8 +370,8 @@ extends: # Build base image where we can run pack/lint/test/docs tasks - task: 1ES.BuildContainerImage@1 inputs: - dockerfile: ${{ parameters.buildDirectory }}/Dockerfile - context: ${{ parameters.buildDirectory }} + dockerfile: $(Pipeline.Workspace)/${{ parameters.buildDirectory }}/Dockerfile + context: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} image: $(baseContainerTag) buildArguments: --target base $(DockerBuildArgs.output) useBuildKit: true @@ -410,7 +414,7 @@ extends: STAGING_PATH: $(Build.ArtifactStagingDirectory) inputs: targetType: 'inline' - workingDirectory: ${{ parameters.buildDirectory }} + workingDirectory: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} script: | set -eu -o pipefail flub list --no-private $RELEASE_GROUP --tarball --feed public --outFile $STAGING_PATH/pack/packagePublishOrder-public.txt @@ -507,8 +511,8 @@ extends: # Build final image - task: 1ES.BuildContainerImage@1 inputs: - dockerfile: ${{ parameters.buildDirectory }}/Dockerfile - context: ${{ parameters.buildDirectory }} + dockerfile: $(Pipeline.Workspace)/${{ parameters.buildDirectory }}/Dockerfile + context: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} image: $(containerTag) buildArguments: $(DockerBuildArgs.output) useBuildKit: true @@ -527,7 +531,7 @@ extends: name: ComputeFinalTagList inputs: targetType: 'inline' - workingDirectory: ${{ parameters.buildDirectory }} + workingDirectory: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} script: | set -eu -o pipefail # containerTag should always be pushed @@ -553,7 +557,7 @@ extends: displayName: Prune pnpm store inputs: targetType: 'inline' - workingDirectory: ${{ parameters.buildDirectory }} + workingDirectory: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} script: | set -eu -o pipefail pnpm store prune diff --git a/tools/pipelines/templates/build-npm-client-package.yml 
b/tools/pipelines/templates/build-npm-client-package.yml index 1366a5bdbd71..df9b08a374d0 100644 --- a/tools/pipelines/templates/build-npm-client-package.yml +++ b/tools/pipelines/templates/build-npm-client-package.yml @@ -198,7 +198,6 @@ extends: # Install all dependencies, not just the root ones dependencyInstallCommand: pnpm install --frozen-lockfile - # Install / Build / Test Stage - stage: build displayName: Build Stage @@ -230,6 +229,7 @@ extends: steps: # Setup - checkout: self + path: $(FluidFrameworkDirectory) clean: true lfs: '${{ parameters.checkoutSubmodules }}' submodules: '${{ parameters.checkoutSubmodules }}' @@ -244,7 +244,7 @@ extends: displayName: Parameters inputs: targetType: inline - workingDirectory: '${{ parameters.buildDirectory }}' + workingDirectory: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' script: | # Note: deliberately not using `set -eu -o pipefail` because this script leverages the return code of grep # even in an error case @@ -271,6 +271,9 @@ extends: TestResultDirs=${{ convertToJson(parameters.testResultDirs) }} Variables: + consistentSourcesDirectory=$(consistentSourcesDirectory) + FluidFrameworkDirectory=$(FluidFrameworkDirectory) + FFPipelineHostDirectory=$(FFPipelineHostDirectory) pathToTelemetryGenerator=$(pathToTelemetryGenerator) BuildReason=${{ variables['Build.Reason'] }} @@ -364,22 +367,22 @@ extends: displayName: 'npm run webpack' inputs: command: custom - workingDir: '${{ parameters.buildDirectory }}' + workingDir: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' customCommand: 'run webpack' - task: Bash@3 displayName: Archive Build Output Content env: - WORKING_DIRECTORY: '${{ parameters.buildDirectory }}' + WORKING_DIRECTORY: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' inputs: targetType: filePath - workingDirectory: '${{ parameters.buildDirectory }}' - filePath: $(Build.SourcesDirectory)/scripts/pack-build-output.sh + workingDirectory: '$(Pipeline.Workspace)/${{ 
parameters.buildDirectory }}' + filePath: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/scripts/pack-build-output.sh - task: CopyFiles@2 displayName: Copy build_output_archive to artifact staging directory inputs: - sourceFolder: ${{ parameters.buildDirectory }}/build_output_archive + sourceFolder: $(Pipeline.Workspace)/${{ parameters.buildDirectory }}/build_output_archive targetFolder: $(Build.ArtifactStagingDirectory)/build_output_archive # Pack @@ -392,8 +395,8 @@ extends: STAGING_PATH: $(Build.ArtifactStagingDirectory) inputs: targetType: filePath - workingDirectory: '${{ parameters.buildDirectory }}' - filePath: $(Build.SourcesDirectory)/scripts/pack-packages.sh + workingDirectory: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' + filePath: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/scripts/pack-packages.sh # At this point we want to publish the artifact with npm-packed packages, and the one with test files, # but as part of 1ES migration that's now part of templateContext.outputs below. @@ -404,7 +407,7 @@ extends: displayName: 'Calculate bundle sizes' inputs: command: custom - workingDir: '${{ parameters.buildDirectory }}' + workingDir: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' customCommand: 'run bundle-analysis:collect' # Copy files so all artifacts we publish end up under the same parent folder. 
@@ -412,7 +415,7 @@ extends: - task: CopyFiles@2 displayName: Copy bundle size files to artifact staging directory inputs: - sourceFolder: '${{ parameters.buildDirectory }}/artifacts/bundleAnalysis' + sourceFolder: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}/artifacts/bundleAnalysis' targetFolder: $(Build.ArtifactStagingDirectory)/bundleAnalysis # At this point we want to publish the artifact with the bundle size analysis, @@ -428,7 +431,7 @@ extends: TARGET_BRANCH_NAME: '$(targetBranchName)' inputs: command: custom - workingDir: '${{ parameters.buildDirectory }}' + workingDir: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' customCommand: 'run bundle-analysis:run' - ${{ if and(or(eq(variables['Build.Reason'], 'IndividualCI'), eq(variables['Build.Reason'], 'BatchedCI')), eq(variables['System.TeamProject'], 'internal')) }}: @@ -436,7 +439,7 @@ extends: displayName: List report.json inputs: targetType: inline - workingDirectory: '${{ parameters.buildDirectory }}' + workingDirectory: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' script: | set -eu -o pipefail echo "Build Directory is ${{ parameters.buildDirectory }}"; @@ -457,7 +460,7 @@ extends: set -eu -o pipefail echo "Writing the following performance tests results to Aria/Kusto" echo "Report Size:" - ls -la '$(Build.SourcesDirectory)/examples/utils/bundle-size-tests/bundleAnalysis/report.json'; + ls -la '$(Pipeline.Workspace)/$(FluidFrameworkDirectory)/examples/utils/bundle-size-tests/bundleAnalysis/report.json'; npx telemetry-generator --handlerModule "$(pathToTelemetryGeneratorHandlers)/bundleSizeHandler.js" --dir '$(Build.ArtifactStagingDirectory)/bundleAnalysis/@fluid-example/bundle-size-tests'; # Docs @@ -466,7 +469,7 @@ extends: displayName: 'npm run ci:build:docs' inputs: command: custom - workingDir: '${{ parameters.buildDirectory }}' + workingDir: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' customCommand: 'run ci:build:docs' # Copy files so all artifacts we 
publish end up under the same parent folder. @@ -474,7 +477,7 @@ extends: - task: CopyFiles@2 displayName: Copy _api-extractor-temp files to artifact staging directory inputs: - sourceFolder: '${{ parameters.buildDirectory }}/_api-extractor-temp' + sourceFolder: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}/_api-extractor-temp' targetFolder: $(Build.ArtifactStagingDirectory)/_api-extractor-temp # At this point we want to publish the artifact with the _api-extractor-temp folder, @@ -489,7 +492,7 @@ extends: displayName: Reset lockfile inputs: targetType: inline - workingDirectory: '${{ parameters.buildDirectory }}' + workingDirectory: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' script: | set -eu -o pipefail git checkout HEAD -- pnpm-lock.yaml @@ -499,7 +502,7 @@ extends: displayName: Prune pnpm store inputs: targetType: inline - workingDirectory: '${{ parameters.buildDirectory }}' + workingDirectory: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' script: | set -eu -o pipefail pnpm store prune @@ -526,19 +529,19 @@ extends: script: | set -eu -o pipefail echo Generating .env - echo "DEVTOOLS_TELEMETRY_TOKEN=$(devtools-telemetry-key)" >> ./packages/tools/devtools/devtools-browser-extension/.env + echo "DEVTOOLS_TELEMETRY_TOKEN=$(devtools-telemetry-key)" >> $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/packages/tools/devtools/devtools-browser-extension/.env - task: Npm@1 displayName: Build devtools inputs: command: 'custom' - workingDir: ./packages/tools/devtools/devtools-browser-extension/ + workingDir: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/packages/tools/devtools/devtools-browser-extension/ customCommand: 'run webpack' - task: 1ES.PublishPipelineArtifact@1 displayName: Publish Artifact - Devtools Browser Extension inputs: - targetPath: './packages/tools/devtools/devtools-browser-extension/dist/bundle/' + targetPath: 
'$(Pipeline.Workspace)/$(FluidFrameworkDirectory)/packages/tools/devtools/devtools-browser-extension/dist/bundle/' artifactName: 'devtools-extension-bundle_attempt-$(System.JobAttempt)' publishLocation: 'pipeline' @@ -592,7 +595,7 @@ extends: # Absolute path to the folder that contains the source code for the telemetry-generator package, which is # used in a few places in the pipeline to push custom telemetry to Kusto. - name: absolutePathToTelemetryGenerator - value: $(Build.SourcesDirectory)/tools/telemetry-generator + value: $(Pipeline.Workspace)/${{ parameters.buildDirectory }}/tools/telemetry-generator readonly: true # We already run CodeQL in the main build job, so we don't need to run it again here. # Note that we need to disable it in the right way for 1ES pipeline templates, vs manual CodeQL tasks. @@ -604,6 +607,7 @@ extends: steps: # Setup - checkout: self + path: $(FluidFrameworkDirectory) clean: true lfs: '${{ parameters.checkoutSubmodules }}' submodules: '${{ parameters.checkoutSubmodules }}' @@ -636,7 +640,7 @@ extends: - script: | echo "Extracting build output archive contents..." 
- tar --extract --gzip --file $(Build.StagingDirectory)/build_output_archive.tar.gz --directory $(Build.SourcesDirectory) + tar --extract --gzip --file $(Build.StagingDirectory)/build_output_archive.tar.gz --directory $(Pipeline.Workspace)/${{ parameters.buildDirectory }} displayName: Extract Build Output Contents # Set variable startTest if everything is good so far and we'll start running tests, @@ -657,7 +661,7 @@ extends: condition: and(succeededOrFailed(), eq(variables['startTest'], 'true')) inputs: command: custom - workingDir: '${{ parameters.buildDirectory }}' + workingDir: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' customCommand: 'run test:copyresults' # Test - Upload coverage results @@ -669,7 +673,7 @@ extends: condition: and(succeededOrFailed(), eq(variables['startTest'], 'true')) inputs: targetType: 'inline' - workingDirectory: '${{ parameters.buildDirectory }}' + workingDirectory: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' script: | set -eu -o pipefail test -d nyc/report && echo '##vso[task.setvariable variable=ReportDirExists;]true' || echo 'No nyc/report directory' @@ -679,7 +683,7 @@ extends: condition: and(succeededOrFailed(), eq(variables['ReportDirExists'], 'true')) inputs: targetType: 'inline' - workingDirectory: '${{ parameters.buildDirectory }}/nyc/report' + workingDirectory: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}/nyc/report' script: | set -eu -o pipefail sed -e 's/\(filename=\".*[\\/]external .*\)"\(.*\)""/\1\"\2\""/' cobertura-coverage.xml > cobertura-coverage-patched.xml @@ -688,13 +692,13 @@ extends: displayName: Publish Code Coverage condition: and(succeededOrFailed(), eq(variables['ReportDirExists'], 'true')) inputs: - summaryFileLocation: ${{ parameters.buildDirectory }}/nyc/report/cobertura-coverage-patched.xml + summaryFileLocation: $(Pipeline.Workspace)/${{ parameters.buildDirectory }}/nyc/report/cobertura-coverage-patched.xml failIfCoverageEmpty: true - task: CopyFiles@2 displayName: 
Copy code coverage report to artifact staging directory condition: and(succeededOrFailed(), eq(variables['ReportDirExists'], 'true')) inputs: - sourceFolder: '${{ parameters.buildDirectory }}/nyc/report' + sourceFolder: $(Pipeline.Workspace)/${{ parameters.buildDirectory }}/nyc/report targetFolder: $(Build.ArtifactStagingDirectory)/codeCoverageAnalysis - task: Bash@3 displayName: Report Code Coverage Comparison @@ -711,7 +715,7 @@ extends: ADO_CI_BUILD_DEFINITION_ID_PR: 11 inputs: targetType: inline - workingDirectory: '${{ parameters.buildDirectory }}' + workingDirectory: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' script: | set -eu -o pipefail echo "Github Repository Name: $GITHUB_REPOSITORY_NAME" @@ -752,7 +756,7 @@ extends: # Absolute path to the folder that contains the source code for the telemetry-generator package, which is # used in a few places in the pipeline to push custom telemetry to Kusto. - name: absolutePathToTelemetryGenerator - value: $(Build.SourcesDirectory)/tools/telemetry-generator + value: $(Pipeline.Workspace)/${{ parameters.buildDirectory }}/tools/telemetry-generator readonly: true # We already run CodeQL in the main build job, so we don't need to run it again here. # Note that we need to disable it in the right way for 1ES pipeline templates, vs manual CodeQL tasks. @@ -763,6 +767,7 @@ extends: steps: # Setup - checkout: self + path: $(FluidFrameworkDirectory) clean: true lfs: '${{ parameters.checkoutSubmodules }}' submodules: '${{ parameters.checkoutSubmodules }}' @@ -788,7 +793,7 @@ extends: - script: | echo "Extracting build output archive contents..." 
- tar --extract --gzip --file $(Build.StagingDirectory)/build_output_archive.tar.gz --directory $(Build.SourcesDirectory) + tar --extract --gzip --file $(Build.StagingDirectory)/build_output_archive.tar.gz --directory $(Pipeline.Workspace)/${{ parameters.buildDirectory }} displayName: Extract Build Output Contents # Test @@ -809,7 +814,7 @@ extends: displayName: 'npm run test:copyresults' inputs: command: custom - workingDir: '${{ parameters.buildDirectory }}' + workingDir: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' customCommand: 'run test:copyresults' - ${{ if contains(test.name, 'tinylicious') }}: @@ -843,7 +848,7 @@ extends: while IFS= read -r LOG; do # Extract the relative path by removing the source directory prefix # Example: /build/source/packages/test/foo/tinylicious.log -> packages/test/foo/tinylicious.log - RELATIVE_PATH="${LOG#$(Build.SourcesDirectory)/}" + RELATIVE_PATH="${LOG#$(Pipeline.Workspace)/${{ parameters.buildDirectory }}/}" # Extract the directory path and filename separately DIR_PATH=$(dirname "$RELATIVE_PATH") @@ -856,13 +861,13 @@ extends: SAFE_DIR="${DIR_PATH//\//_}" SAFE_NAME="${SAFE_TEST_NAME}-${SAFE_DIR}-${FILENAME}" - TEMP_LOG="$(Build.SourcesDirectory)/$SAFE_NAME" + TEMP_LOG="$(Pipeline.Workspace)/${{ parameters.buildDirectory }}/$SAFE_NAME" cp "$LOG" "$TEMP_LOG" echo "Found tinylicious log at '$LOG'. 
Uploading as '$SAFE_NAME'."; echo "##vso[task.uploadfile]$TEMP_LOG"; FOUND=1 - done < <(find "$(Build.SourcesDirectory)" -maxdepth 5 -name "tinylicious.log" -type f 2>/dev/null) + done < <(find "$(Pipeline.Workspace)/${{ parameters.buildDirectory }}" -maxdepth 5 -name "tinylicious.log" -type f 2>/dev/null) if [ "$FOUND" -eq 0 ]; then echo "##vso[task.logissue type=warning]No tinylicious log files found."; diff --git a/tools/pipelines/templates/build-npm-package.yml b/tools/pipelines/templates/build-npm-package.yml index 2b0b8d5c4d29..ab1e64411e65 100644 --- a/tools/pipelines/templates/build-npm-package.yml +++ b/tools/pipelines/templates/build-npm-package.yml @@ -221,6 +221,7 @@ extends: steps: # Setup - checkout: self + path: $(FluidFrameworkDirectory) clean: true lfs: '${{ parameters.checkoutSubmodules }}' submodules: '${{ parameters.checkoutSubmodules }}' @@ -229,7 +230,7 @@ extends: displayName: Parameters inputs: targetType: inline - workingDirectory: '${{ parameters.buildDirectory }}' + workingDirectory: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' script: | # Note: deliberately not using `set -eu -o pipefail` because this script leverages the return code of grep # even in an error case @@ -256,6 +257,9 @@ extends: TestResultDirs=${{ convertToJson(parameters.testResultDirs) }} Variables: + consistentSourcesDirectory=$(consistentSourcesDirectory) + FluidFrameworkDirectory=$(FluidFrameworkDirectory) + FFPipelineHostDirectory=$(FFPipelineHostDirectory) pathToTelemetryGenerator=$(pathToTelemetryGenerator) BuildReason=${{ variables['Build.Reason'] }} @@ -372,7 +376,7 @@ extends: targetType: inline script: | set -eu -o pipefail - PATH_TO_TINYLICIOUS_LOG=$(Build.SourcesDirectory)/packages/test/test-end-to-end-tests/tinylicious.log; + PATH_TO_TINYLICIOUS_LOG=$(Pipeline.Workspace)/$(FluidFrameworkDirectory)/packages/test/test-end-to-end-tests/tinylicious.log; if [ -f $PATH_TO_TINYLICIOUS_LOG ] ; then echo "Found file at '$PATH_TO_TINYLICIOUS_LOG'. 
Uploading."; echo "##vso[task.uploadfile]$PATH_TO_TINYLICIOUS_LOG"; @@ -393,7 +397,7 @@ extends: condition: and(succeededOrFailed(), eq(variables['startTest'], 'true')) inputs: targetType: 'inline' - workingDirectory: '${{ parameters.buildDirectory }}' + workingDirectory: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' script: | set -eu -o pipefail test -d nyc/report && echo '##vso[task.setvariable variable=ReportDirExists;]true' || echo 'No nyc/report directory' @@ -402,7 +406,7 @@ extends: condition: and(succeededOrFailed(), eq(variables['ReportDirExists'], 'true')) inputs: targetType: 'inline' - workingDirectory: '${{ parameters.buildDirectory }}/nyc/report' + workingDirectory: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}/nyc/report' script: | set -eu -o pipefail sed -e 's/\(filename=\".*[\\/]external .*\)"\(.*\)""/\1\"\2\""/' cobertura-coverage.xml > cobertura-coverage-patched.xml @@ -410,13 +414,13 @@ extends: displayName: Publish Code Coverage condition: and(succeededOrFailed(), eq(variables['ReportDirExists'], 'true')) inputs: - summaryFileLocation: ${{ parameters.buildDirectory }}/nyc/report/cobertura-coverage-patched.xml + summaryFileLocation: $(Pipeline.Workspace)/${{ parameters.buildDirectory }}/nyc/report/cobertura-coverage-patched.xml failIfCoverageEmpty: true - task: CopyFiles@2 displayName: Copy code coverage report to artifact staging directory condition: and(succeededOrFailed(), eq(variables['ReportDirExists'], 'true')) inputs: - sourceFolder: '${{ parameters.buildDirectory }}/nyc/report' + sourceFolder: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}/nyc/report' targetFolder: $(Build.ArtifactStagingDirectory)/codeCoverageAnalysis - task: Bash@3 displayName: Report Code Coverage Comparison @@ -433,7 +437,7 @@ extends: ADO_CI_BUILD_DEFINITION_ID_PR: 11 inputs: targetType: inline - workingDirectory: '${{ parameters.buildDirectory }}' + workingDirectory: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' script: | set 
-eu -o pipefail echo "Github Repository Name: $GITHUB_REPOSITORY_NAME" @@ -462,8 +466,8 @@ extends: STAGING_PATH: $(Build.ArtifactStagingDirectory) inputs: targetType: filePath - workingDirectory: '${{ parameters.buildDirectory }}' - filePath: $(Build.SourcesDirectory)/scripts/pack-packages.sh + workingDirectory: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' + filePath: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/scripts/pack-packages.sh # At this point we want to publish the artifact with npm-packed packages, and the one with test files, # but as part of 1ES migration that's now part of templateContext.outputs below. @@ -474,7 +478,7 @@ extends: displayName: 'Calculate bundle sizes' inputs: command: custom - workingDir: '${{ parameters.buildDirectory }}' + workingDir: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' customCommand: 'run bundle-analysis:collect' # Copy files so all artifacts we publish end up under the same parent folder. @@ -482,7 +486,7 @@ extends: - task: CopyFiles@2 displayName: Copy bundle size files to artifact staging directory inputs: - sourceFolder: '${{ parameters.buildDirectory }}/artifacts/bundleAnalysis' + sourceFolder: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}/artifacts/bundleAnalysis' targetFolder: $(Build.ArtifactStagingDirectory)/bundleAnalysis # At this point we want to publish the artifact with the bundle size analysis, @@ -498,7 +502,7 @@ extends: TARGET_BRANCH_NAME: '$(targetBranchName)' inputs: command: custom - workingDir: '${{ parameters.buildDirectory }}' + workingDir: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' customCommand: 'run bundle-analysis:run' - ${{ if and(or(eq(variables['Build.Reason'], 'IndividualCI'), eq(variables['Build.Reason'], 'BatchedCI')), eq(variables['System.TeamProject'], 'internal')) }}: @@ -506,7 +510,7 @@ extends: displayName: List report.json inputs: targetType: inline - workingDirectory: '${{ parameters.buildDirectory }}' + workingDirectory: 
'$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' script: | set -eu -o pipefail echo "Build Directory is ${{ parameters.buildDirectory }}"; @@ -527,7 +531,7 @@ extends: set -eu -o pipefail echo "Writing the following performance tests results to Aria/Kusto" echo "Report Size:" - ls -la '$(Build.SourcesDirectory)/examples/utils/bundle-size-tests/bundleAnalysis/report.json'; + ls -la '$(Pipeline.Workspace)/$(FluidFrameworkDirectory)/examples/utils/bundle-size-tests/bundleAnalysis/report.json'; npx telemetry-generator --handlerModule "$(pathToTelemetryGeneratorHandlers)/bundleSizeHandler.js" --dir '$(Build.ArtifactStagingDirectory)/bundleAnalysis/@fluid-example/bundle-size-tests'; # Docs @@ -536,7 +540,7 @@ extends: displayName: 'npm run ci:build:docs' inputs: command: custom - workingDir: '${{ parameters.buildDirectory }}' + workingDir: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' customCommand: 'run ci:build:docs' # Copy files so all artifacts we publish end up under the same parent folder. 
@@ -544,7 +548,7 @@ extends: - task: CopyFiles@2 displayName: Copy _api-extractor-temp files to artifact staging directory inputs: - sourceFolder: '${{ parameters.buildDirectory }}/_api-extractor-temp' + sourceFolder: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}/_api-extractor-temp' targetFolder: $(Build.ArtifactStagingDirectory)/_api-extractor-temp # At this point we want to publish the artifact with the _api-extractor-temp folder, @@ -559,7 +563,7 @@ extends: displayName: Reset lockfile inputs: targetType: inline - workingDirectory: '${{ parameters.buildDirectory }}' + workingDirectory: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' script: | set -eu -o pipefail git checkout HEAD -- pnpm-lock.yaml @@ -569,7 +573,7 @@ extends: displayName: Prune pnpm store inputs: targetType: inline - workingDirectory: '${{ parameters.buildDirectory }}' + workingDirectory: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' script: | set -eu -o pipefail pnpm store prune @@ -578,6 +582,7 @@ extends: displayName: Check for extraneous modified files inputs: targetType: inline + workingDirectory: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' script: | # Note: deliberately not using `set -eu -o pipefail` because this script leverages the return code of grep # even in an error case diff --git a/tools/pipelines/templates/include-build-lint.yml b/tools/pipelines/templates/include-build-lint.yml index 2ea7a15e0be0..4fc875f2d32d 100644 --- a/tools/pipelines/templates/include-build-lint.yml +++ b/tools/pipelines/templates/include-build-lint.yml @@ -4,20 +4,20 @@ # include-build-lint template for the Build and Lint step in client build and test stability pipeline parameters: -- name: taskBuild - type: string +- name: taskBuild + type: string default: ci:build -- name: taskLint - type: boolean +- name: taskLint + type: boolean default: true - name: taskLintName type: string - default: lint + default: lint - name: buildDirectory - type: string + type: string 
steps: - ${{ if ne(parameters.taskBuild, 'false') }}: @@ -29,7 +29,7 @@ steps: FLUB_TYPETEST_SKIP_VERSION_OUTPUT: $(testBuild) inputs: command: 'custom' - workingDir: ${{ parameters.buildDirectory }} + workingDir: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} customCommand: 'run ${{ parameters.taskBuild }}' - ${{ if ne(parameters.taskLint, false) }}: @@ -37,5 +37,5 @@ steps: displayName: npm run ${{ parameters.taskLintName }} inputs: command: 'custom' - workingDir: ${{ parameters.buildDirectory }} - customCommand: 'run ${{ parameters.taskLintName }}' \ No newline at end of file + workingDir: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} + customCommand: 'run ${{ parameters.taskLintName }}' diff --git a/tools/pipelines/templates/include-install-build-tools.yml b/tools/pipelines/templates/include-install-build-tools.yml index 10ffbbc52d9a..24a6d29baa0d 100644 --- a/tools/pipelines/templates/include-install-build-tools.yml +++ b/tools/pipelines/templates/include-install-build-tools.yml @@ -20,13 +20,12 @@ parameters: default: $(Pipeline.Workspace)/.pnpm-store steps: - # These steps should ONLY run if we're using the repo version of the build tools. These steps are mutually exclusive # with the next group of steps. 
- ${{ if eq(parameters.buildToolsVersionToInstall, 'repo') }}: - template: /tools/pipelines/templates/include-install-pnpm.yml@self parameters: - buildDirectory: $(Build.SourcesDirectory)/build-tools + buildDirectory: $(FluidFrameworkDirectory)/build-tools pnpmStorePath: ${{ parameters.pnpmStorePath }} enableCache: false @@ -35,7 +34,7 @@ steps: displayName: Install Fluid Build Tools (from repo) inputs: targetType: 'inline' - workingDirectory: $(Build.SourcesDirectory)/build-tools + workingDirectory: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/build-tools script: | set -eu -o pipefail pnpm i --frozen-lockfile @@ -53,7 +52,7 @@ steps: displayName: Install Fluid Build Tools (from npm) inputs: targetType: 'inline' - workingDirectory: ${{ parameters.buildDirectory }} + workingDirectory: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} script: | set -eu -o pipefail echo "${{ parameters.buildToolsVersionToInstall }}" @@ -64,7 +63,7 @@ steps: displayName: Check Build Tools Installation inputs: targetType: 'inline' - workingDirectory: ${{ parameters.buildDirectory }} + workingDirectory: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} script: | set -eu -o pipefail # Output the help and full command list for debugging purposes diff --git a/tools/pipelines/templates/include-install-pnpm.yml b/tools/pipelines/templates/include-install-pnpm.yml index c053f33c398c..0a0e157a2ba2 100644 --- a/tools/pipelines/templates/include-install-pnpm.yml +++ b/tools/pipelines/templates/include-install-pnpm.yml @@ -43,7 +43,7 @@ steps: inputs: # Caches are already scoped to individual pipelines, so no need to include the release group name or tag # in the cache key - key: 'pnpm-store | "$(Agent.OS)" | ${{ parameters.buildDirectory }}/pnpm-lock.yaml' + key: 'pnpm-store | "$(Agent.OS)" | $(Pipeline.Workspace)/${{ parameters.buildDirectory }}/pnpm-lock.yaml' path: ${{ parameters.pnpmStorePath }} restoreKeys: | pnpm-store | "$(Agent.OS)" @@ -55,7 +55,7 @@ steps: condition: 
or(succeeded(), canceled()) inputs: targetType: 'inline' - workingDirectory: ${{ parameters.buildDirectory }} + workingDirectory: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' # workspace-concurrency 0 means use use the CPU core count. This is better than the default (4) for larger agents. script: | set -eu -o pipefail diff --git a/tools/pipelines/templates/include-install.yml b/tools/pipelines/templates/include-install.yml index 81e1d471c491..1230fde972f8 100644 --- a/tools/pipelines/templates/include-install.yml +++ b/tools/pipelines/templates/include-install.yml @@ -34,7 +34,7 @@ steps: retryCountOnTaskFailure: 4 inputs: targetType: 'inline' - workingDirectory: ${{ parameters.buildDirectory }} + workingDirectory: '$(Pipeline.Workspace)/${{ parameters.buildDirectory }}' script: | set -eu -o pipefail ${{ parameters.packageManagerInstallCommand }} diff --git a/tools/pipelines/templates/include-policy-check.yml b/tools/pipelines/templates/include-policy-check.yml index 9503095da9fc..fabfae50e000 100644 --- a/tools/pipelines/templates/include-policy-check.yml +++ b/tools/pipelines/templates/include-policy-check.yml @@ -33,18 +33,20 @@ stages: - job: displayName: Run checks steps: + - checkout: self + path: $(FluidFrameworkDirectory) - template: /tools/pipelines/templates/include-use-node-version.yml@self - template: /tools/pipelines/templates/include-install-pnpm.yml@self parameters: - buildDirectory: $(Build.SourcesDirectory) + buildDirectory: ${{ parameters.buildDirectory }} - task: Bash@3 displayName: Install dependencies retryCountOnTaskFailure: 4 inputs: targetType: 'inline' - workingDirectory: ${{ parameters.buildDirectory }} + workingDirectory: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} script: | set -eu -o pipefail ${{ parameters.dependencyInstallCommand }} @@ -55,7 +57,7 @@ stages: displayName: npm run ${{ check }} inputs: command: 'custom' - workingDir: ${{ parameters.buildDirectory }} + workingDir: $(Pipeline.Workspace)/${{ 
parameters.buildDirectory }} customCommand: 'run ${{ check }}' condition: succeededOrFailed() diff --git a/tools/pipelines/templates/include-process-test-results.yml b/tools/pipelines/templates/include-process-test-results.yml index cd18fe358d9f..ac30206d6809 100644 --- a/tools/pipelines/templates/include-process-test-results.yml +++ b/tools/pipelines/templates/include-process-test-results.yml @@ -21,6 +21,6 @@ steps: inputs: testResultsFormat: 'JUnit' testResultsFiles: '**/*junit-report.xml' - searchFolder: ${{ parameters.buildDirectory }}/${{ testResultDir }} + searchFolder: $(Pipeline.Workspace)/${{ parameters.buildDirectory }}/${{ testResultDir }} mergeTestResults: false condition: and(succeededOrFailed(), eq(variables['startTest'], 'true')) diff --git a/tools/pipelines/templates/include-publish-npm-package.yml b/tools/pipelines/templates/include-publish-npm-package.yml index bafca1fc6e8d..18a395633b99 100644 --- a/tools/pipelines/templates/include-publish-npm-package.yml +++ b/tools/pipelines/templates/include-publish-npm-package.yml @@ -117,4 +117,4 @@ stages: jobs: - template: /tools/pipelines/templates/upload-dev-manifest.yml@self parameters: - buildDirectory: '${{ parameters.buildDirectory }}' + buildDirectory: ${{ parameters.buildDirectory }} diff --git a/tools/pipelines/templates/include-set-package-version.yml b/tools/pipelines/templates/include-set-package-version.yml index 5a70564b0376..d10edee4aeba 100644 --- a/tools/pipelines/templates/include-set-package-version.yml +++ b/tools/pipelines/templates/include-set-package-version.yml @@ -68,7 +68,7 @@ steps: PACKAGE_TYPES_FIELD: ${{ parameters.packageTypesOverride }} inputs: targetType: 'inline' - workingDirectory: ${{ parameters.buildDirectory }} + workingDirectory: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} script: | set -eu -o pipefail # expect release group root package.json be in the current working directory @@ -91,7 +91,7 @@ steps: continueOnError: false inputs: targetType: 'inline' - 
workingDirectory: ${{ parameters.buildDirectory }} + workingDirectory: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} script: | set -eu -o pipefail # At this point in the pipeline the build hasn't been done, so we skip checking if the types files and other build outputs exist. @@ -106,8 +106,8 @@ steps: INTERDEPENDENCY_RANGE: ${{ parameters.interdependencyRange }} inputs: targetType: 'filePath' - workingDirectory: ${{ parameters.buildDirectory }} - filePath: $(Build.SourcesDirectory)/scripts/update-package-version.sh + workingDirectory: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} + filePath: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/scripts/update-package-version.sh # This task is a last-minute verification that no Fluid internal versions show up with caret dependencies. This is to # help find and prevent bugs in the version bumping tools. @@ -115,7 +115,7 @@ steps: displayName: Check for caret dependencies on internal versions inputs: targetType: 'inline' - workingDirectory: ${{ parameters.buildDirectory }} + workingDirectory: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} script: | # Note: deliberately not using `set -eu -o pipefail` because this script leverages the return code of grep # even in an error case @@ -131,7 +131,7 @@ steps: displayName: Check for caret dependencies on dev versions inputs: targetType: 'inline' - workingDirectory: ${{ parameters.buildDirectory }} + workingDirectory: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} script: | # Note: deliberately not using `set -eu -o pipefail` because this script leverages the return code of grep # even in an error case diff --git a/tools/pipelines/templates/include-test-perf-benchmarks.yml b/tools/pipelines/templates/include-test-perf-benchmarks.yml index 3cfdbb26e240..a049a6ab1876 100644 --- a/tools/pipelines/templates/include-test-perf-benchmarks.yml +++ b/tools/pipelines/templates/include-test-perf-benchmarks.yml @@ -30,6 +30,7 @@ parameters: steps: - 
checkout: ff_pipeline_host + path: $(FFPipelineHostDirectory) - task: Bash@3 displayName: Print parameter/variable values for troubleshooting @@ -56,7 +57,7 @@ steps: - template: /tools/pipelines/templates/include-install.yml@self parameters: packageManager: pnpm - buildDirectory: $(Build.SourcesDirectory) + buildDirectory: $(FFPipelineHostDirectory) packageManagerInstallCommand: pnpm install primaryRegistry: $(ado-feeds-ff-download-only) userNpmrcPath: ${{ parameters.userNpmrcDirectory }}/.npmrc diff --git a/tools/pipelines/templates/include-test-real-service.yml b/tools/pipelines/templates/include-test-real-service.yml index ac0e71536551..6ed46687379c 100644 --- a/tools/pipelines/templates/include-test-real-service.yml +++ b/tools/pipelines/templates/include-test-real-service.yml @@ -155,16 +155,9 @@ stages: # basic ANSI color support though, so force that in the pipeline - name: FORCE_COLOR value: 1 - - name: pipelineHostPath - ${{ if eq(parameters.runAgainstDocker, true) }}: - # Running against docker means we'll check out the FluidFramework repo as well as ff_pipeline_host - # When multiple repos are checked out, they are checked out to $(Build.SourcesDirectory)/ - value: $(Build.SourcesDirectory)/ff_pipeline_host - ${{ else }}: - value: $(Build.SourcesDirectory) - name: testPackageDir - value: '${{ variables.pipelineHostPath }}/node_modules/${{ parameters.testPackage }}' + value: '$(FFPipelineHostDirectory)/node_modules/${{ parameters.testPackage }}' - name: testPackageFilePattern value: ${{ replace(replace(parameters.testPackage, '@', '' ), '/', '-') }}-[0-9]*.tgz # Note that this path must match the path that the packed packages are saved to in the build pipeline. 
@@ -178,6 +171,7 @@ stages: steps: # Setup - checkout: ff_pipeline_host + path: $(FFPipelineHostDirectory) clean: true # Install self-signed cert for R11s deployment in local cert store @@ -229,14 +223,14 @@ stages: testFileTarName=${{ parameters.testFileTarName }} artifactPipeline=${{ variables.artifactPipeline }} artifactBuildId=${{ parameters.artifactBuildId }} - pipelineHostPath=${{ variables.pipelineHostPath }} + FFPipelineHostDirectory=$(FFPipelineHostDirectory) " - template: /tools/pipelines/templates/include-use-node-version.yml@self - template: /tools/pipelines/templates/include-install-pnpm.yml@self parameters: - buildDirectory: ${{ variables.pipelineHostPath }} + buildDirectory: $(FFPipelineHostDirectory) - template: /tools/pipelines/templates/include-setup-npmrc-for-download.yml@self @@ -244,6 +238,7 @@ stages: - ${{ if eq(parameters.runAgainstDocker, true) }}: # Checks out FluidFramework repo - checkout: self + path: $(FluidFrameworkDirectory) clean: true - task: Docker@2 displayName: 'Login to container registry' @@ -254,7 +249,7 @@ stages: displayName: 'Start routerlicious environment in docker' inputs: containerregistrytype: 'Container Registry' - dockerComposeFile: $(Build.SourcesDirectory)/FluidFramework/server/docker-compose.yml + dockerComposeFile: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/server/docker-compose.yml action: 'Run a Docker Compose command' dockerComposeCommand: 'up -d --wait' projectName: 'routerlicious' @@ -268,15 +263,15 @@ stages: displayName: 'Wait for services to be ready' inputs: targetType: 'filePath' - filePath: $(Build.SourcesDirectory)/FluidFramework/scripts/wait-for-docker-services.sh + filePath: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/scripts/wait-for-docker-services.sh env: - DOCKER_COMPOSE_FILE: $(Build.SourcesDirectory)/FluidFramework/server/docker-compose.yml + DOCKER_COMPOSE_FILE: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/server/docker-compose.yml - task: Bash@3 displayName: Install base 
dependencies inputs: targetType: 'inline' - workingDirectory: ${{ variables.pipelineHostPath }} + workingDirectory: $(FFPipelineHostDirectory) script: | set -eu -o pipefail pnpm install @@ -306,7 +301,7 @@ stages: displayName: Initialize inputs: targetType: 'inline' - workingDirectory: ${{ variables.pipelineHostPath }} + workingDirectory: $(FFPipelineHostDirectory) script: | set -eu -o pipefail if [[ `ls -1 ${{ variables.testPackagePathPattern }} | wc -l` -eq 1 ]]; then @@ -325,7 +320,7 @@ stages: retryCountOnTaskFailure: 10 inputs: targetType: 'inline' - workingDirectory: ${{ variables.pipelineHostPath }} + workingDirectory: $(FFPipelineHostDirectory) script: 'pnpm install $(Initialize.testPackageTgz)' # Download Test Files & Install Extra Dependencies @@ -356,7 +351,7 @@ stages: displayName: Unpack test files inputs: targetType: 'inline' - workingDirectory: ${{ variables.pipelineHostPath }}/node_modules/${{ parameters.testPackage }} + workingDirectory: $(FFPipelineHostDirectory)/node_modules/${{ parameters.testPackage }} script: | set -eu -o pipefail @@ -373,15 +368,15 @@ stages: - template: /tools/pipelines/templates/include-copy-dev-dependencies.yml@self parameters: - sourcePackageLocation: ${{ variables.pipelineHostPath }}/node_modules/${{ parameters.testPackage }} - destPackageLocation: ${{ variables.pipelineHostPath }} + sourcePackageLocation: $(FFPipelineHostDirectory)/node_modules/${{ parameters.testPackage }} + destPackageLocation: $(FFPipelineHostDirectory) - ${{ if eq(parameters.cacheCompatVersionsInstalls, true) }}: - task: Bash@3 displayName: Compute compat versions install location and version inputs: targetType: 'inline' - workingDirectory: ${{ variables.pipelineHostPath }}/node_modules/@fluid-private/test-end-to-end-tests + workingDirectory: $(FFPipelineHostDirectory)/node_modules/@fluid-private/test-end-to-end-tests # Using import.meta.resolve to compute this is more resilient to different install tree types. 
# Also note that test-version-utils is esm-only, so cannot be loaded with require. script: | @@ -458,7 +453,7 @@ stages: login__odsp__test__tenants: $(tenantCreds) inputs: command: 'custom' - workingDir: ${{ variables.pipelineHostPath }}/node_modules/${{ parameters.testPackage }} + workingDir: $(FFPipelineHostDirectory)/node_modules/${{ parameters.testPackage }} customCommand: 'run ${{ parameters.testCommand }} -- ${{ variant.flags }}' - ${{ if eq(parameters.skipTestResultPublishing, false) }}: @@ -500,7 +495,7 @@ stages: - task: PublishPipelineArtifact@1 displayName: Publish Artifact - Tinylicious Log inputs: - targetPath: '${{ variables.pipelineHostPath }}/node_modules/${{ parameters.testPackage }}/tinylicious.log' + targetPath: '$(FFPipelineHostDirectory)/node_modules/${{ parameters.testPackage }}/tinylicious.log' artifactName: 'tinyliciousLog_attempt-$(System.JobAttempt)' publishLocation: 'pipeline' condition: always() @@ -546,7 +541,7 @@ stages: condition: always() inputs: containerregistrytype: 'Container Registry' - dockerComposeFile: $(Build.SourcesDirectory)/FluidFramework/server/docker-compose.yml + dockerComposeFile: $(Pipeline.Workspace)/$(FluidFrameworkDirectory)/server/docker-compose.yml action: 'Run a Docker Compose command' dockerComposeCommand: 'down' projectName: 'routerlicious' diff --git a/tools/pipelines/templates/include-test-task.yml b/tools/pipelines/templates/include-test-task.yml index c4a4aa64adec..6901553a5d7b 100644 --- a/tools/pipelines/templates/include-test-task.yml +++ b/tools/pipelines/templates/include-test-task.yml @@ -21,7 +21,7 @@ steps: displayName: npm run ${{ parameters.taskTestStep }}:coverage inputs: command: 'custom' - workingDir: ${{ parameters.buildDirectory }} + workingDir: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} customCommand: 'run ${{ parameters.taskTestStep }}:coverage' condition: and(succeededOrFailed(), eq(variables['startTest'], 'true')) env: @@ -37,7 +37,7 @@ steps: displayName: npm run ${{ 
parameters.taskTestStep }} inputs: command: 'custom' - workingDir: ${{ parameters.buildDirectory }} + workingDir: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} customCommand: 'run ${{ parameters.taskTestStep }}' condition: and(succeededOrFailed(), eq(variables['startTest'], 'true')) env: diff --git a/tools/pipelines/templates/include-use-node-version.yml b/tools/pipelines/templates/include-use-node-version.yml index 12dcbdd24c2c..2ff8820ada42 100644 --- a/tools/pipelines/templates/include-use-node-version.yml +++ b/tools/pipelines/templates/include-use-node-version.yml @@ -14,7 +14,7 @@ steps: displayName: Set node version inputs: targetType: 'inline' - workingDirectory: $(Build.SourcesDirectory) + workingDirectory: $(Pipeline.Workspace) script: | set -eu -o pipefail diff --git a/tools/pipelines/templates/include-vars-docker.yml b/tools/pipelines/templates/include-vars-docker.yml index 0ccd19e0dc4f..836376a05106 100644 --- a/tools/pipelines/templates/include-vars-docker.yml +++ b/tools/pipelines/templates/include-vars-docker.yml @@ -46,3 +46,15 @@ variables: - ${{ else }}: - name: containerTag value: $(buildContainerName):$(containerTagSuffix) +# ADO changes the value of Build.SourcesDirectory depending on whether 1 vs. many repositories are checked out. +# Using 's' is consistent with the behavior when multiple repositories are checked out and allows for more +# consistent build scripts. See this documentation for more details: +# https://learn.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#checkout-path +# A relative path is required in some places (like "checkout" steps). 
To get the absolute path, concatenate it +# with the predefined variable "Pipeline.Workspace", e.g.: '$(Pipeline.Workspace)/${{ variables.FluidFrameworkDirectory }}' +- name: consistentSourcesDirectory + value: 's' +- name: FluidFrameworkDirectory + value: '${{ variables.consistentSourcesDirectory }}/FluidFramework' +- name: FFPipelineHostDirectory + value: '${{ variables.consistentSourcesDirectory }}/ff_pipeline_host' diff --git a/tools/pipelines/templates/include-vars-telemetry-generator.yml b/tools/pipelines/templates/include-vars-telemetry-generator.yml index beda17245e7c..0aabcfd2a413 100644 --- a/tools/pipelines/templates/include-vars-telemetry-generator.yml +++ b/tools/pipelines/templates/include-vars-telemetry-generator.yml @@ -18,12 +18,25 @@ variables: value: $(pathToTelemetryGenerator)/node_modules/@ff-internal/telemetry-generator/dist/handlers readonly: true +# ADO changes the value of Build.SourcesDirectory depending on whether 1 vs. many repositories are checked out. +# Using 's' is consistent with the behavior when multiple repositories are checked out and allows for more +# consistent build scripts. See this documentation for more details: +# https://learn.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#checkout-path +# A relative path is required in some places (like "checkout" steps). To get the absolute path, concatenate it +# with the predefined variable "Pipeline.Workspace", e.g.: '$(Pipeline.Workspace)/${{ variables.FluidFrameworkDirectory }}' +- name: consistentSourcesDirectory + value: 's' +- name: FluidFrameworkDirectory + value: '${{ variables.consistentSourcesDirectory }}/FluidFramework' +- name: FFPipelineHostDirectory + value: '${{ variables.consistentSourcesDirectory }}/ff_pipeline_host' + # Same semantics as above two variables, but for pipelines that use the new ff_pipeline_host repository setup # for shared internal dependencies. 
- name: pathToTelemetryGeneratorNew - value: $(Build.SourcesDirectory)/node_modules/@ff-internal/telemetry-generator + value: $(Pipeline.Workspace)/$(FFPipelineHostDirectory)/node_modules/@ff-internal/telemetry-generator readonly: true - name: pathToTelemetryGeneratorHandlersNew - value: $(Build.SourcesDirectory)/node_modules/@ff-internal/telemetry-generator/dist/handlers + value: $(Pipeline.Workspace)/$(FFPipelineHostDirectory)/node_modules/@ff-internal/telemetry-generator/dist/handlers readonly: true diff --git a/tools/pipelines/templates/include-vars.yml b/tools/pipelines/templates/include-vars.yml index 7ffbb8a46212..fff83a2da8c8 100644 --- a/tools/pipelines/templates/include-vars.yml +++ b/tools/pipelines/templates/include-vars.yml @@ -29,6 +29,20 @@ variables: # TODO: AB#45217 - name: skipComponentGovernanceDetection value: true + +# ADO changes the value of Build.SourcesDirectory depending on whether 1 vs. many repositories are checked out. +# Using 's' is consistent with the behavior when multiple repositories are checked out and allows for more +# consistent build scripts. See this documentation for more details: +# https://learn.microsoft.com/en-us/azure/devops/pipelines/repos/multi-repo-checkout?view=azure-devops#checkout-path +# A relative path is required in some places (like "checkout" steps). 
To get the absolute path, concatenate it +# with the predefined variable "Pipeline.Workspace", e.g.: '$(Pipeline.Workspace)/${{ variables.FluidFrameworkDirectory }}' +- name: consistentSourcesDirectory + value: 's' +- name: FluidFrameworkDirectory + value: '${{ variables.consistentSourcesDirectory }}/FluidFramework' +- name: FFPipelineHostDirectory + value: '${{ variables.consistentSourcesDirectory }}/ff_pipeline_host' + - name: testBuild value: ${{ lower(startsWith(variables['Build.SourceBranch'], 'refs/heads/test/')) }} - name: shouldPublish diff --git a/tools/pipelines/templates/upload-dev-manifest.yml b/tools/pipelines/templates/upload-dev-manifest.yml index b737afe687d5..ff2e41822c70 100644 --- a/tools/pipelines/templates/upload-dev-manifest.yml +++ b/tools/pipelines/templates/upload-dev-manifest.yml @@ -13,6 +13,8 @@ jobs: - name: version value: $[stageDependencies.build.build.outputs['SetVersion.version']] steps: + - checkout: self + path: $(FluidFrameworkDirectory) - template: /tools/pipelines/templates/include-use-node-version.yml@self - template: /tools/pipelines/templates/include-install-build-tools.yml@self parameters: @@ -22,7 +24,7 @@ jobs: displayName: Generate release reports inputs: targetType: 'inline' - workingDirectory: ${{ parameters.buildDirectory }} + workingDirectory: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} script: | set -eu -o pipefail mkdir generate_release_reports @@ -32,7 +34,7 @@ jobs: displayName: Update release report version inputs: targetType: 'inline' - workingDirectory: ${{ parameters.buildDirectory }} + workingDirectory: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} script: | set -eu -o pipefail mkdir upload_release_reports @@ -41,7 +43,7 @@ jobs: - task: CopyFiles@2 displayName: Copy release reports inputs: - SourceFolder: ${{ parameters.buildDirectory }}/upload_release_reports + SourceFolder: $(Pipeline.Workspace)/${{ parameters.buildDirectory }}/upload_release_reports TargetFolder: 
$(Build.ArtifactStagingDirectory)/release_reports - task: AzureCLI@2 @@ -50,7 +52,7 @@ jobs: inputs: azureSubscription: 'fluid-docs' scriptType: bash - workingDirectory: ${{ parameters.buildDirectory }} + workingDirectory: $(Pipeline.Workspace)/${{ parameters.buildDirectory }} scriptLocation: inlineScript inlineScript: | for file in upload_release_reports/*; do diff --git a/tools/pipelines/templates/upload-telemetry/include-stage-upload-telemetry.yml b/tools/pipelines/templates/upload-telemetry/include-stage-upload-telemetry.yml index 0da427e4b653..d31987dc42a4 100644 --- a/tools/pipelines/templates/upload-telemetry/include-stage-upload-telemetry.yml +++ b/tools/pipelines/templates/upload-telemetry/include-stage-upload-telemetry.yml @@ -42,12 +42,13 @@ stages: steps: - checkout: ff_pipeline_host + path: $(FFPipelineHostDirectory) - template: /tools/pipelines/templates/include-use-node-version.yml@self - template: /tools/pipelines/templates/include-install-pnpm.yml@self parameters: - buildDirectory: $(Build.SourcesDirectory) + buildDirectory: $(Pipeline.Workspace)/$(FFPipelineHostDirectory) primaryRegistry: $(ado-feeds-ff-download-only) - task: Bash@3 diff --git a/tools/pipelines/templates/upload-telemetry/include-steps-upload-stage-timing.yml b/tools/pipelines/templates/upload-telemetry/include-steps-upload-stage-timing.yml index aa92f0a53004..ce95aa04bfb9 100644 --- a/tools/pipelines/templates/upload-telemetry/include-steps-upload-stage-timing.yml +++ b/tools/pipelines/templates/upload-telemetry/include-steps-upload-stage-timing.yml @@ -47,7 +47,7 @@ steps: WORK_FOLDER: $(Agent.TempDirectory)/stageTimingAndResult inputs: targetType: 'inline' - workingDirectory: $(Build.SourcesDirectory) + workingDirectory: $(Pipeline.Workspace)/$(FFPipelineHostDirectory) script: | set -eu -o pipefail diff --git a/tools/pipelines/templates/upload-telemetry/include-steps-upload-test-pass-rate.yml b/tools/pipelines/templates/upload-telemetry/include-steps-upload-test-pass-rate.yml 
index c963d70b8cb8..133f79ad429a 100644 --- a/tools/pipelines/templates/upload-telemetry/include-steps-upload-test-pass-rate.yml +++ b/tools/pipelines/templates/upload-telemetry/include-steps-upload-test-pass-rate.yml @@ -30,7 +30,7 @@ steps: script: | set -eu -o pipefail echo "Fetching test pass rate data and saving into JSON files" - node "$(Build.SourcesDirectory)/scripts/get-test-pass-rate.mjs" + node "$(Pipeline.Workspace)/$(FFPipelineHostDirectory)/scripts/get-test-pass-rate.mjs" - task: Bash@3 displayName: Submit telemetry for test pass rate @@ -41,7 +41,7 @@ steps: WORK_FOLDER: $(Agent.TempDirectory)/stageTestPassRate inputs: targetType: 'inline' - workingDirectory: $(Build.SourcesDirectory) + workingDirectory: $(Pipeline.Workspace)/$(FFPipelineHostDirectory) script: | set -eu -o pipefail echo "Listing files in '$WORK_FOLDER'" diff --git a/tools/pipelines/test-perf-benchmarks.yml b/tools/pipelines/test-perf-benchmarks.yml index 60e256296b86..852994bda7ce 100644 --- a/tools/pipelines/test-perf-benchmarks.yml +++ b/tools/pipelines/test-perf-benchmarks.yml @@ -71,7 +71,7 @@ variables: value: 1 readonly: true - name: testWorkspace - value: $(Build.SourcesDirectory) + value: $(Pipeline.Workspace)/$(FFPipelineHostDirectory) readonly: true - name: testFilesPath value: $(Pipeline.Workspace)/test-files @@ -149,7 +149,7 @@ stages: - template: /tools/pipelines/templates/include-copy-dev-dependencies.yml@self parameters: sourcePackageLocation: ${{ variables.testWorkspace }}/node_modules/${{ testPackage }} - destPackageLocation: $(Build.SourcesDirectory) + destPackageLocation: $(Pipeline.Workspace)/$(FFPipelineHostDirectory) # Run tests - task: Npm@1 @@ -276,7 +276,7 @@ stages: - template: /tools/pipelines/templates/include-copy-dev-dependencies.yml@self parameters: sourcePackageLocation: ${{ variables.testWorkspace }}/node_modules/${{ testPackage }} - destPackageLocation: $(Build.SourcesDirectory) + destPackageLocation: $(Pipeline.Workspace)/$(FFPipelineHostDirectory) 
# Run tests - task: Npm@1 @@ -388,7 +388,7 @@ stages: - template: /tools/pipelines/templates/include-copy-dev-dependencies.yml@self parameters: sourcePackageLocation: ${{ variables.testWorkspace }}/node_modules/${{ testPackage }} - destPackageLocation: $(Build.SourcesDirectory) + destPackageLocation: $(Pipeline.Workspace)/$(FFPipelineHostDirectory) - task: Npm@1 displayName: Run custom data performance tests - ${{ testPackage }} @@ -492,7 +492,7 @@ stages: - template: /tools/pipelines/templates/include-copy-dev-dependencies.yml@self parameters: sourcePackageLocation: ${{ variables.testWorkspace }}/node_modules/$(testPackage) - destPackageLocation: $(Build.SourcesDirectory) + destPackageLocation: $(Pipeline.Workspace)/$(FFPipelineHostDirectory) # We run both types of tests in the same bash step so we can make sure to run the second set even if the first # one fails.