Update SuperPMI CI automation (#60375)
* Update SuperPMI CI automation

A number of changes:
1. Move the SuperPMI replay pipeline to the public instance, so it can be
triggered by GitHub PRs.
2. Rename SuperPMI collection scripts to contain "collect" in their names,
to distinguish them from the "replay" scripts.
3. Remove a lot of unused copy/paste cruft.
4. Create a new azdo_pipelines_util.py script for all the CI scripts to depend
on, so they don't import the superpmi.py script or each other (a rough sketch
of such helpers follows this commit message).
5. Simplify the Python imports and make imported API usage more consistent.

* Fix Python

* Fix Python names

* For testing, upload the SPMI collection to the "test_collect" location

Don't overwrite the existing collection.
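
As context for item 4 of the change list above, here is a purely illustrative sketch of the kind of helpers a shared CI utility module like azdo_pipelines_util.py typically exposes: running commands, copying directory trees, and setting Azure DevOps pipeline variables. The helper names, signatures, and behavior are assumptions for illustration, not the actual contents of the file added by this commit.

# Hypothetical azdo_pipelines_util.py-style helpers (illustrative only).
import os
import shutil
import subprocess
import tempfile


def run_command(command_to_run, cwd=None, env=None):
    """Run a command and return (stdout, stderr, return_code)."""
    print("Running: " + " ".join(command_to_run))
    with subprocess.Popen(command_to_run, stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE, cwd=cwd, env=env) as proc:
        stdout, stderr = proc.communicate()
        return stdout, stderr, proc.returncode


def copy_directory(src_path, dst_path):
    """Copy a directory tree, replacing any existing destination."""
    if os.path.exists(dst_path):
        shutil.rmtree(dst_path)
    shutil.copytree(src_path, dst_path)


def set_pipeline_variable(name, value):
    """Emit the Azure DevOps logging command that sets a pipeline variable."""
    print("##vso[task.setvariable variable={0}]{1}".format(name, value))


class TempDir:
    """Create a temporary directory, chdir into it, and clean up on exit."""

    def __enter__(self):
        self.prev_cwd = os.getcwd()
        self.dir = tempfile.mkdtemp()
        os.chdir(self.dir)
        return self.dir

    def __exit__(self, exc_type, exc_value, traceback):
        os.chdir(self.prev_cwd)
        shutil.rmtree(self.dir, ignore_errors=True)

With helpers along these lines, the collect and replay CI scripts can import a single small module instead of importing superpmi.py or one another.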
BruceForstall authored Oct 14, 2021
1 parent 56d807d commit feed66d
Showing 17 changed files with 292 additions and 323 deletions.
@@ -58,7 +58,7 @@ jobs:

- template: /eng/pipelines/common/platform-matrix.yml
parameters:
jobTemplate: /eng/pipelines/coreclr/templates/superpmi-job.yml
jobTemplate: /eng/pipelines/coreclr/templates/superpmi-collect-job.yml
buildConfig: checked
platforms:
# Linux tests are built on the OSX machines.
@@ -79,7 +79,7 @@ jobs:

- template: /eng/pipelines/common/platform-matrix.yml
parameters:
jobTemplate: /eng/pipelines/coreclr/templates/superpmi-job.yml
jobTemplate: /eng/pipelines/coreclr/templates/superpmi-collect-job.yml
buildConfig: checked
platforms:
# Linux tests are built on the OSX machines.
@@ -101,7 +101,7 @@ jobs:

- template: /eng/pipelines/common/platform-matrix.yml
parameters:
jobTemplate: /eng/pipelines/coreclr/templates/superpmi-job.yml
jobTemplate: /eng/pipelines/coreclr/templates/superpmi-collect-job.yml
buildConfig: checked
platforms:
# Linux tests are built on the OSX machines.
@@ -123,7 +123,7 @@ jobs:

- template: /eng/pipelines/common/platform-matrix.yml
parameters:
jobTemplate: /eng/pipelines/coreclr/templates/superpmi-job.yml
jobTemplate: /eng/pipelines/coreclr/templates/superpmi-collect-job.yml
buildConfig: checked
platforms:
# Linux tests are built on the OSX machines.
@@ -144,7 +144,7 @@ jobs:

- template: /eng/pipelines/common/platform-matrix.yml
parameters:
jobTemplate: /eng/pipelines/coreclr/templates/superpmi-job.yml
jobTemplate: /eng/pipelines/coreclr/templates/superpmi-collect-job.yml
buildConfig: checked
platforms:
# Linux tests are built on the OSX machines.
4 changes: 0 additions & 4 deletions eng/pipelines/coreclr/superpmi-replay.yml
@@ -8,10 +8,6 @@ trigger:
- src/coreclr/jit/*
- src/coreclr/inc/jiteeversionguid.h

# This pipeline is supposed to be run only on merged changes
# and should not be triggerable from a PR.
pr: none

jobs:

- template: /eng/pipelines/common/platform-matrix.yml
8 changes: 2 additions & 6 deletions eng/pipelines/coreclr/templates/jit-exploratory-job.yml
@@ -3,7 +3,6 @@ parameters:
archType: ''
osGroup: ''
osSubgroup: ''
container: ''
runtimeVariant: ''
testGroup: ''
framework: net6.0 # Specify the appropriate framework when running release branches (ie netcoreapp3.0 for release/3.0)
@@ -13,8 +12,6 @@ parameters:
runtimeType: 'coreclr'
pool: ''
codeGenType: 'JIT'
projetFile: ''
runKind: ''
runJobTemplate: '/eng/pipelines/coreclr/templates/jit-run-exploratory-job.yml'
additionalSetupParameters: ''

@@ -27,8 +24,8 @@ jobs:
- template: ${{ parameters.runJobTemplate }}
parameters:
# Compute job name from template parameters
jobName: ${{ format('exploratory_{0}{1}_{2}_{3}_{4}_{5}_{6}', parameters.osGroup, parameters.osSubgroup, parameters.archType, parameters.buildConfig, parameters.runtimeType, parameters.codeGenType, parameters.runKind) }}
displayName: ${{ format('Exploratory {0}{1} {2} {3} {4} {5} {6}', parameters.osGroup, parameters.osSubgroup, parameters.archType, parameters.buildConfig, parameters.runtimeType, parameters.codeGenType, parameters.runKind) }}
jobName: ${{ format('exploratory_{0}{1}_{2}_{3}_{4}_{5}', parameters.osGroup, parameters.osSubgroup, parameters.archType, parameters.buildConfig, parameters.runtimeType, parameters.codeGenType) }}
displayName: ${{ format('Exploratory {0}{1} {2} {3} {4} {5}', parameters.osGroup, parameters.osSubgroup, parameters.archType, parameters.buildConfig, parameters.runtimeType, parameters.codeGenType) }}
pool: ${{ parameters.pool }}
buildConfig: ${{ parameters.buildConfig }}
archType: ${{ parameters.archType }}
@@ -38,7 +35,6 @@ jobs:
liveLibrariesBuildConfig: ${{ parameters.liveLibrariesBuildConfig }}
runtimeType: ${{ parameters.runtimeType }}
codeGenType: ${{ parameters.codeGenType }}
runKind: ${{ parameters.runKind }}
testGroup: ${{ parameters.testGroup }}
helixQueues: ${{ parameters.helixQueues }}
additionalSetupParameters: ${{ parameters.additionalSetupParameters }}
11 changes: 2 additions & 9 deletions eng/pipelines/coreclr/templates/jit-run-exploratory-job.yml
@@ -9,17 +9,14 @@ parameters:
archType: '' # required -- targeting CPU architecture
osGroup: '' # required -- operating system for the job
osSubgroup: '' # optional -- operating system subgroup
extraSetupParameters: '' # optional -- extra arguments to pass to the setup script
frameworks: ['net6.0'] # optional -- list of frameworks to run against
continueOnError: 'false' # optional -- determines whether to continue the build if the step errors
dependsOn: '' # optional -- dependencies of the job
timeoutInMinutes: 320 # optional -- timeout for the job
enableTelemetry: false # optional -- enable for telemetry
liveLibrariesBuildConfig: '' # optional -- live-live libraries configuration to use for the run
runtimeType: 'coreclr' # optional -- Sets the runtime as coreclr or mono
codeGenType: 'JIT' # optional -- Decides on the codegen technology if running on mono
runKind: '' # required -- test category
helixQueues: '' # required -- Helix queue
helixQueues: '' # required -- Helix queues
dependOnEvaluatePaths: false

jobs:
@@ -87,16 +84,12 @@ jobs:
value: '$(Build.SourcesDirectory)/artifacts/issues_summary/'
- name: AntigenLogsLocation
value: '$(Build.SourcesDirectory)/artifacts/antigen_logs/'

workspace:
clean: all
pool:
${{ parameters.pool }}
container: ${{ parameters.container }}
strategy:
matrix:
${{ each framework in parameters.frameworks }}:
${{ framework }}:
_Framework: ${{ framework }}
steps:
- ${{ parameters.steps }}

@@ -9,16 +9,11 @@ parameters:
archType: '' # required -- targeting CPU architecture
osGroup: '' # required -- operating system for the job
osSubgroup: '' # optional -- operating system subgroup
extraSetupParameters: '' # optional -- extra arguments to pass to the setup script
frameworks: ['netcoreapp3.0'] # optional -- list of frameworks to run against
continueOnError: 'false' # optional -- determines whether to continue the build if the step errors
dependsOn: '' # optional -- dependencies of the job
timeoutInMinutes: 320 # optional -- timeout for the job
enableTelemetry: false # optional -- enable for telemetry
liveLibrariesBuildConfig: '' # optional -- live-live libraries configuration to use for the run
runtimeType: 'coreclr' # optional -- Sets the runtime as coreclr or mono
codeGenType: 'JIT' # optional -- Decides on the codegen technology if running on mono
runKind: '' # required -- test category
collectionType: ''
collectionName: ''
dependOnEvaluatePaths: false
@@ -108,15 +103,10 @@ jobs:
pool:
${{ parameters.pool }}
container: ${{ parameters.container }}
strategy:
matrix:
${{ each framework in parameters.frameworks }}:
${{ framework }}:
_Framework: ${{ framework }}
steps:
- ${{ parameters.steps }}

- script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi_setup.py -source_directory $(Build.SourcesDirectory) -core_root_directory $(Core_Root_Dir) -arch $(archType) -mch_file_tag $(MchFileTag) -input_directory $(InputDirectory) -collection_name $(CollectionName) -collection_type $(CollectionType) -max_size 50 # size in MB
- script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi_collect_setup.py -source_directory $(Build.SourcesDirectory) -core_root_directory $(Core_Root_Dir) -arch $(archType) -mch_file_tag $(MchFileTag) -input_directory $(InputDirectory) -collection_name $(CollectionName) -collection_type $(CollectionType) -max_size 50 # size in MB
displayName: ${{ format('SuperPMI setup ({0})', parameters.osGroup) }}

# Create required directories for merged mch collection and superpmi logs
@@ -135,23 +125,23 @@
- template: /eng/pipelines/coreclr/templates/superpmi-send-to-helix.yml
parameters:
HelixSource: '$(HelixSourcePrefix)/$(Build.Repository.Name)/$(Build.SourceBranch)' # sources must start with pr/, official/, prodcon/, or agent/
HelixType: 'test/superpmi/$(CollectionName)/$(CollectionType)/$(_Framework)/$(Architecture)'
HelixType: 'test/superpmi/$(CollectionName)/$(CollectionType)/$(Architecture)'
HelixAccessToken: $(HelixApiAccessToken)
HelixTargetQueues: $(Queue)
HelixPreCommands: $(HelixPreCommand)
Creator: $(Creator)
WorkItemTimeout: 4:00 # 4 hours
WorkItemDirectory: '$(WorkItemDirectory)'
CorrelationPayloadDirectory: '$(CorrelationPayloadDirectory)'
ProjectFile: 'superpmi.proj'
ProjectFile: 'superpmi-collect.proj'
BuildConfig: ${{ parameters.buildConfig }}
osGroup: ${{ parameters.osGroup }}
InputArtifacts: '$(InputArtifacts)'
CollectionType: '$(CollectionType)'
CollectionName: '$(CollectionName)'
continueOnError: true # Run the future step i.e. merge-mch step even if this step fails.

# Always run merged step even if collection of some partition fails so we can store collection
# Always run merge step even if collection of some partition fails so we can store collection
# of the partitions that succeeded. If all the partitions fail, merge-mch would fail and we won't
# run future steps like uploading superpmi collection.
- script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi.py merge-mch -log_level DEBUG -pattern $(MchFilesLocation)$(CollectionName).$(CollectionType)*.mch -output_mch_path $(MergedMchFileLocation)$(CollectionName).$(CollectionType).$(MchFileTag).mch
@@ -166,10 +156,10 @@ jobs:
archiveType: $(archiveType)
tarCompression: $(tarCompression)
archiveExtension: $(archiveExtension)
artifactName: 'SuperPMI_Collection_$(CollectionName)_$(CollectionType)_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)_${{ parameters.runtimeType }}_${{ parameters.codeGenType }}_${{ parameters.runKind }}'
artifactName: 'SuperPMI_Collection_$(CollectionName)_$(CollectionType)_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
displayName: ${{ format('Upload artifacts SuperPMI {0}-{1} collection', parameters.collectionName, parameters.collectionType) }}

- script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi.py upload -log_level DEBUG -arch $(archType) -build_type $(buildConfig) -mch_files $(MergedMchFileLocation)$(CollectionName).$(CollectionType).$(MchFileTag).mch -core_root $(Build.SourcesDirectory)/artifacts/bin/coreclr/$(osGroup).x64.$(buildConfigUpper)
- script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi.py upload -jit_ee_version test_collect -log_level DEBUG -arch $(archType) -build_type $(buildConfig) -mch_files $(MergedMchFileLocation)$(CollectionName).$(CollectionType).$(MchFileTag).mch -core_root $(Build.SourcesDirectory)/artifacts/bin/coreclr/$(osGroup).x64.$(buildConfigUpper)
displayName: ${{ format('Upload SuperPMI {0}-{1} collection to Azure Storage', parameters.collectionName, parameters.collectionType) }}
env:
CLRJIT_AZ_KEY: $(clrjit_key1) # secret key stored as variable in pipeline
@@ -184,15 +174,15 @@ jobs:
condition: always()

- task: PublishPipelineArtifact@1
displayName: Publish Superpmi logs
displayName: Publish SuperPMI logs
inputs:
targetPath: $(SpmiLogsLocation)
artifactName: 'SuperPMI_Logs_$(CollectionName)_$(CollectionType)_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)_${{ parameters.runtimeType }}_${{ parameters.codeGenType }}_${{ parameters.runKind }}'
artifactName: 'SuperPMI_Logs_$(CollectionName)_$(CollectionType)_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
condition: always()

- task: PublishPipelineArtifact@1
displayName: Publish SuperPMI build logs
inputs:
targetPath: $(Build.SourcesDirectory)/artifacts/log
artifactName: 'SuperPMI_BuildLogs_$(CollectionName)_$(CollectionType)_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)_${{ parameters.runtimeType }}_${{ parameters.codeGenType }}_${{ parameters.runKind }}'
artifactName: 'SuperPMI_BuildLogs_$(CollectionName)_$(CollectionType)_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
condition: always()
37 changes: 13 additions & 24 deletions eng/pipelines/coreclr/templates/run-superpmi-replay-job.yml
@@ -9,18 +9,12 @@ parameters:
archType: '' # required -- targeting CPU architecture
osGroup: '' # required -- operating system for the job
osSubgroup: '' # optional -- operating system subgroup
extraSetupParameters: '' # optional -- extra arguments to pass to the setup script
frameworks: ['netcoreapp3.0'] # optional -- list of frameworks to run against
continueOnError: 'false' # optional -- determines whether to continue the build if the step errors
dependsOn: '' # optional -- dependencies of the job
timeoutInMinutes: 320 # optional -- timeout for the job
enableTelemetry: false # optional -- enable for telemetry
liveLibrariesBuildConfig: '' # optional -- live-live libraries configuration to use for the run
runtimeType: 'coreclr' # optional -- Sets the runtime as coreclr or mono
codeGenType: 'JIT' # optional -- Decides on the codegen technology if running on mono
runKind: '' # required -- test category
collectionType: ''
collectionName: ''
helixQueues: '' # required -- Helix queues
dependOnEvaluatePaths: false

jobs:
@@ -35,8 +29,6 @@ jobs:
enableTelemetry: ${{ parameters.enableTelemetry }}
enablePublishBuildArtifacts: true
continueOnError: ${{ parameters.continueOnError }}
collectionType: $ {{ parameters.collectionType }}
collectionName: ${{ parameters.collectionName }}
dependOnEvaluatePaths: ${{ parameters.dependOnEvaluatePaths }}
timeoutInMinutes: ${{ parameters.timeoutInMinutes }}

@@ -46,6 +38,7 @@ jobs:
displayName: '${{ parameters.jobName }}'

variables:

- ${{ each variable in parameters.variables }}:
- ${{ if ne(variable.name, '') }}:
- name: ${{ variable.name }}
@@ -69,11 +62,6 @@
pool:
${{ parameters.pool }}
container: ${{ parameters.container }}
strategy:
matrix:
${{ each framework in parameters.frameworks }}:
${{ framework }}:
_Framework: ${{ framework }}
steps:
- ${{ parameters.steps }}

@@ -85,17 +73,18 @@
displayName: ${{ format('SuperPMI replay setup ({0} {1})', parameters.osGroup, parameters.archType) }}

# Run superpmi replay in helix
- template: /eng/pipelines/coreclr/templates/superpmi-send-to-helix.yml
- template: /eng/pipelines/common/templates/runtimes/send-to-helix-step.yml
parameters:
HelixSource: '$(HelixSourcePrefix)/$(Build.Repository.Name)/$(Build.SourceBranch)' # sources must start with pr/, official/, prodcon/, or agent/
HelixAccessToken: $(HelixApiAccessToken)
HelixTargetQueues: $(Queue)
HelixPreCommands: $(HelixPreCommand)
Creator: $(Creator)
displayName: 'Send job to Helix'
helixBuild: $(Build.BuildNumber)
helixSource: $(_HelixSource)
helixType: 'build/tests/'
helixQueues: ${{ join(',', parameters.helixQueues) }}
creator: dotnet-bot
WorkItemTimeout: 4:00 # 4 hours
WorkItemDirectory: '$(WorkItemDirectory)'
CorrelationPayloadDirectory: '$(CorrelationPayloadDirectory)'
ProjectFile: 'superpmi-replay.proj'
helixProjectArguments: '$(Build.SourcesDirectory)/src/coreclr/scripts/superpmi-replay.proj'
BuildConfig: ${{ parameters.buildConfig }}
osGroup: ${{ parameters.osGroup }}
archType: ${{ parameters.archType }}
@@ -111,7 +100,7 @@
condition: always()

- task: PublishPipelineArtifact@1
displayName: Publish Superpmi logs
displayName: Publish SuperPMI logs
inputs:
targetPath: $(SpmiLogsLocation)
artifactName: 'SuperPMI_Logs_$(archType)_$(buildConfig)'
@@ -121,5 +110,5 @@
displayName: Publish SuperPMI build logs
inputs:
targetPath: $(Build.SourcesDirectory)/artifacts/log
artifactName: 'SuperPMI_BuildLogs__$(archType)_$(buildConfig)'
condition: always()
artifactName: 'SuperPMI_BuildLogs_$(archType)_$(buildConfig)'
condition: always()