Commit

Merge pull request #588 from mlcommons/mlperf-inference
Sync main <- Mlperf inference for November 2024 release
arjunsuresh authored Nov 22, 2024
2 parents b62a227 + 6eefe2f commit 4d5265a
Showing 4 changed files with 58 additions and 3 deletions.
33 changes: 32 additions & 1 deletion .github/workflows/test-cm-based-submission-generation.yml
@@ -19,12 +19,29 @@ jobs:
python-version: [ "3.12" ]
division: ["closed", "open", "closed-open"]
category: ["datacenter", "edge"]
case: ["closed", "closed-no-compliance", "closed-power", "closed-failed-power-logs", "case-3", "case-7", "case-8"]
case: ["closed", "closed-no-compliance", "closed-power", "closed-failed-power-logs", "case-1", "case-2", "case-3", "case-5", "case-6", "case-7", "case-8"]
action: ["run", "docker"]
exclude:
- os: macos-latest
- os: windows-latest
- category: "edge"
- case: case-1
division: closed
- case: case-1
division: closed-open
- case: case-2
division: closed
- case: case-2
division: closed-open
- case: case-5
division: closed
- case: case-5
division: closed-open
- case: case-6
division: closed
- case: case-6
division: closed-open

steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
@@ -39,6 +56,7 @@ jobs:
run: |
git clone -b submission-generation-tests https://github.com/mlcommons/inference.git submission_generation_tests
- name: Run Submission Generation - ${{ matrix.case }} ${{ matrix.action }} ${{ matrix.category }} ${{ matrix.division }}
continue-on-error: true
run: |
if [ "${{ matrix.case }}" == "case-3" ]; then
description="Submission generation (model_mapping.json not present but model name matches with official one)"
@@ -63,5 +81,18 @@
# Dynamically set the log group to simulate a dynamic step name
echo "::group::$description"
cm ${{ matrix.action }} script --tags=generate,inference,submission --adr.submission-checker-src.tags=_branch.dev --clean --preprocess_submission=yes --results_dir=$PWD/submission_generation_tests/${{ matrix.case }}/ --run-checker --submitter=MLCommons --tar=yes --env.CM_TAR_OUTFILE=submission.tar.gz --division=${{ matrix.division }} --env.CM_DETERMINE_MEMORY_CONFIGURATION=yes --quiet $extra_run_args
exit_status=$?
echo "Exit status for the job ${description} ${exit_status}"
if [[ "${{ matrix.case }}" == "case-5" || "${{ matrix.case }}" == "case-6" ]]; then
# For cases 5 and 6, the exit status should be 0 if the cm command fails and 1 if it succeeds
if [[ ${exit_status} -ne 0 ]]; then
exit 0
else
exit ${exit_status}
fi
else
# For other cases, exit with the original status
test ${exit_status} -eq 0 || exit ${exit_status}
fi
echo "::endgroup::"
2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
0.4.0
0.4.1
1 change: 1 addition & 0 deletions script/generate-mlperf-inference-submission/_cm.yaml
@@ -83,6 +83,7 @@ input_mapping:
submitter: CM_MLPERF_SUBMITTER
sw_notes_extra: CM_MLPERF_SUT_SW_NOTES_EXTRA
tar: CM_TAR_SUBMISSION_DIR
get_platform_details: CM_GET_PLATFORM_DETAILS
post_deps:
- enable_if_env:
CM_RUN_MLPERF_ACCURACY:
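For orientation: entries under input_mapping in a CM script's _cm.yaml map command-line inputs of `cm run script` onto environment variables that customize.py later reads, so the new get_platform_details input surfaces as CM_GET_PLATFORM_DETAILS. A simplified Python sketch of that translation step (an assumption-level illustration of the mechanism, not the actual CM automation code):

```python
# Simplified illustration (not the actual CM automation code) of how an
# input_mapping entry turns a cm script input into an environment variable
# consumed by customize.py.
input_mapping = {
    "get_platform_details": "CM_GET_PLATFORM_DETAILS",
    "tar": "CM_TAR_SUBMISSION_DIR",
}

def apply_input_mapping(inputs: dict, mapping: dict) -> dict:
    """Build the env dict passed to customize.py from user-supplied inputs."""
    return {mapping[key]: value for key, value in inputs.items() if key in mapping}

env = apply_input_mapping({"get_platform_details": "yes"}, input_mapping)
assert env == {"CM_GET_PLATFORM_DETAILS": "yes"}
```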
25 changes: 24 additions & 1 deletion script/generate-mlperf-inference-submission/customize.py
@@ -240,6 +240,7 @@ def generate_submission(env, state, inp, submission_division):
compliance_path = os.path.join(path_submission, "compliance", sub_res)
system_path = os.path.join(path_submission, "systems")
submission_system_path = system_path

if not os.path.isdir(submission_system_path):
os.makedirs(submission_system_path)
system_file = os.path.join(submission_system_path, sub_res+".json")
@@ -273,6 +274,8 @@ def generate_submission(env, state, inp, submission_division):

print('* MLPerf inference model: {}'.format(model))
for scenario in scenarios:
# system_info.txt is copied from the mode directory if found; otherwise it is looked for under the scenario directory
system_info_from_mode = False
results[model][scenario] = {}
result_scenario_path = os.path.join(result_model_path, scenario)
submission_scenario_path = os.path.join(submission_model_path, scenario)
@@ -429,6 +432,8 @@ def generate_submission(env, state, inp, submission_division):
elif f in [ "README.md", "README-extra.md", "cm-version-info.json", "os_info.json", "cpu_info.json", "pip_freeze.json", "system_info.txt", "cm-deps.png", "cm-deps.mmd" ] and mode == "performance":
shutil.copy(os.path.join(result_mode_path, f), os.path.join(submission_measurement_path, f))
if f == "system_info.txt" and not platform_info_file:
# the first system_info.txt found is taken as the platform info file for this model and is placed in the
# measurements-model folder when generating the final submission
platform_info_file = os.path.join(result_mode_path, f)
elif f in [ "console.out" ]:
shutil.copy(os.path.join(result_mode_path, f), os.path.join(submission_measurement_path, mode+"_"+f))
@@ -445,6 +450,9 @@ def generate_submission(env, state, inp, submission_division):
p_target = os.path.join(submission_results_path, f)
shutil.copy(os.path.join(result_mode_path, f), p_target)

if os.path.exists(os.path.join(result_scenario_path, "system_info.txt")):
shutil.copy(os.path.join(result_scenario_path, "system_info.txt"), os.path.join(submission_measurement_path, f))
platform_info_file = os.path.join(result_scenario_path, "system_info.txt")

readme_file = os.path.join(submission_measurement_path, "README.md")
if not os.path.exists(readme_file):
@@ -459,24 +467,39 @@ def generate_submission(env, state, inp, submission_division):
with open(readme_file, mode='a') as f:
f.write(result_string)

#Copy system_info.txt to the submission measurements model folder if any scenario performance run has it
# Copy system_info.txt to the submission measurements model folder if any scenario performance run has it
sys_info_file = None

if os.path.exists(os.path.join(result_model_path, "system_info.txt")):
sys_info_file = os.path.join(result_model_path, "system_info.txt")
elif platform_info_file:
sys_info_file = platform_info_file

if sys_info_file:
model_platform_info_file = sys_info_file
shutil.copy(sys_info_file, os.path.join(measurement_model_path, "system_info.txt"))

#Copy system_info.txt to the submission measurements folder if any model performance run has it
sys_info_file = None

if os.path.exists(os.path.join(result_path, "system_info.txt")):
sys_info_file = os.path.join(result_path, "system_info.txt")
elif model_platform_info_file:
sys_info_file = model_platform_info_file

if sys_info_file:
shutil.copy(sys_info_file, os.path.join(measurement_path, "system_info.txt"))
else:
if env.get('CM_GET_PLATFORM_DETAILS', '') == "yes":
cm_input = {'action': 'run',
'automation': 'script',
'tags': 'get,platform,details',
'env': {'CM_PLATFORM_DETAILS_FILE_PATH': os.path.join(measurement_path, "system_info.txt")},
'quiet': True
}
r = cmind.access(cm_input)
if r['return'] > 0:
return r


with open(system_file, "w") as fp:
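Taken together, the customize.py changes propagate any system_info.txt found at the mode, scenario, or model level into the corresponding measurements folders and, when none is found for the top-level measurements folder and CM_GET_PLATFORM_DETAILS is "yes", generate one through the get,platform,details CM script. A condensed Python sketch of that final fallback (illustration only; the helper name and simplified paths are assumptions, while the cmind.access call mirrors the one added in this commit):

```python
import os
import shutil
from typing import Optional

import cmind  # CM automation framework already used by customize.py


def ensure_system_info(measurement_path: str, found_file: Optional[str], env: dict) -> dict:
    """Place system_info.txt into the measurements folder, generating it if allowed."""
    target = os.path.join(measurement_path, "system_info.txt")
    if found_file and os.path.exists(found_file):
        # Reuse a system_info.txt found earlier in the results tree.
        shutil.copy(found_file, target)
        return {"return": 0}
    if env.get("CM_GET_PLATFORM_DETAILS", "") == "yes":
        # Fall back to generating platform details with the CM script,
        # mirroring the cmind.access call added in this commit.
        r = cmind.access({
            "action": "run",
            "automation": "script",
            "tags": "get,platform,details",
            "env": {"CM_PLATFORM_DETAILS_FILE_PATH": target},
            "quiet": True,
        })
        if r["return"] > 0:
            return r
    return {"return": 0}
```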
