Merge pull request #496 from GATEOverflow/mlperf-inference
Cleanups to MLPerf inference preprocess script
arjunsuresh authored Nov 7, 2024
2 parents 05f4bf8 + 7a8a152 commit bdda54a
Showing 4 changed files with 16 additions and 7 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/test-mlperf-inference-intel
@@ -2,7 +2,7 @@ name: MLPerf Inference Intel implementations

on:
schedule:
- cron: "49 2 * * *" #to be adjusted
- cron: "54 14 * * *" #to be adjusted

jobs:
build_nvidia:
@@ -21,5 +21,5 @@ jobs:
source gh_action/bin/activate
export CM_REPOS=$HOME/GH_CM
pip install --upgrade cm4mlops
cm run script --tags=run-mlperf,inference,_all-scenarios,_submission,_full,_r4.1-dev --preprocess_submission=yes --execution_mode=valid --pull_changes=yes --pull_inference_changes=yes --model=${{ matrix.model }} --submitter="MLCommons" --hw_name=RTX4090x2 --implementation=intel --backend=pytorch --category=datacenter --division=open --scenario=Offline --docker_dt=yes --docker_it=no --docker_cm_repo=gateoverflow@cm4mlops --adr.compiler.tags=gcc --device=cpu --results_dir=$HOME/gh_action_results --submission_dir=$HOME/gh_action_submissions --clean --docker --quiet
cm run script --tags=run-mlperf,inference,_all-scenarios,_submission,_full,_r4.1-dev --preprocess_submission=yes --execution_mode=valid --pull_changes=yes --pull_inference_changes=yes --model=${{ matrix.model }} --submitter="MLCommons" --hw_name=IntelSPR.24c --implementation=intel --backend=pytorch --category=datacenter --division=open --scenario=Offline --docker_dt=yes --docker_it=no --docker_cm_repo=gateoverflow@cm4mlops --adr.compiler.tags=gcc --device=cpu --results_dir=$HOME/gh_action_results --submission_dir=$HOME/gh_action_submissions --clean --docker --quiet
cm run script --tags=push,github,mlperf,inference,submission --repo_url=https://github.com/gateoverflow/mlperf_inference_unofficial_submissions_v5.0 --repo_branch=main --commit_message="Results from GH action on SPR.24c" --quiet --submission_dir=$HOME/gh_action_submissions --hw_name=IntelSPR.24c
1 change: 1 addition & 0 deletions script/preprocess-mlperf-inference-submission/_cm.json
@@ -31,6 +31,7 @@
}
],
"input_mapping": {
"input": "CM_MLPERF_INFERENCE_SUBMISSION_DIR",
"submission_dir": "CM_MLPERF_INFERENCE_SUBMISSION_DIR",
"submitter": "CM_MLPERF_SUBMITTER"
},
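Note: the new "input" key is simply an alias for "submission_dir"; both now map to CM_MLPERF_INFERENCE_SUBMISSION_DIR. A hedged sketch of how this could be used from the cmind Python API follows; the tag list is an assumption for illustration only, since the script's actual tags are defined elsewhere in _cm.json and are not part of this diff.

    import cmind

    # Assumed tag list for illustration -- the real tags of
    # script/preprocess-mlperf-inference-submission are not shown in this diff.
    r = cmind.access({
        'action': 'run',
        'automation': 'script',
        'tags': 'preprocess,mlperf,inference,submission',
        'input': '/path/to/mlperf_submission_dir',  # new alias -> CM_MLPERF_INFERENCE_SUBMISSION_DIR
        'submitter': 'MLCommons'                    # -> CM_MLPERF_SUBMITTER
    })
    if r['return'] > 0:
        print(r.get('error', 'unknown error'))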
14 changes: 9 additions & 5 deletions script/preprocess-mlperf-inference-submission/customize.py
@@ -11,17 +11,21 @@ def preprocess(i):
submission_dir = env.get("CM_MLPERF_INFERENCE_SUBMISSION_DIR", "")

if submission_dir == "":
print("Please set CM_MLPERF_INFERENCE_SUBMISSION_DIR")
print("Please set --env.CM_MLPERF_INFERENCE_SUBMISSION_DIR")
return {'return': 1, 'error':'CM_MLPERF_INFERENCE_SUBMISSION_DIR is not specified'}

submitter = env.get("CM_MLPERF_SUBMITTER", "cTuning")
submission_processed = submission_dir + "_processed"
if not os.path.exists(submission_dir):
print("Please set --env.CM_MLPERF_INFERENCE_SUBMISSION_DIR to a valid submission directory")
return {'return': 1, 'error':'CM_MLPERF_INFERENCE_SUBMISSION_DIR is not existing'}

submission_dir = submission_dir.rstrip(os.path.sep)
submitter = env.get("CM_MLPERF_SUBMITTER", "MLCommons")
submission_processed = f"{submission_dir}_processed"

if os.path.exists(submission_processed):
print(f"Cleaning {submission_processed}")
shutil.rmtree(submission_processed)

os.system("rm -rf " + submission_dir + "_processed")

CMD = env['CM_PYTHON_BIN'] + " '" + os.path.join(env['CM_MLPERF_INFERENCE_SOURCE'], "tools", "submission",
"preprocess_submission.py") + "' --input '" + submission_dir + "' --submitter '" + submitter + "' --output '" + submission_processed + "'"
env['CM_RUN_CMD'] = CMD
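For reference, a minimal sketch of how the changed portion of preprocess(i) reads after this commit, reconstructed from the hunk above. The imports, the env handling at the top of the function, and the final return value are assumptions, not shown in this diff.

    import os
    import shutil


    def preprocess(i):
        env = i['env']  # assumed: the CM automation passes the environment dict here

        submission_dir = env.get("CM_MLPERF_INFERENCE_SUBMISSION_DIR", "")

        if submission_dir == "":
            print("Please set --env.CM_MLPERF_INFERENCE_SUBMISSION_DIR")
            return {'return': 1, 'error': 'CM_MLPERF_INFERENCE_SUBMISSION_DIR is not specified'}

        if not os.path.exists(submission_dir):
            print("Please set --env.CM_MLPERF_INFERENCE_SUBMISSION_DIR to a valid submission directory")
            return {'return': 1, 'error': 'CM_MLPERF_INFERENCE_SUBMISSION_DIR is not existing'}

        # Strip any trailing path separator so "<dir>_processed" is formed correctly
        submission_dir = submission_dir.rstrip(os.path.sep)
        submitter = env.get("CM_MLPERF_SUBMITTER", "MLCommons")
        submission_processed = f"{submission_dir}_processed"

        # Portable cleanup of a stale output directory (replaces the old os.system("rm -rf ..."))
        if os.path.exists(submission_processed):
            print(f"Cleaning {submission_processed}")
            shutil.rmtree(submission_processed)

        CMD = env['CM_PYTHON_BIN'] + " '" + os.path.join(env['CM_MLPERF_INFERENCE_SOURCE'], "tools", "submission",
                "preprocess_submission.py") + "' --input '" + submission_dir + "' --submitter '" + submitter + "' --output '" + submission_processed + "'"
        env['CM_RUN_CMD'] = CMD

        return {'return': 0}  # assumed: the remainder of the function is unchanged by this commit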
4 changes: 4 additions & 0 deletions script/run-mlperf-inference-app/_cm.yaml
@@ -264,6 +264,8 @@ variations:
tags: _size.50,_with-sample-ids
nvidia-preprocess-data:
extra_cache_tags: "scc24-base"
inference-src:
tags: _branch.dev
deps:
- tags: clean,nvidia,scratch,_sdxl,_downloaded-data
extra_cache_rm_tags: scc24-main
@@ -278,6 +280,8 @@ variations:
tags: _size.500,_with-sample-ids
nvidia-preprocess-data:
extra_cache_tags: "scc24-main"
inference-src:
tags: _branch.dev
env:
CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX4: scc24-main
CM_DOCKER_IMAGE_NAME: scc24
