diff --git a/.github/workflows/test-mlperf-inference-intel b/.github/workflows/test-mlperf-inference-intel index 42201bd385..b52a7ff708 100644 --- a/.github/workflows/test-mlperf-inference-intel +++ b/.github/workflows/test-mlperf-inference-intel @@ -2,7 +2,7 @@ name: MLPerf Inference Intel implementations on: schedule: - - cron: "49 2 * * *" #to be adjusted + - cron: "54 14 * * *" #to be adjusted jobs: build_nvidia: @@ -21,5 +21,5 @@ jobs: source gh_action/bin/activate export CM_REPOS=$HOME/GH_CM pip install --upgrade cm4mlops - cm run script --tags=run-mlperf,inference,_all-scenarios,_submission,_full,_r4.1-dev --preprocess_submission=yes --execution_mode=valid --pull_changes=yes --pull_inference_changes=yes --model=${{ matrix.model }} --submitter="MLCommons" --hw_name=RTX4090x2 --implementation=intel --backend=pytorch --category=datacenter --division=open --scenario=Offline --docker_dt=yes --docker_it=no --docker_cm_repo=gateoverflow@cm4mlops --adr.compiler.tags=gcc --device=cpu --results_dir=$HOME/gh_action_results --submission_dir=$HOME/gh_action_submissions --clean --docker --quiet + cm run script --tags=run-mlperf,inference,_all-scenarios,_submission,_full,_r4.1-dev --preprocess_submission=yes --execution_mode=valid --pull_changes=yes --pull_inference_changes=yes --model=${{ matrix.model }} --submitter="MLCommons" --hw_name=IntelSPR.24c --implementation=intel --backend=pytorch --category=datacenter --division=open --scenario=Offline --docker_dt=yes --docker_it=no --docker_cm_repo=gateoverflow@cm4mlops --adr.compiler.tags=gcc --device=cpu --results_dir=$HOME/gh_action_results --submission_dir=$HOME/gh_action_submissions --clean --docker --quiet cm run script --tags=push,github,mlperf,inference,submission --repo_url=https://github.com/gateoverflow/mlperf_inference_unofficial_submissions_v5.0 --repo_branch=main --commit_message="Results from GH action on SPR.24c" --quiet --submission_dir=$HOME/gh_action_submissions --hw_name=IntelSPR.24c diff --git 
a/script/preprocess-mlperf-inference-submission/_cm.json b/script/preprocess-mlperf-inference-submission/_cm.json index 312ab7c2f1..6e1fade381 100644 --- a/script/preprocess-mlperf-inference-submission/_cm.json +++ b/script/preprocess-mlperf-inference-submission/_cm.json @@ -31,6 +31,7 @@ } ], "input_mapping": { + "input": "CM_MLPERF_INFERENCE_SUBMISSION_DIR", "submission_dir": "CM_MLPERF_INFERENCE_SUBMISSION_DIR", "submitter": "CM_MLPERF_SUBMITTER" }, diff --git a/script/preprocess-mlperf-inference-submission/customize.py b/script/preprocess-mlperf-inference-submission/customize.py index c5669338be..474e4a42ed 100644 --- a/script/preprocess-mlperf-inference-submission/customize.py +++ b/script/preprocess-mlperf-inference-submission/customize.py @@ -11,17 +11,21 @@ def preprocess(i): submission_dir = env.get("CM_MLPERF_INFERENCE_SUBMISSION_DIR", "") if submission_dir == "": - print("Please set CM_MLPERF_INFERENCE_SUBMISSION_DIR") + print("Please set --env.CM_MLPERF_INFERENCE_SUBMISSION_DIR") return {'return': 1, 'error':'CM_MLPERF_INFERENCE_SUBMISSION_DIR is not specified'} - submitter = env.get("CM_MLPERF_SUBMITTER", "cTuning") - submission_processed = submission_dir + "_processed" + if not os.path.exists(submission_dir): + print("Please set --env.CM_MLPERF_INFERENCE_SUBMISSION_DIR to a valid submission directory") + return {'return': 1, 'error':'CM_MLPERF_INFERENCE_SUBMISSION_DIR does not exist'} + + submission_dir = submission_dir.rstrip(os.path.sep) + submitter = env.get("CM_MLPERF_SUBMITTER", "MLCommons") + submission_processed = f"{submission_dir}_processed" if os.path.exists(submission_processed): + print(f"Cleaning {submission_processed}") shutil.rmtree(submission_processed) - os.system("rm -rf " + submission_dir + "_processed") - CMD = env['CM_PYTHON_BIN'] + " '" + os.path.join(env['CM_MLPERF_INFERENCE_SOURCE'], "tools", "submission", "preprocess_submission.py") + "' --input '" + submission_dir + "' --submitter '" + submitter + "' --output '" + 
submission_processed + "'" env['CM_RUN_CMD'] = CMD diff --git a/script/run-mlperf-inference-app/_cm.yaml b/script/run-mlperf-inference-app/_cm.yaml index 68518baf81..0105c1c70c 100644 --- a/script/run-mlperf-inference-app/_cm.yaml +++ b/script/run-mlperf-inference-app/_cm.yaml @@ -264,6 +264,8 @@ variations: tags: _size.50,_with-sample-ids nvidia-preprocess-data: extra_cache_tags: "scc24-base" + inference-src: + tags: _branch.dev deps: - tags: clean,nvidia,scratch,_sdxl,_downloaded-data extra_cache_rm_tags: scc24-main @@ -278,6 +280,8 @@ variations: tags: _size.500,_with-sample-ids nvidia-preprocess-data: extra_cache_tags: "scc24-main" + inference-src: + tags: _branch.dev env: CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX4: scc24-main CM_DOCKER_IMAGE_NAME: scc24