diff --git a/.azure-pipelines/code-scan-neural-insights.yaml b/.azure-pipelines/code-scan-neural-insights.yaml
index 532bcdfc581..eaf741c7ec1 100644
--- a/.azure-pipelines/code-scan-neural-insights.yaml
+++ b/.azure-pipelines/code-scan-neural-insights.yaml
@@ -57,55 +57,3 @@ stages:
codeScanFileName: "pylint"
uploadPath: "pylint.json"
scanModule: "neural_insights"
-
- - stage: CopyRight
- displayName: CopyRight Code Scan
- dependsOn: []
- jobs:
- - job: CopyRight
- displayName: CopyRight
- steps:
- - script: |
- rm -fr $(Build.SourcesDirectory) || true
- echo y | docker system prune
- displayName: "Clean workspace"
-
- - checkout: self
- displayName: "Checkout out Repo"
-
- - task: Bash@3
- inputs:
- targetType: "inline"
- script: |
- source $(Build.SourcesDirectory)/.azure-pipelines/scripts/change_color.sh
- set -e
- mkdir -p $(Build.SourcesDirectory)/$(CODE_SCAN_LOG_PATH)
- RESET="echo -en \\E[0m \\n" # close
-
- supported_extensions=(py, sh, yaml)
- git --no-pager diff --name-only $(git show-ref -s remotes/origin/$(System.PullRequest.TargetBranch)) $(Build.SourcesDirectory)/neural_insights > $(Build.SourcesDirectory)/$(CODE_SCAN_LOG_PATH)/diff.log
- files=$(cat $(Build.SourcesDirectory)/$(CODE_SCAN_LOG_PATH)/diff.log | awk '!a[$0]++')
-
- for file in ${files}
- do
- if [[ "${supported_extensions[@]}" =~ "${file##*.}" ]]; then
- if [ $(grep -E -c "Copyright \\(c\\) ([0-9]{4})(-[0-9]{4})? Intel Corporation" ${file}) = 0 ]; then
- echo ${file} >> $(Build.SourcesDirectory)/$(CODE_SCAN_LOG_PATH)/copyright_issue_summary.log
- $BOLD_YELLOW && echo " ----------------- Current log file output start --------------------------"
- cat $(Build.SourcesDirectory)/$(CODE_SCAN_LOG_PATH)/copyright_issue_summary.log
- $BOLD_YELLOW && echo " ----------------- Current log file output end --------------------------" && $RESET
- $BOLD_RED && echo "CopyRight has something wrong! Please click on the artifact button to download and view the error log!" && $RESET; exit 1
- fi
- else
- $LIGHT_PURPLE && echo "Skipping ${file}" && $RESET
- fi
- done
- displayName: "CopyRight Check"
-
- - task: PublishPipelineArtifact@1
- condition: failed()
- inputs:
- targetPath: $(Build.SourcesDirectory)/$(CODE_SCAN_LOG_PATH)/copyright_issue_summary.log
- artifact: copyright
- publishLocation: "pipeline"
- displayName: "PublishPipelineArtifact"
diff --git a/.azure-pipelines/code-scan-neural-solution.yaml b/.azure-pipelines/code-scan-neural-solution.yaml
index 77e4e54962b..301c7010465 100644
--- a/.azure-pipelines/code-scan-neural-solution.yaml
+++ b/.azure-pipelines/code-scan-neural-solution.yaml
@@ -57,55 +57,3 @@ stages:
codeScanFileName: "pylint"
uploadPath: "pylint.json"
scanModule: "neural_solution"
-
- - stage: CopyRight
- displayName: CopyRight Code Scan
- dependsOn: []
- jobs:
- - job: CopyRight
- displayName: CopyRight
- steps:
- - script: |
- rm -fr $(Build.SourcesDirectory) || true
- echo y | docker system prune
- displayName: "Clean workspace"
-
- - checkout: self
- displayName: "Checkout out Repo"
-
- - task: Bash@3
- inputs:
- targetType: "inline"
- script: |
- source $(Build.SourcesDirectory)/.azure-pipelines/scripts/change_color.sh
- set -e
- mkdir -p $(Build.SourcesDirectory)/$(CODE_SCAN_LOG_PATH)
- RESET="echo -en \\E[0m \\n" # close
-
- supported_extensions=(py, sh, yaml)
- git --no-pager diff --name-only $(git show-ref -s remotes/origin/$(System.PullRequest.TargetBranch)) $(Build.SourcesDirectory)/neural_solution > $(Build.SourcesDirectory)/$(CODE_SCAN_LOG_PATH)/diff.log
- files=$(cat $(Build.SourcesDirectory)/$(CODE_SCAN_LOG_PATH)/diff.log | awk '!a[$0]++')
-
- for file in ${files}
- do
- if [[ "${supported_extensions[@]}" =~ "${file##*.}" ]]; then
- if [ $(grep -E -c "Copyright \\(c\\) ([0-9]{4})(-[0-9]{4})? Intel Corporation" ${file}) = 0 ]; then
- echo ${file} >> $(Build.SourcesDirectory)/$(CODE_SCAN_LOG_PATH)/copyright_issue_summary.log
- $BOLD_YELLOW && echo " ----------------- Current log file output start --------------------------"
- cat $(Build.SourcesDirectory)/$(CODE_SCAN_LOG_PATH)/copyright_issue_summary.log
- $BOLD_YELLOW && echo " ----------------- Current log file output end --------------------------" && $RESET
- $BOLD_RED && echo "CopyRight has something wrong! Please click on the artifact button to download and view the error log!" && $RESET; exit 1
- fi
- else
- $LIGHT_PURPLE && echo "Skipping ${file}" && $RESET
- fi
- done
- displayName: "CopyRight Check"
-
- - task: PublishPipelineArtifact@1
- condition: failed()
- inputs:
- targetPath: $(Build.SourcesDirectory)/$(CODE_SCAN_LOG_PATH)/copyright_issue_summary.log
- artifact: copyright
- publishLocation: "pipeline"
- displayName: "PublishPipelineArtifact"
diff --git a/.azure-pipelines/code-scan.yml b/.azure-pipelines/code-scan.yml
index 4401183ab57..afc894ee034 100644
--- a/.azure-pipelines/code-scan.yml
+++ b/.azure-pipelines/code-scan.yml
@@ -56,55 +56,3 @@ stages:
parameters:
codeScanFileName: "pylint"
uploadPath: "pylint.json"
-
- - stage: CopyRight
- displayName: CopyRight Code Scan
- dependsOn: []
- jobs:
- - job: CopyRight
- displayName: CopyRight
- steps:
- - script: |
- rm -fr $(Build.SourcesDirectory) || true
- echo y | docker system prune
- displayName: "Clean workspace"
-
- - checkout: self
- displayName: "Checkout out Repo"
-
- - task: Bash@3
- inputs:
- targetType: "inline"
- script: |
- source $(Build.SourcesDirectory)/.azure-pipelines/scripts/change_color.sh
- set -e
- mkdir -p $(Build.SourcesDirectory)/$(CODE_SCAN_LOG_PATH)
- RESET="echo -en \\E[0m \\n" # close
-
- supported_extensions=(py, sh, yaml)
- git --no-pager diff --name-only $(git show-ref -s remotes/origin/$(System.PullRequest.TargetBranch)) $(Build.SourcesDirectory)/neural_compressor > $(Build.SourcesDirectory)/$(CODE_SCAN_LOG_PATH)/diff.log
- files=$(cat $(Build.SourcesDirectory)/$(CODE_SCAN_LOG_PATH)/diff.log | awk '!a[$0]++')
-
- for file in ${files}
- do
- if [[ "${supported_extensions[@]}" =~ "${file##*.}" ]]; then
- if [ $(grep -E -c "Copyright \\(c\\) ([0-9]{4})(-[0-9]{4})? Intel Corporation" ${file}) = 0 ]; then
- echo ${file} >> $(Build.SourcesDirectory)/$(CODE_SCAN_LOG_PATH)/copyright_issue_summary.log
- $BOLD_YELLOW && echo " ----------------- Current log file output start --------------------------"
- cat $(Build.SourcesDirectory)/$(CODE_SCAN_LOG_PATH)/copyright_issue_summary.log
- $BOLD_YELLOW && echo " ----------------- Current log file output end --------------------------" && $RESET
- $BOLD_RED && echo "CopyRight has something wrong! Please click on the artifact button to download and view the error log!" && $RESET; exit 1
- fi
- else
- $LIGHT_PURPLE && echo "Skipping ${file}" && $RESET
- fi
- done
- displayName: "CopyRight Check"
-
- - task: PublishPipelineArtifact@1
- condition: failed()
- inputs:
- targetPath: $(Build.SourcesDirectory)/$(CODE_SCAN_LOG_PATH)/copyright_issue_summary.log
- artifact: copyright
- publishLocation: "pipeline"
- displayName: "PublishPipelineArtifact"
diff --git a/.azure-pipelines/scripts/ut/3x/collect_log_3x.sh b/.azure-pipelines/scripts/ut/3x/collect_log_3x.sh
index 3cc75c1e85d..386ec397c81 100644
--- a/.azure-pipelines/scripts/ut/3x/collect_log_3x.sh
+++ b/.azure-pipelines/scripts/ut/3x/collect_log_3x.sh
@@ -9,7 +9,7 @@ coverage_compare="/neural-compressor/log_dir/coverage_compare.html"
cd /neural-compressor/log_dir
$BOLD_YELLOW && echo "collect coverage for PR branch" && $RESET
-cp ut_coverage_3x/.coverage /neural-compressor/
+cp ut_3x_coverage/.coverage /neural-compressor/
mkdir -p coverage_PR
cd /neural-compressor
coverage report -m --rcfile=${COVERAGE_RCFILE} | tee ${coverage_log}
@@ -20,18 +20,19 @@ ls -l log_dir/coverage_PR/htmlcov
$BOLD_YELLOW && echo "collect coverage for baseline" && $RESET
cd /neural-compressor
+cp -r /neural-compressor/.azure-pipelines .azure-pipelines-pr
git config --global --add safe.directory /neural-compressor
git fetch
git checkout master
rm -rf build dist *egg-info
echo y | pip uninstall neural_compressor_${1}
-cd /neural-compressor/.azure-pipelines/scripts && bash install_nc.sh ${1}
+cd /neural-compressor/.azure-pipelines-pr/scripts && bash install_nc.sh ${1}
coverage erase
cd /neural-compressor/log_dir
mkdir -p coverage_base
rm -rf /neural-compressor/.coverage || true
-cp ut_coverage_3x_baseline/.coverage /neural-compressor
+cp ut_3x_baseline_coverage/.coverage /neural-compressor
cd /neural-compressor
coverage report -m --rcfile=${COVERAGE_RCFILE} | tee ${coverage_log_base}
@@ -123,10 +124,10 @@ if [[ ${#fail_items[@]} -ne 0 ]]; then
$BOLD_RED && echo "Unit Test failed with ${item} coverage decrease ${decrease}%" && $RESET
done
$BOLD_RED && echo "compare coverage to give detail info" && $RESET
- bash /neural-compressor/.azure-pipelines/scripts/ut/compare_coverage.sh ${coverage_compare} ${coverage_log} ${coverage_log_base} "FAILED" ${coverage_PR_lines_rate} ${coverage_base_lines_rate} ${coverage_PR_branches_rate} ${coverage_base_branches_rate}
+ bash /neural-compressor/.azure-pipelines-pr/scripts/ut/compare_coverage.sh ${coverage_compare} ${coverage_log} ${coverage_log_base} "FAILED" ${coverage_PR_lines_rate} ${coverage_base_lines_rate} ${coverage_PR_branches_rate} ${coverage_base_branches_rate}
exit 1
else
$BOLD_GREEN && echo "Unit Test success with coverage lines: ${coverage_PR_lines_rate}%, branches: ${coverage_PR_branches_rate}%" && $RESET
$BOLD_GREEN && echo "compare coverage to give detail info" && $RESET
- bash /neural-compressor/.azure-pipelines/scripts/ut/compare_coverage.sh ${coverage_compare} ${coverage_log} ${coverage_log_base} "SUCCESS" ${coverage_PR_lines_rate} ${coverage_base_lines_rate} ${coverage_PR_branches_rate} ${coverage_base_branches_rate}
+ bash /neural-compressor/.azure-pipelines-pr/scripts/ut/compare_coverage.sh ${coverage_compare} ${coverage_log} ${coverage_log_base} "SUCCESS" ${coverage_PR_lines_rate} ${coverage_base_lines_rate} ${coverage_PR_branches_rate} ${coverage_base_branches_rate}
fi
diff --git a/.azure-pipelines/scripts/ut/3x/run_3x_ort.sh b/.azure-pipelines/scripts/ut/3x/run_3x_ort.sh
index cf5a2fbe301..5f8550ea742 100644
--- a/.azure-pipelines/scripts/ut/3x/run_3x_ort.sh
+++ b/.azure-pipelines/scripts/ut/3x/run_3x_ort.sh
@@ -21,7 +21,6 @@ mkdir -p ${LOG_DIR}
ut_log_name=${LOG_DIR}/ut_3x_ort.log
pytest --cov="${inc_path}" -vs --disable-warnings --html=report.html --self-contained-html . 2>&1 | tee -a ${ut_log_name}
-cp .coverage ${LOG_DIR}/.coverage
cp report.html ${LOG_DIR}/
if [ $(grep -c '== FAILURES ==' ${ut_log_name}) != 0 ] || [ $(grep -c '== ERRORS ==' ${ut_log_name}) != 0 ] || [ $(grep -c ' passed' ${ut_log_name}) == 0 ]; then
@@ -30,4 +29,7 @@ if [ $(grep -c '== FAILURES ==' ${ut_log_name}) != 0 ] || [ $(grep -c '== ERRORS
exit 1
fi
+# if ut pass, collect the coverage file into artifacts
+cp .coverage ${LOG_DIR}/.coverage
+
echo "UT finished successfully! "
\ No newline at end of file
diff --git a/.azure-pipelines/scripts/ut/3x/run_3x_pt.sh b/.azure-pipelines/scripts/ut/3x/run_3x_pt.sh
index 011cb563606..b91bc182c7c 100644
--- a/.azure-pipelines/scripts/ut/3x/run_3x_pt.sh
+++ b/.azure-pipelines/scripts/ut/3x/run_3x_pt.sh
@@ -21,7 +21,6 @@ mkdir -p ${LOG_DIR}
ut_log_name=${LOG_DIR}/ut_3x_pt.log
pytest --cov="${inc_path}" -vs --disable-warnings --html=report.html --self-contained-html . 2>&1 | tee -a ${ut_log_name}
-cp .coverage ${LOG_DIR}/.coverage
cp report.html ${LOG_DIR}/
if [ $(grep -c '== FAILURES ==' ${ut_log_name}) != 0 ] || [ $(grep -c '== ERRORS ==' ${ut_log_name}) != 0 ] || [ $(grep -c ' passed' ${ut_log_name}) == 0 ]; then
@@ -30,4 +29,7 @@ if [ $(grep -c '== FAILURES ==' ${ut_log_name}) != 0 ] || [ $(grep -c '== ERRORS
exit 1
fi
+# if ut pass, collect the coverage file into artifacts
+cp .coverage ${LOG_DIR}/.coverage
+
echo "UT finished successfully! "
\ No newline at end of file
diff --git a/.azure-pipelines/scripts/ut/3x/run_3x_tf.sh b/.azure-pipelines/scripts/ut/3x/run_3x_tf.sh
index 3ac969c5c4f..453c9d46321 100644
--- a/.azure-pipelines/scripts/ut/3x/run_3x_tf.sh
+++ b/.azure-pipelines/scripts/ut/3x/run_3x_tf.sh
@@ -34,7 +34,6 @@ mkdir -p report
mv *.html report
pytest_html_merger -i ./report -o ./report.html
-cp .coverage ${LOG_DIR}/.coverage
cp report.html ${LOG_DIR}/
if [ $(grep -c '== FAILURES ==' ${ut_log_name}) != 0 ] || [ $(grep -c '== ERRORS ==' ${ut_log_name}) != 0 ] || [ $(grep -c ' passed' ${ut_log_name}) == 0 ]; then
@@ -43,4 +42,7 @@ if [ $(grep -c '== FAILURES ==' ${ut_log_name}) != 0 ] || [ $(grep -c '== ERRORS
exit 1
fi
+# if ut pass, collect the coverage file into artifacts
+cp .coverage ${LOG_DIR}/.coverage
+
echo "UT finished successfully! "
\ No newline at end of file
diff --git a/.azure-pipelines/scripts/ut/collect_log.sh b/.azure-pipelines/scripts/ut/collect_log.sh
index cc5bc1618b9..e6749046d6e 100644
--- a/.azure-pipelines/scripts/ut/collect_log.sh
+++ b/.azure-pipelines/scripts/ut/collect_log.sh
@@ -9,13 +9,8 @@ cd /neural-compressor/log_dir
$BOLD_YELLOW && echo "collect coverage for PR branch" && $RESET
mkdir -p coverage_PR
-cp ut-coverage-adaptor/.coverage.adaptor ./coverage_PR/
-cp ut-coverage-api/.coverage.api ./coverage_PR/
-cp ut-coverage-tf-pruning/.coverage.tf_pruning ./coverage_PR/
-cp ut-coverage-pt-pruning/.coverage.pt_pruning ./coverage_PR/
-cp ut-coverage-tfnewapi/.coverage.tfnewapi ./coverage_PR/
-cp ut-coverage-others/.coverage.others ./coverage_PR/
-cp ut-coverage-itex/.coverage.itex ./coverage_PR/
+cp ut_*_coverage/.coverage.* ./coverage_PR/
+
cd coverage_PR
coverage combine --keep --rcfile=${COVERAGE_RCFILE}
cp .coverage /neural-compressor/.coverage
@@ -26,24 +21,20 @@ coverage xml -o log_dir/coverage_PR/coverage.xml --rcfile=${COVERAGE_RCFILE}
ls -l log_dir/coverage_PR/htmlcov
cd /neural-compressor
+cp -r /neural-compressor/.azure-pipelines .azure-pipelines-pr
git config --global --add safe.directory /neural-compressor
git fetch
git checkout master
rm -rf build dist *egg-info
echo y | pip uninstall neural-compressor
-cd /neural-compressor/.azure-pipelines/scripts && bash install_nc.sh
+cd /neural-compressor/.azure-pipelines-pr/scripts && bash install_nc.sh
$BOLD_YELLOW && echo "collect coverage for baseline" && $RESET
coverage erase
cd /neural-compressor/log_dir
mkdir -p coverage_base
-cp ut-coverage-adaptor-base/.coverage.adaptor ./coverage_base/
-cp ut-coverage-api-base/.coverage.api ./coverage_base/
-cp ut-coverage-tf-pruning-base/.coverage.tf_pruning ./coverage_base/
-cp ut-coverage-pt-pruning-base/.coverage.pt_pruning ./coverage_base/
-cp ut-coverage-tfnewapi-base/.coverage.tfnewapi ./coverage_base/
-cp ut-coverage-others-base/.coverage.others ./coverage_base/
-cp ut-coverage-itex-base/.coverage.itex ./coverage_base/
+cp ut-base_*_coverage/.coverage.* ./coverage_base/
+
cd coverage_base
coverage combine --keep --rcfile=${COVERAGE_RCFILE}
cp .coverage /neural-compressor/.coverage
@@ -137,10 +128,10 @@ if [[ ${#fail_items[@]} -ne 0 ]]; then
$BOLD_RED && echo "Unit Test failed with ${item} coverage decrease ${decrease}%" && $RESET
done
$BOLD_RED && echo "compare coverage to give detail info" && $RESET
- bash /neural-compressor/.azure-pipelines/scripts/ut/compare_coverage.sh ${coverage_compare} ${coverage_log} ${coverage_log_base} "FAILED" ${coverage_PR_lines_rate} ${coverage_base_lines_rate} ${coverage_PR_branches_rate} ${coverage_base_branches_rate}
+ bash /neural-compressor/.azure-pipelines-pr/scripts/ut/compare_coverage.sh ${coverage_compare} ${coverage_log} ${coverage_log_base} "FAILED" ${coverage_PR_lines_rate} ${coverage_base_lines_rate} ${coverage_PR_branches_rate} ${coverage_base_branches_rate}
exit 1
else
$BOLD_GREEN && echo "Unit Test success with coverage lines: ${coverage_PR_lines_rate}%, branches: ${coverage_PR_branches_rate}%" && $RESET
$BOLD_GREEN && echo "compare coverage to give detail info" && $RESET
- bash /neural-compressor/.azure-pipelines/scripts/ut/compare_coverage.sh ${coverage_compare} ${coverage_log} ${coverage_log_base} "SUCCESS" ${coverage_PR_lines_rate} ${coverage_base_lines_rate} ${coverage_PR_branches_rate} ${coverage_base_branches_rate}
+ bash /neural-compressor/.azure-pipelines-pr/scripts/ut/compare_coverage.sh ${coverage_compare} ${coverage_log} ${coverage_log_base} "SUCCESS" ${coverage_PR_lines_rate} ${coverage_base_lines_rate} ${coverage_PR_branches_rate} ${coverage_base_branches_rate}
fi
diff --git a/.azure-pipelines/scripts/ut/compare_coverage.sh b/.azure-pipelines/scripts/ut/compare_coverage.sh
index fd059e637aa..0831efbcb4f 100644
--- a/.azure-pipelines/scripts/ut/compare_coverage.sh
+++ b/.azure-pipelines/scripts/ut/compare_coverage.sh
@@ -85,19 +85,21 @@ eof
function extract_diff_data() {
local file_name=$1 diff_file=$2 reg=$3
local file=$(cat $file_name | grep "${diff_file}" | grep -v ".*/${diff_file}" | grep -Po "${reg}.*" | sed "s/${reg}[ \t]*//g" | awk '{print $1}')
+ local stmts=$(cat $file_name | grep "${diff_file}" | grep -v ".*/${diff_file}" | grep -Po "${reg}.*" | sed "s/${reg}[ \t]*//g" | awk '{print $2}')
local miss=$(cat $file_name | grep "${diff_file}" | grep -v ".*/${diff_file}" | grep -Po "${reg}.*" | sed "s/${reg}[ \t]*//g" | awk '{print $3}')
local cover=$(cat $file_name | grep "${diff_file}" | grep -v ".*/${diff_file}" | grep -Po "${reg}.*" | sed "s/${reg}[ \t]*//g" | awk '{print $6}')
local branch=$(cat $file_name | grep "${diff_file}" | grep -v ".*/${diff_file}" | grep -Po "${reg}.*" | sed "s/${reg}[ \t]*//g" | awk '{print $4}')
- echo "$file $miss $cover $branch"
+ echo "$file $stmts $miss $cover $branch"
}
function write_compare_details() {
- local file=$1 miss1=$2 branch1=$3 cover1=$4 miss2=$5 branch2=$6 cover2=$7
+ local file=$1 stmts1=$2 miss1=$3 branch1=$4 cover1=$5 stmts2=$6 miss2=$7 branch2=$8 cover2=$9
echo """
PR | BASE |
${file} |
+ ${stmts1} | ${stmts2} |
${miss1} | ${miss2} |
${branch1} | ${branch2} |
${cover1} | ${cover2} |
@@ -159,6 +161,7 @@ function generate_coverage_details() {
Commit |
FileName |
+ Stmts |
Miss |
Branch |
Cover |
@@ -172,8 +175,8 @@ function generate_coverage_details() {
for diff_file in ${diff_file_name}; do
diff_file=$(echo "${diff_file}" | sed 's/[ \t]*//g')
diff_coverage_data=$(extract_diff_data ${file_name} ${diff_file} ">")
- read file miss cover branch <<<"$diff_coverage_data"
- write_compare_details $file "NA" "NA" "NA" $miss $branch $cover
+ read file stmts miss cover branch <<<"$diff_coverage_data"
+ write_compare_details $file "NA" "NA" "NA" "NA" $stmts $miss $branch $cover
done
elif [[ $(echo $line | grep "[0-9]c[0-9]") ]] && [[ $(cat ${file_name} | grep -A 1 "$line" | grep "<") ]]; then
diff_lines=$(sed -n "/${line}/,/^[0-9]/p" ${file_name} | grep "<")
@@ -181,10 +184,10 @@ function generate_coverage_details() {
for diff_file in ${diff_file_name}; do
diff_file=$(echo "${diff_file}" | sed 's/[ \t]*//g')
diff_coverage_data1=$(extract_diff_data ${file_name} ${diff_file} "<")
- read file1 miss1 cover1 branch1 <<<"$diff_coverage_data1"
+ read file1 stmts1 miss1 cover1 branch1 <<<"$diff_coverage_data1"
diff_coverage_data2=$(extract_diff_data ${file_name} ${diff_file} ">")
- read file2 miss2 cover2 branch2 <<<"$diff_coverage_data2"
- write_compare_details $file1 $miss1 $branch1 $cover1 $miss2 $branch2 $cover2
+ read file2 stmts2 miss2 cover2 branch2 <<<"$diff_coverage_data2"
+ write_compare_details $file1 $stmts1 $miss1 $branch1 $cover1 $stmts2 $miss2 $branch2 $cover2
done
elif [[ $(echo $line | grep "[0-9]d[0-9]") ]] && [[ $(cat ${file_name} | grep -A 1 "$line" | grep "<") ]]; then
diff_lines=$(sed -n "/${line}/,/^[0-9]/p" ${file_name} | grep "<")
@@ -192,8 +195,8 @@ function generate_coverage_details() {
for diff_file in ${diff_file_name}; do
diff_file=$(echo "${diff_file}" | sed 's/[ \t]*//g')
diff_coverage_data=$(extract_diff_data ${file_name} ${diff_file} "<")
- read file miss cover branch <<<"$diff_coverage_data"
- write_compare_details $file $miss $branch $cover "NA" "NA" "NA"
+ read file stmts miss cover branch <<<"$diff_coverage_data"
+ write_compare_details $file $stmts $miss $branch $cover "NA" "NA" "NA" "NA"
done
fi
done
diff --git a/.azure-pipelines/scripts/ut/run_basic_adaptor.sh b/.azure-pipelines/scripts/ut/run_basic_adaptor.sh
index fb578e4c83d..787345dbfdb 100644
--- a/.azure-pipelines/scripts/ut/run_basic_adaptor.sh
+++ b/.azure-pipelines/scripts/ut/run_basic_adaptor.sh
@@ -22,11 +22,13 @@ sort run.sh -o run.sh
cat run.sh | tee ${ut_log_name}
echo "------UT start-------"
bash -x run.sh 2>&1 | tee -a ${ut_log_name}
-cp .coverage ${LOG_DIR}/.coverage.adaptor
echo "------UT end -------"
if [ $(grep -c "FAILED" ${ut_log_name}) != 0 ] || [ $(grep -c "core dumped" ${ut_log_name}) != 0 ] || [ $(grep -c "ModuleNotFoundError:" ${ut_log_name}) != 0 ] || [ $(grep -c "OK" ${ut_log_name}) == 0 ];then
echo "Find errors in UT test, please check the output..."
exit 1
fi
+
+cp .coverage ${LOG_DIR}/.coverage.adaptor
+
echo "UT finished successfully! "
\ No newline at end of file
diff --git a/.azure-pipelines/scripts/ut/run_basic_adaptor_tfnewapi.sh b/.azure-pipelines/scripts/ut/run_basic_adaptor_tfnewapi.sh
index 9373631f5f7..95c398328d4 100644
--- a/.azure-pipelines/scripts/ut/run_basic_adaptor_tfnewapi.sh
+++ b/.azure-pipelines/scripts/ut/run_basic_adaptor_tfnewapi.sh
@@ -23,11 +23,11 @@ sort run.sh -o run.sh
cat run.sh | tee ${ut_log_name}
echo "------UT start-------"
bash -x run.sh 2>&1 | tee -a ${ut_log_name}
-cp .coverage ${LOG_DIR}/.coverage.tfnewapi
echo "------UT end -------"
if [ $(grep -c "FAILED" ${ut_log_name}) != 0 ] || [ $(grep -c "core dumped" ${ut_log_name}) != 0 ] || [ $(grep -c "ModuleNotFoundError:" ${ut_log_name}) != 0 ] || [ $(grep -c "OK" ${ut_log_name}) == 0 ];then
echo "Find errors in UT test, please check the output..."
exit 1
fi
+cp .coverage ${LOG_DIR}/.coverage.tfnewapi
echo "UT finished successfully! "
\ No newline at end of file
diff --git a/.azure-pipelines/scripts/ut/run_basic_api.sh b/.azure-pipelines/scripts/ut/run_basic_api.sh
index 4a014a2eecf..02b75074ec5 100644
--- a/.azure-pipelines/scripts/ut/run_basic_api.sh
+++ b/.azure-pipelines/scripts/ut/run_basic_api.sh
@@ -28,11 +28,11 @@ sort run.sh -o run.sh
cat run.sh | tee ${ut_log_name}
echo "------UT start-------"
bash -x run.sh 2>&1 | tee -a ${ut_log_name}
-cp .coverage ${LOG_DIR}/.coverage.api
echo "------UT end -------"
if [ $(grep -c "FAILED" ${ut_log_name}) != 0 ] || [ $(grep -c "core dumped" ${ut_log_name}) != 0 ] || [ $(grep -c "ModuleNotFoundError:" ${ut_log_name}) != 0 ] || [ $(grep -c "OK" ${ut_log_name}) == 0 ];then
echo "Find errors in UT test, please check the output..."
exit 1
fi
+cp .coverage ${LOG_DIR}/.coverage.api
echo "UT finished successfully! "
\ No newline at end of file
diff --git a/.azure-pipelines/scripts/ut/run_basic_itex.sh b/.azure-pipelines/scripts/ut/run_basic_itex.sh
index 1c227d5e095..440ea10a8dc 100644
--- a/.azure-pipelines/scripts/ut/run_basic_itex.sh
+++ b/.azure-pipelines/scripts/ut/run_basic_itex.sh
@@ -25,11 +25,11 @@ sort run.sh -o run.sh
cat run.sh | tee ${ut_log_name}
echo "------UT start-------"
bash -x run.sh 2>&1 | tee -a ${ut_log_name}
-cp .coverage ${LOG_DIR}/.coverage.itex
echo "------UT end -------"
if [ $(grep -c "FAILED" ${ut_log_name}) != 0 ] || [ $(grep -c "core dumped" ${ut_log_name}) != 0 ] || [ $(grep -c "ModuleNotFoundError:" ${ut_log_name}) != 0 ] || [ $(grep -c "OK" ${ut_log_name}) == 0 ];then
echo "Find errors in UT test, please check the output..."
exit 1
fi
+cp .coverage ${LOG_DIR}/.coverage.itex
echo "UT finished successfully! "
\ No newline at end of file
diff --git a/.azure-pipelines/scripts/ut/run_basic_others.sh b/.azure-pipelines/scripts/ut/run_basic_others.sh
index e5abf4a293f..0dcc0845db7 100644
--- a/.azure-pipelines/scripts/ut/run_basic_others.sh
+++ b/.azure-pipelines/scripts/ut/run_basic_others.sh
@@ -41,11 +41,11 @@ sort run.sh -o run.sh
cat run.sh | tee ${ut_log_name}
echo "------UT start-------"
bash -x run.sh 2>&1 | tee -a ${ut_log_name}
-cp .coverage ${LOG_DIR}/.coverage.others
echo "------UT end -------"
if [ $(grep -c "FAILED" ${ut_log_name}) != 0 ] || [ $(grep -c "core dumped" ${ut_log_name}) != 0 ] || [ $(grep -c "ModuleNotFoundError:" ${ut_log_name}) != 0 ] || [ $(grep -c "OK" ${ut_log_name}) == 0 ];then
echo "Find errors in UT test, please check the output..."
exit 1
fi
+cp .coverage ${LOG_DIR}/.coverage.others
echo "UT finished successfully! "
\ No newline at end of file
diff --git a/.azure-pipelines/scripts/ut/run_basic_pt_pruning.sh b/.azure-pipelines/scripts/ut/run_basic_pt_pruning.sh
index 828db2f6d96..5857b908394 100644
--- a/.azure-pipelines/scripts/ut/run_basic_pt_pruning.sh
+++ b/.azure-pipelines/scripts/ut/run_basic_pt_pruning.sh
@@ -25,11 +25,11 @@ sort run.sh -o run.sh
cat run.sh | tee ${ut_log_name}
echo "------UT start-------"
bash -x run.sh 2>&1 | tee -a ${ut_log_name}
-cp .coverage ${LOG_DIR}/.coverage.pt_pruning
echo "------UT end -------"
if [ $(grep -c "FAILED" ${ut_log_name}) != 0 ] || [ $(grep -c "core dumped" ${ut_log_name}) != 0 ] || [ $(grep -c "ModuleNotFoundError:" ${ut_log_name}) != 0 ] || [ $(grep -c "OK" ${ut_log_name}) == 0 ];then
echo "Find errors in UT test, please check the output..."
exit 1
fi
+cp .coverage ${LOG_DIR}/.coverage.pt_pruning
echo "UT finished successfully! "
\ No newline at end of file
diff --git a/.azure-pipelines/scripts/ut/run_basic_tf_pruning.sh b/.azure-pipelines/scripts/ut/run_basic_tf_pruning.sh
index 60d126865c4..5540c8820a2 100644
--- a/.azure-pipelines/scripts/ut/run_basic_tf_pruning.sh
+++ b/.azure-pipelines/scripts/ut/run_basic_tf_pruning.sh
@@ -23,11 +23,11 @@ sort run.sh -o run.sh
cat run.sh | tee ${ut_log_name}
echo "------UT start-------"
bash -x run.sh 2>&1 | tee -a ${ut_log_name}
-cp .coverage ${LOG_DIR}/.coverage.tf_pruning
echo "------UT end -------"
if [ $(grep -c "FAILED" ${ut_log_name}) != 0 ] || [ $(grep -c "core dumped" ${ut_log_name}) != 0 ] || [ $(grep -c "ModuleNotFoundError:" ${ut_log_name}) != 0 ] || [ $(grep -c "OK" ${ut_log_name}) == 0 ];then
echo "Find errors in UT test, please check the output..."
exit 1
fi
+cp .coverage ${LOG_DIR}/.coverage.tf_pruning
echo "UT finished successfully! "
\ No newline at end of file
diff --git a/.azure-pipelines/template/code-scan-template.yml b/.azure-pipelines/template/code-scan-template.yml
index 66412006cef..b8d121b4991 100644
--- a/.azure-pipelines/template/code-scan-template.yml
+++ b/.azure-pipelines/template/code-scan-template.yml
@@ -29,7 +29,7 @@ steps:
condition: succeededOrFailed()
inputs:
targetPath: .azure-pipelines/scripts/codeScan/scanLog/${{ parameters.uploadPath }}
- artifact: ${{ parameters.codeScanFileName }}
+ artifact: $(System.JobAttempt)_${{ parameters.codeScanFileName }}
publishLocation: "pipeline"
displayName: "PublishPipelineArtifact"
diff --git a/.azure-pipelines/template/docker-template.yml b/.azure-pipelines/template/docker-template.yml
index bf0ad790ca6..c14db78a2ad 100644
--- a/.azure-pipelines/template/docker-template.yml
+++ b/.azure-pipelines/template/docker-template.yml
@@ -28,7 +28,7 @@ steps:
echo "remove left files through container ..."
docker exec ${{ parameters.containerName }} bash -c "ls -a /neural-compressor && rm -fr /neural-compressor/* && rm -fr /neural-compressor/.* && ls -a /neural-compressor || true"
fi
- displayName: "Docker clean up"
+ displayName: "Docker workspace clean up"
- ${{ if eq(parameters.dockerConfigName, 'commonDockerConfig') }}:
- script: |
diff --git a/.azure-pipelines/template/ut-template.yml b/.azure-pipelines/template/ut-template.yml
index 7b771a53161..b7fecacd3d7 100644
--- a/.azure-pipelines/template/ut-template.yml
+++ b/.azure-pipelines/template/ut-template.yml
@@ -35,11 +35,19 @@ steps:
displayName: "Run UT"
- task: PublishPipelineArtifact@1
+ condition: succeededOrFailed()
inputs:
targetPath: ${{ parameters.uploadPath }}
- artifact: ${{ parameters.utArtifact }}
+ artifact: $(System.JobAttempt)_${{ parameters.utArtifact }}_report
publishLocation: "pipeline"
+ - ${{ if eq(parameters.utTestMode, 'coverage') }}:
+ - task: PublishPipelineArtifact@1
+ inputs:
+ targetPath: ${{ parameters.uploadPath }}
+ artifact: ${{ parameters.utArtifact }}_coverage
+ publishLocation: "pipeline"
+
- task: Bash@3
condition: always()
inputs:
diff --git a/.azure-pipelines/ut-3x-ort.yml b/.azure-pipelines/ut-3x-ort.yml
index b3f18ec503e..42636df2314 100644
--- a/.azure-pipelines/ut-3x-ort.yml
+++ b/.azure-pipelines/ut-3x-ort.yml
@@ -39,7 +39,7 @@ stages:
dockerConfigName: "commonDockerConfig"
utScriptFileName: "3x/run_3x_ort"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut_coverage_3x"
+ utArtifact: "ut_3x"
- stage: ONNXRT_baseline
@@ -54,11 +54,11 @@ stages:
dockerConfigName: "gitCloneDockerConfig"
utScriptFileName: "3x/run_3x_ort"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut_coverage_3x_baseline"
+ utArtifact: "ut_3x_baseline"
repo: $(REPO)
- stage: Coverage
- displayName: "Coverage Combine"
+ displayName: "Coverage Compare"
pool:
vmImage: "ubuntu-latest"
dependsOn: [ONNXRT, ONNXRT_baseline]
@@ -79,6 +79,7 @@ stages:
- task: DownloadPipelineArtifact@2
inputs:
artifact:
+ patterns: '*_coverage/.coverage'
path: $(DOWNLOAD_PATH)
- script: |
diff --git a/.azure-pipelines/ut-3x-pt.yml b/.azure-pipelines/ut-3x-pt.yml
index 846360e92db..2eec560e537 100644
--- a/.azure-pipelines/ut-3x-pt.yml
+++ b/.azure-pipelines/ut-3x-pt.yml
@@ -39,7 +39,7 @@ stages:
dockerConfigName: "commonDockerConfig"
utScriptFileName: "3x/run_3x_pt"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut_coverage_3x"
+ utArtifact: "ut_3x"
- stage: Torch_baseline
@@ -54,11 +54,11 @@ stages:
dockerConfigName: "gitCloneDockerConfig"
utScriptFileName: "3x/run_3x_pt"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut_coverage_3x_baseline"
+ utArtifact: "ut_3x_baseline"
repo: $(REPO)
- stage: Coverage
- displayName: "Coverage Combine"
+ displayName: "Coverage Compare"
pool:
vmImage: "ubuntu-latest"
dependsOn: [Torch, Torch_baseline]
@@ -79,6 +79,7 @@ stages:
- task: DownloadPipelineArtifact@2
inputs:
artifact:
+ patterns: '*_coverage/.coverage'
path: $(DOWNLOAD_PATH)
- script: |
diff --git a/.azure-pipelines/ut-3x-tf.yml b/.azure-pipelines/ut-3x-tf.yml
index 88088278809..cbc19faa7d0 100644
--- a/.azure-pipelines/ut-3x-tf.yml
+++ b/.azure-pipelines/ut-3x-tf.yml
@@ -39,7 +39,7 @@ stages:
dockerConfigName: "commonDockerConfig"
utScriptFileName: "3x/run_3x_tf"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut_coverage_3x"
+ utArtifact: "ut_3x"
- stage: TensorFlow_baseline
@@ -54,11 +54,11 @@ stages:
dockerConfigName: "gitCloneDockerConfig"
utScriptFileName: "3x/run_3x_tf"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut_coverage_3x_baseline"
+ utArtifact: "ut_3x_baseline"
repo: $(REPO)
- stage: Coverage
- displayName: "Coverage Combine"
+ displayName: "Coverage Compare"
pool:
vmImage: "ubuntu-latest"
dependsOn: [TensorFlow, TensorFlow_baseline]
@@ -79,6 +79,7 @@ stages:
- task: DownloadPipelineArtifact@2
inputs:
artifact:
+ patterns: '*_coverage/.coverage'
path: $(DOWNLOAD_PATH)
- script: |
diff --git a/.azure-pipelines/ut-basic-no-cover.yml b/.azure-pipelines/ut-basic-no-cover.yml
index 1935bd32453..9a0fbb190b5 100644
--- a/.azure-pipelines/ut-basic-no-cover.yml
+++ b/.azure-pipelines/ut-basic-no-cover.yml
@@ -45,7 +45,7 @@ stages:
dockerConfigName: "commonDockerConfig"
utScriptFileName: "run_basic_adaptor"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut-adaptor"
+ utArtifact: "ut_adaptor"
utTestMode: "no-coverage"
utContainerName: "utTest-no-coverage"
@@ -61,7 +61,7 @@ stages:
dockerConfigName: "commonDockerConfig"
utScriptFileName: "run_basic_api"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut-coverage-api"
+ utArtifact: "ut_api"
utTestMode: "no-coverage"
utContainerName: "utTest-no-coverage"
@@ -77,7 +77,7 @@ stages:
dockerConfigName: "commonDockerConfig"
utScriptFileName: "run_basic_pt_pruning"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut-coverage-pt-pruning"
+ utArtifact: "ut_pt-pruning"
utTestMode: "no-coverage"
utContainerName: "utTest-no-coverage"
- job:
@@ -88,7 +88,7 @@ stages:
dockerConfigName: "commonDockerConfig"
utScriptFileName: "run_basic_tf_pruning"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut-coverage-tf-pruning"
+ utArtifact: "ut_tf-pruning"
utTestMode: "no-coverage"
utContainerName: "utTest-no-coverage"
@@ -104,6 +104,6 @@ stages:
dockerConfigName: "commonDockerConfig"
utScriptFileName: "run_basic_others"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut-others"
+ utArtifact: "ut_others"
utTestMode: "no-coverage"
utContainerName: "utTest-no-coverage"
diff --git a/.azure-pipelines/ut-basic.yml b/.azure-pipelines/ut-basic.yml
index ebbefaa4de6..a6e34a466ca 100644
--- a/.azure-pipelines/ut-basic.yml
+++ b/.azure-pipelines/ut-basic.yml
@@ -45,7 +45,7 @@ stages:
dockerConfigName: "commonDockerConfig"
utScriptFileName: "run_basic_adaptor"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut-coverage-adaptor"
+ utArtifact: "ut_adaptor"
- stage: API
displayName: Unit Test User facing API
@@ -59,7 +59,7 @@ stages:
dockerConfigName: "commonDockerConfig"
utScriptFileName: "run_basic_api"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut-coverage-api"
+ utArtifact: "ut_api"
- stage: Pruning
displayName: Unit Test Pruning
@@ -73,7 +73,7 @@ stages:
dockerConfigName: "commonDockerConfig"
utScriptFileName: "run_basic_pt_pruning"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut-coverage-pt-pruning"
+ utArtifact: "ut_pt-pruning"
- job:
displayName: Test TensorFlow Pruning
steps:
@@ -82,7 +82,7 @@ stages:
dockerConfigName: "commonDockerConfig"
utScriptFileName: "run_basic_tf_pruning"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut-coverage-tf-pruning"
+ utArtifact: "ut_tf-pruning"
- stage: TFNewAPI
displayName: Unit Test TF newAPI
@@ -96,7 +96,7 @@ stages:
dockerConfigName: "commonDockerConfig"
utScriptFileName: "run_basic_adaptor_tfnewapi"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut-coverage-tfnewapi"
+ utArtifact: "ut_tfnewapi"
- stage: ITEX
displayName: Unit Test ITEX
@@ -110,7 +110,7 @@ stages:
dockerConfigName: "commonDockerConfig"
utScriptFileName: "run_basic_itex"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut-coverage-itex"
+ utArtifact: "ut_itex"
- stage: Others
displayName: Unit Test other basic case
@@ -124,7 +124,7 @@ stages:
dockerConfigName: "commonDockerConfig"
utScriptFileName: "run_basic_others"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut-coverage-others"
+ utArtifact: "ut_others"
- stage: Adaptor_base
displayName: Unit Test FWKs adaptor baseline
@@ -138,7 +138,7 @@ stages:
dockerConfigName: "gitCloneDockerConfig"
utScriptFileName: "run_basic_adaptor"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut-coverage-adaptor-base"
+ utArtifact: "ut-base_adaptor"
repo: $(REPO)
- stage: API_base
@@ -153,7 +153,7 @@ stages:
dockerConfigName: "gitCloneDockerConfig"
utScriptFileName: "run_basic_api"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut-coverage-api-base"
+ utArtifact: "ut-base_api"
repo: $(REPO)
- stage: Pruning_base
@@ -168,7 +168,7 @@ stages:
dockerConfigName: "gitCloneDockerConfig"
utScriptFileName: "run_basic_pt_pruning"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut-coverage-pt-pruning-base"
+ utArtifact: "ut-base_pt-pruning"
repo: $(REPO)
- job:
displayName: Test TensorFlow Pruning baseline
@@ -178,7 +178,7 @@ stages:
dockerConfigName: "gitCloneDockerConfig"
utScriptFileName: "run_basic_tf_pruning"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut-coverage-tf-pruning-base"
+ utArtifact: "ut-base_tf-pruning"
repo: $(REPO)
- stage: TFNewAPI_base
@@ -193,7 +193,7 @@ stages:
dockerConfigName: "gitCloneDockerConfig"
utScriptFileName: "run_basic_adaptor_tfnewapi"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut-coverage-tfnewapi-base"
+ utArtifact: "ut-base_tfnewapi"
repo: $(REPO)
- stage: ITEX_base
@@ -208,7 +208,7 @@ stages:
dockerConfigName: "gitCloneDockerConfig"
utScriptFileName: "run_basic_itex"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut-coverage-itex-base"
+ utArtifact: "ut-base_itex"
repo: $(REPO)
- stage: Others_base
@@ -223,11 +223,11 @@ stages:
dockerConfigName: "gitCloneDockerConfig"
utScriptFileName: "run_basic_others"
uploadPath: $(UPLOAD_PATH)
- utArtifact: "ut-coverage-others-base"
+ utArtifact: "ut-base_others"
repo: $(REPO)
- stage: Coverage
- displayName: "Coverage Combine"
+ displayName: "Coverage Compare"
pool:
vmImage: "ubuntu-latest"
dependsOn: [Adaptor, API, Pruning, TFNewAPI, ITEX, Others, Adaptor_base, API_base, Pruning_base, TFNewAPI_base, ITEX_base, Others_base]
@@ -248,6 +248,7 @@ stages:
- task: DownloadPipelineArtifact@2
inputs:
artifact:
+ patterns: '*_coverage/.coverage.*'
path: $(DOWNLOAD_PATH)
- script: |
diff --git a/.azure-pipelines/ut-itrex.yml b/.azure-pipelines/ut-itrex.yml
index 7da4fd3714f..574c8a32677 100644
--- a/.azure-pipelines/ut-itrex.yml
+++ b/.azure-pipelines/ut-itrex.yml
@@ -12,6 +12,7 @@ pr:
- setup.py
- requirements.txt
- .azure-pipelines/scripts/ut/run_itrex.sh
+ - .azure-pipelines/ut-itrex.yml
exclude:
- neural_compressor/common
- neural_compressor/torch
@@ -33,5 +34,6 @@ stages:
dockerConfigName: 'commonDockerConfig'
utScriptFileName: 'run_itrex'
uploadPath: $(UPLOAD_PATH)
- utArtifact: 'ut-itrex'
+ utArtifact: 'ut_itrex'
+ utTestMode: "no-coverage"
utContainerName: "utTest-itrex"
diff --git a/.azure-pipelines/ut-ncoder.yml b/.azure-pipelines/ut-ncoder.yml
index 904013ea639..a1512ae4803 100644
--- a/.azure-pipelines/ut-ncoder.yml
+++ b/.azure-pipelines/ut-ncoder.yml
@@ -28,5 +28,6 @@ stages:
dockerConfigName: 'commonDockerConfig'
utScriptFileName: 'run_ncoder'
uploadPath: $(UPLOAD_PATH)
- utArtifact: 'ut-ncoder'
+ utArtifact: 'ut_ncoder'
+ utTestMode: "no-coverage"
utContainerName: "utTest-ncoder"
diff --git a/.azure-pipelines/ut-neural-insights.yaml b/.azure-pipelines/ut-neural-insights.yaml
index 5618d38cb79..b73026c623b 100644
--- a/.azure-pipelines/ut-neural-insights.yaml
+++ b/.azure-pipelines/ut-neural-insights.yaml
@@ -10,6 +10,7 @@ pr:
include:
- neural_insights
- setup.py
+ - .azure-pipelines/ut-neural-insights.yaml
pool: ICX-16C
@@ -27,5 +28,6 @@ stages:
dockerConfigName: 'commonDockerConfig'
utScriptFileName: 'run_neural_insights'
uploadPath: $(UPLOAD_PATH)
- utArtifact: 'ut-neural-insights'
+ utArtifact: 'ut_neural-insights'
+ utTestMode: "no-coverage"
utContainerName: "utTest-nInsights"
diff --git a/.azure-pipelines/ut-neural-solution.yaml b/.azure-pipelines/ut-neural-solution.yaml
index 064ca538735..df717899b57 100644
--- a/.azure-pipelines/ut-neural-solution.yaml
+++ b/.azure-pipelines/ut-neural-solution.yaml
@@ -10,6 +10,7 @@ pr:
include:
- neural_solution
- setup.py
+ - .azure-pipelines/ut-neural-solution.yaml
pool: ICX-16C
@@ -27,5 +28,6 @@ stages:
dockerConfigName: 'commonDockerConfig'
utScriptFileName: 'run_neural_solution'
uploadPath: $(UPLOAD_PATH)
- utArtifact: 'ut-neural-solution'
+ utArtifact: 'ut_neural-solution'
+ utTestMode: "no-coverage"
utContainerName: "utTest-nSolution"
diff --git a/.github/checkgroup.yml b/.github/checkgroup.yml
new file mode 100644
index 00000000000..4976b89a2a2
--- /dev/null
+++ b/.github/checkgroup.yml
@@ -0,0 +1,193 @@
+custom_service_name: "CI checker"
+subprojects:
+ - id: "Code Scan Tests workflow"
+ paths:
+ - "neural_compressor/**"
+ - "setup.py"
+ - "requirements.txt"
+ - ".azure-pipelines/code-scan.yml"
+ - ".azure-pipelines/scripts/codeScan/**"
+ checks:
+ - "Code-Scan"
+ - "Code-Scan (Bandit Code Scan Bandit)"
+ - "Code-Scan (DocStyle Code Scan DocStyle)"
+ - "Code-Scan (Pylint Code Scan Pylint)"
+
+ - id: "Code Scan Neural-Insights Tests workflow"
+ paths:
+ - "neural_insights/**"
+ - "setup.py"
+      - ".azure-pipelines/code-scan-neural-insights.yaml"
+ checks:
+ - "Code-Scan-Neural-Insights"
+ - "Code-Scan-Neural-Insights (Bandit Code Scan Bandit)"
+ - "Code-Scan-Neural-Insights (DocStyle Code Scan DocStyle)"
+ - "Code-Scan-Neural-Insights (Pylint Code Scan Pylint)"
+
+ - id: "Code Scan Neural-Solution Tests workflow"
+ paths:
+ - "neural_solution/**"
+ - "setup.py"
+      - ".azure-pipelines/code-scan-neural-solution.yaml"
+ checks:
+ - "Code-Scan-Neural-Solution"
+ - "Code-Scan-Neural-Solution (Bandit Code Scan Bandit)"
+ - "Code-Scan-Neural-Solution (DocStyle Code Scan DocStyle)"
+ - "Code-Scan-Neural-Solution (Pylint Code Scan Pylint)"
+
+ - id: "Model Tests workflow"
+ paths:
+ - "neural_compressor/**"
+ - "setup.py"
+ - "requirements.txt"
+ - ".azure-pipelines/scripts/models/**"
+ - "examples/tensorflow/oob_models/quantization/ptq/**"
+ - "!test"
+ - "!neural_compressor/common/**"
+ - "!neural_compressor/torch/**"
+ - "!neural_compressor/tensorflow/**"
+ - "!neural_compressor/onnxrt/**"
+ checks:
+ - "Model-Test"
+ - "Model-Test (Generate Report GenerateReport)"
+ - "Model-Test (Run MXNet Model resnet50v1)"
+ - "Model-Test (Run ONNX Model resnet50-v1-12)"
+ - "Model-Test (Run PyTorch Model resnet18)"
+ - "Model-Test (Run PyTorch Model resnet18_fx)"
+ - "Model-Test (Run TensorFlow Model darknet19)"
+ - "Model-Test (Run TensorFlow Model densenet-121)"
+ - "Model-Test (Run TensorFlow Model inception_v1)"
+ - "Model-Test (Run TensorFlow Model resnet-101)"
+ - "Model-Test (Run TensorFlow Model resnet50v1.5)"
+ - "Model-Test (Run TensorFlow Model ssd_mobilenet_v1_ckpt)"
+ - "Model-Test (Run TensorFlow Model ssd_resnet50_v1)"
+
+ - id: "Unit Tests basic workflow"
+ paths:
+ - "neural_compressor/**"
+ - "test/**"
+ - "setup.py"
+ - "requirements.txt"
+ - ".azure-pipelines/scripts/ut/**"
+ - "!test/neural_coder/**"
+ - "!test/3x/**"
+ - "!neural_compressor/common/**"
+ - "!neural_compressor/torch/**"
+ - "!neural_compressor/tensorflow/**"
+ - "!neural_compressor/onnxrt/**"
+ - "!.azure-pipelines/scripts/ut/3x/**"
+ checks:
+ - "UT-Basic"
+ - "UT-Basic (Coverage Compare CollectDatafiles)"
+ - "UT-Basic (Unit Test FWKs adaptor Test FWKs adaptor)"
+ - "UT-Basic (Unit Test FWKs adaptor baseline Test FWKs adaptor baseline)"
+ - "UT-Basic (Unit Test ITEX Test ITEX)"
+ - "UT-Basic (Unit Test ITEX baseline Test ITEX baseline)"
+ - "UT-Basic (Unit Test Pruning Test PyTorch Pruning)"
+ - "UT-Basic (Unit Test Pruning Test TensorFlow Pruning)"
+ - "UT-Basic (Unit Test Pruning baseline Test PyTorch Pruning baseline)"
+ - "UT-Basic (Unit Test Pruning baseline Test TensorFlow Pruning baseline)"
+ - "UT-Basic (Unit Test TF newAPI Test TF newAPI)"
+ - "UT-Basic (Unit Test TF newAPI baseline Test TF newAPI baseline)"
+ - "UT-Basic (Unit Test User facing API Test User facing API)"
+ - "UT-Basic (Unit Test User facing API baseline Test User facing API baseline)"
+ - "UT-Basic (Unit Test other basic case Test other basic case)"
+ - "UT-Basic (Unit Test other cases baseline Test other cases baseline)"
+
+ - id: "Unit Tests basic no coverage workflow"
+ paths:
+ - "neural_compressor/**"
+ - "test/**"
+ - "setup.py"
+ - "requirements.txt"
+ - ".azure-pipelines/scripts/ut/**"
+      - "!test/neural_coder/**"
+      - "!test/3x/**"
+      - "!neural_compressor/common/**"
+      - "!neural_compressor/torch/**"
+      - "!neural_compressor/tensorflow/**"
+      - "!neural_compressor/onnxrt/**"
+      - "!.azure-pipelines/scripts/ut/3x/**"
+ checks:
+ - "UT-Basic-No-Coverage"
+ - "UT-Basic-No-Coverage (Unit Test FWKs adaptor Test FWKs adaptor)"
+ - "UT-Basic-No-Coverage (Unit Test Pruning Test PyTorch Pruning)"
+ - "UT-Basic-No-Coverage (Unit Test Pruning Test TensorFlow Pruning)"
+ - "UT-Basic-No-Coverage (Unit Test User facing API Test User facing API)"
+ - "UT-Basic-No-Coverage (Unit Test other basic case Test other basic case)"
+
+ - id: "Unit Tests ITREX workflow"
+ paths:
+ - "neural_compressor/**"
+ - "setup.py"
+ - "requirements.txt"
+ - ".azure-pipelines/scripts/ut/run_itrex.sh"
+ - ".azure-pipelines/ut-itrex.yml"
+ - "!neural_compressor/common/**"
+ - "!neural_compressor/torch/**"
+ - "!neural_compressor/tensorflow/**"
+ checks:
+ - "UT-ITREX"
+
+ - id: "Unit Tests Neural-Insights workflow"
+ paths:
+ - "neural_insights/**"
+ - "setup.py"
+ checks:
+ - "UT-Neural-Insights"
+
+ - id: "Unit Tests Neural-Solution workflow"
+ paths:
+ - "neural_solution/**"
+ - "setup.py"
+ checks:
+ - "UT-Neural-Solution"
+
+ - id: "Unit Tests Neural-Coder workflow"
+ paths:
+ - "neural_coder/**"
+ - "test/neural_coder/**"
+ - "setup.py"
+ checks:
+ - "UT-Coder"
+
+ - id: "Unit Tests 3x-TensorFlow workflow"
+ paths:
+ - "neural_compressor/common/**"
+ - "neural_compressor/tensorflow/**"
+ - "test/3x/tensorflow/**"
+ - "setup.py"
+ - "requirements_tf.txt"
+ checks:
+ - "UT-3x-TensorFlow"
+ - "UT-3x-TensorFlow (Coverage Compare CollectDatafiles)"
+ - "UT-3x-TensorFlow (Unit Test 3x TensorFlow Unit Test 3x TensorFlow)"
+ - "UT-3x-TensorFlow (Unit Test 3x TensorFlow baseline Unit Test 3x TensorFlow baseline)"
+
+ - id: "Unit Tests 3x-PyTorch workflow"
+ paths:
+ - "neural_compressor/common/**"
+ - "neural_compressor/torch/**"
+ - "test/3x/torch/**"
+ - "test/3x/common/**"
+ - "setup.py"
+ - "requirements_pt.txt"
+ - ".azure-pipelines/scripts/ut/3x/collect_log_3x.sh"
+ checks:
+ - "UT-3x-Torch"
+ - "UT-3x-Torch (Coverage Compare CollectDatafiles)"
+ - "UT-3x-Torch (Unit Test 3x Torch Unit Test 3x Torch)"
+ - "UT-3x-Torch (Unit Test 3x Torch baseline Unit Test 3x Torch baseline)"
+
+ - id: "Unit Tests 3x-ONNXRT workflow"
+ paths:
+ - "neural_compressor/common/**"
+ - "neural_compressor/onnxrt/**"
+ - "test/3x/onnxrt/**"
+ - "setup.py"
+ - "requirements_ort.txt"
+ checks:
+ - "UT-3x-ONNXRT"
+ - "UT-3x-ONNXRT (Coverage Compare CollectDatafiles)"
+ - "UT-3x-ONNXRT (Unit Test 3x ONNXRT Unit Test 3x ONNXRT)"
+ - "UT-3x-ONNXRT (Unit Test 3x ONNXRT baseline Unit Test 3x ONNXRT baseline)"
diff --git a/.github/workflows/probot.yml b/.github/workflows/probot.yml
new file mode 100644
index 00000000000..82947f9d616
--- /dev/null
+++ b/.github/workflows/probot.yml
@@ -0,0 +1,26 @@
+name: Probot
+
+on:
+ pull_request:
+ types: [opened, reopened, ready_for_review, synchronize] # added `ready_for_review` since draft is skipped
+ workflow_dispatch:
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref }}
+ cancel-in-progress: true
+
+jobs:
+ required-jobs:
+ runs-on: ubuntu-latest
+ if: github.event.pull_request.draft == false
+ timeout-minutes: 361 # in case something is wrong with the internal timeout
+ steps:
+ - uses: XuehaoSun/probot@0.2
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ with:
+ job: check-group
+ interval: 180 # seconds
+ timeout: 360 # minutes
+ maintainers: ""
+ owner: ""
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index fe946217933..e47b50e6d18 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -37,10 +37,9 @@ repos:
)$
- id: debug-statements
- id: file-contents-sorter
- files: |
+ exclude: |
(?x)^(
- .azure-pipelines/scripts/codeScan/pyspelling/inc_dict.txt|
- .azure-pipelines/scripts/codeScan/pydocstyle/scan_path*.txt
+ examples/.+
)$
args: [--unique]
- id: requirements-txt-fixer