Added jobs into yaml tests #980

Merged
13 commits merged on Nov 22, 2019
10 changes: 3 additions & 7 deletions tests/ci/azure_pipeline_test/dsvm_nightly_linux_cpu.yml
@@ -15,7 +15,7 @@ pr: none

jobs:
- job: nightly
displayName : 'Nightly tests Linux CPU'
displayName: 'Nightly tests Linux CPU'
timeoutInMinutes: 180 # how long to run the job before automatically cancelling
pool:
name: recolinuxpool
@@ -31,18 +31,16 @@ jobs:
python ./scripts/generate_conda_file.py --name nightly_reco_base && \
conda env create --quiet -f nightly_reco_base.yaml 2> log
displayName: 'Setup Conda Env'
timeoutInMinutes: 10

- script: |
. /anaconda/etc/profile.d/conda.sh && \
conda activate nightly_reco_base && \
echo "Smoke tests" && \
pytest tests/smoke -m "smoke and not spark and not gpu" --junitxml=reports/test-smoke.xml && \
pytest tests/smoke --durations 0 -m "smoke and not spark and not gpu" --junitxml=reports/test-smoke.xml && \
echo "Integration tests" && \
pytest tests/integration -m "integration and not spark and not gpu" --junitxml=reports/test-integration.xml && \
pytest tests/integration --durations 0 -m "integration and not spark and not gpu" --junitxml=reports/test-integration.xml && \
conda deactivate
displayName: 'Run Tests'
timeoutInMinutes: 180

- task: PublishTestResults@2
displayName: 'Publish Test Results '
@@ -57,5 +55,3 @@ jobs:
displayName: 'Conda remove'
continueOnError: true
condition: succeededOrFailed()
timeoutInMinutes: 10
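
The change is the same across these nightly pipelines: the flat step list is nested under a jobs: block with a single named job, one job-level timeoutInMinutes replaces the per-step timeouts, and the pytest calls gain --durations 0. A minimal sketch of the resulting layout (step bodies elided, job and pool names as in this file):

jobs:
- job: nightly
  displayName: 'Nightly tests Linux CPU'
  timeoutInMinutes: 180  # single job-level timeout instead of per-step values
  pool:
    name: recolinuxpool
  steps:
  - script: echo "setup, test and cleanup steps go here, indented under the job"
    displayName: 'Example step'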

9 changes: 3 additions & 6 deletions tests/ci/azure_pipeline_test/dsvm_nightly_linux_gpu.yml
@@ -15,7 +15,7 @@ pr: none

jobs:
- job: nightly
displayName : 'Nightly tests Linux GPU'
displayName: 'Nightly tests Linux GPU'
timeoutInMinutes: 180 # how long to run the job before automatically cancelling
pool:
name: recolinuxpool
@@ -31,18 +31,16 @@ jobs:
python ./scripts/generate_conda_file.py --gpu --name nightly_reco_gpu && \
conda env create --quiet -f nightly_reco_gpu.yaml 2> log
displayName: 'Setup Conda Env'
timeoutInMinutes: 10

- script: |
. /anaconda/etc/profile.d/conda.sh && \
conda activate nightly_reco_gpu && \
echo "Smoke tests" && \
pytest tests/smoke -m "smoke and not spark and gpu" --junitxml=reports/test-smoke.xml && \
pytest tests/smoke --durations 0 -m "smoke and not spark and gpu" --junitxml=reports/test-smoke.xml && \
echo "Integration tests" && \
pytest tests/integration -m "integration and not spark and gpu" --junitxml=reports/test-integration.xml && \
pytest tests/integration --durations 0 -m "integration and not spark and gpu" --junitxml=reports/test-integration.xml && \
conda deactivate
displayName: 'Run Tests'
timeoutInMinutes: 180

- task: PublishTestResults@2
displayName: 'Publish Test Results '
@@ -57,5 +55,4 @@ jobs:
displayName: 'Conda remove'
continueOnError: true
condition: succeededOrFailed()
timeoutInMinutes: 10
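
The other functional change in the test steps is the --durations 0 flag: pytest then prints a timing summary covering every collected test at the end of the session (a positive value would list only that many of the slowest tests), which helps spot slow smoke and integration tests in the nightly logs. A small sketch of the step, with the flag's effect spelled out in a comment:

- script: |
    # --durations 0 reports the runtime of every test; --durations 10 would show only the ten slowest
    . /anaconda/etc/profile.d/conda.sh
    conda activate nightly_reco_gpu
    pytest tests/smoke --durations 0 -m "smoke and not spark and gpu" --junitxml=reports/test-smoke.xml
  displayName: 'Run smoke tests with per-test timings'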

9 changes: 3 additions & 6 deletions tests/ci/azure_pipeline_test/dsvm_nightly_linux_pyspark.yml
@@ -15,7 +15,7 @@ pr: none

jobs:
- job: nightly
displayName : 'Nightly tests Linux Spark'
displayName: 'Nightly tests Linux Spark'
timeoutInMinutes: 180 # how long to run the job before automatically cancelling
pool:
name: recolinuxpool
@@ -31,18 +31,16 @@ jobs:
python ./scripts/generate_conda_file.py --pyspark --name nightly_reco_pyspark && \
conda env create --quiet -f nightly_reco_pyspark.yaml 2> log
displayName: 'Setup Conda Env'
timeoutInMinutes: 10

- script: |
. /anaconda/etc/profile.d/conda.sh && \
conda activate nightly_reco_pyspark && \
echo "Smoke tests" && \
pytest tests/smoke -m "smoke and spark and not gpu" --junitxml=reports/test-smoke.xml && \
pytest tests/smoke --durations 0 -m "smoke and spark and not gpu" --junitxml=reports/test-smoke.xml && \
echo "Integration tests" && \
pytest tests/integration -m "integration and spark and not gpu" --junitxml=reports/test-integration.xml && \
pytest tests/integration --durations 0 -m "integration and spark and not gpu" --junitxml=reports/test-integration.xml && \
conda deactivate
displayName: 'Run Tests'
timeoutInMinutes: 180

- task: PublishTestResults@2
displayName: 'Publish Test Results '
@@ -57,4 +55,3 @@ jobs:
displayName: 'Conda remove'
continueOnError: true
condition: succeededOrFailed()
timeoutInMinutes: 10
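
Each pipeline picks its slice of the suite with a pytest -m marker expression, combining the test tier (smoke or integration) with the platform markers (spark, gpu). A sketch of how the expression maps to test selection, assuming the tests carry the corresponding pytest.mark decorators:

- script: |
    # selects tests marked both "integration" and "spark" that are not also marked "gpu"
    pytest tests/integration --durations 0 -m "integration and spark and not gpu" --junitxml=reports/test-integration.xml
    # the GPU pipelines flip the expression to "integration and not spark and gpu"
  displayName: 'Marker-based test selection'
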
94 changes: 46 additions & 48 deletions tests/ci/azure_pipeline_test/dsvm_nightly_win_cpu.yml
@@ -12,51 +12,49 @@ trigger: none

pr: none

pool:
name: RecommendersAgentPoolWin
timeoutInMinutes: 180
cancelTimeoutInMinutes: 180

steps:
- script: |
call conda env remove -n nightly_reco_base
rmdir /s /q C:\Anaconda\envs\nightly_reco_base
displayName: 'Remove Conda Env if it exists'

- script: |
python ./scripts/generate_conda_file.py --name nightly_reco_base
call conda env create -f nightly_reco_base.yaml
displayName: 'Setup Conda Env'
timeoutInMinutes: 10

- script: |
call conda activate nightly_reco_base
echo "Smoke tests"
pytest tests/smoke -m "smoke and not spark and not gpu" --junitxml=reports/test-smoke.xml
echo "Integration tests"
pytest tests/integration -m "integration and not spark and not gpu" --junitxml=reports/test-integration.xml
conda deactivate
displayName: 'Run Tests'

- task: PublishTestResults@2
displayName: 'Publish Test Results '
inputs:
testResultsFiles: '**/test-*.xml'
failTaskOnFailedTests: true
condition: succeededOrFailed()

- script: |
call conda env remove -n nightly_reco_base -y
rmdir /s /q C:\Anaconda\envs\nightly_reco_base

workingDirectory: tests
displayName: 'Conda remove'
continueOnError: true
condition: succeededOrFailed()
timeoutInMinutes: 10

- script: |
del /q /S %LOCALAPPDATA%\Temp\*
for /d %%i in (%LOCALAPPDATA%\Temp\*) do @rmdir /s /q "%%i"
displayName: 'Remove Temp Files'
condition: succeededOrFailed()
jobs:
- job: nightly
displayName: 'Nightly tests Windows CPU'
timeoutInMinutes: 180 # how long to run the job before automatically cancelling
pool:
name: RecommendersAgentPoolWin

steps:
- script: |
call conda env remove -n nightly_reco_base -y
rmdir /s /q C:\Anaconda\envs\nightly_reco_base
displayName: 'Remove Conda Env if it exists'

- script: |
python ./scripts/generate_conda_file.py --name nightly_reco_base
call conda env create -f nightly_reco_base.yaml
displayName: 'Setup Conda Env'

- script: |
call conda activate nightly_reco_base
echo "Smoke tests"
pytest tests/smoke --durations 0 -m "smoke and not spark and not gpu" --junitxml=reports/test-smoke.xml
echo "Integration tests"
pytest tests/integration --durations 0 -m "integration and not spark and not gpu" --junitxml=reports/test-integration.xml
displayName: 'Run Tests'

- task: PublishTestResults@2
displayName: 'Publish Test Results '
inputs:
testResultsFiles: '**/test-*.xml'
failTaskOnFailedTests: true
condition: succeededOrFailed()

- script: |
call conda env remove -n nightly_reco_base -y
rmdir /s /q C:\Anaconda\envs\nightly_reco_base
workingDirectory: tests
displayName: 'Conda remove'
continueOnError: true
condition: succeededOrFailed()

- script: |
del /q /S %LOCALAPPDATA%\Temp\*
for /d %%i in (%LOCALAPPDATA%\Temp\*) do @rmdir /s /q "%%i"
displayName: 'Remove Temp Files'
condition: succeededOrFailed()
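
The cleanup steps keep continueOnError: true and condition: succeededOrFailed(), so the Conda environment and temp files are removed even when the test step fails, without the cleanup itself being able to fail the job. The same step from the diff, with the two settings annotated:

- script: |
    call conda env remove -n nightly_reco_base -y
    rmdir /s /q C:\Anaconda\envs\nightly_reco_base
  workingDirectory: tests
  displayName: 'Conda remove'
  continueOnError: true           # a failure here is reported but does not fail the job
  condition: succeededOrFailed()  # run even if an earlier step (e.g. the tests) failed
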
80 changes: 39 additions & 41 deletions tests/ci/azure_pipeline_test/dsvm_nightly_win_gpu.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,44 +12,42 @@ trigger: none

pr: none

pool:
name: RecommendersAgentPoolWin
timeoutInMinutes: 180
cancelTimeoutInMinutes: 180

steps:
- script: |
call conda env remove -n nightly_reco_gpu
rmdir /s /q C:\Anaconda\envs\nightly_reco_gpu
python ./scripts/generate_conda_file.py --gpu --name nightly_reco_gpu
conda env create --quiet -f nightly_reco_gpu.yaml --verbose

displayName: 'Setup Conda Env'
timeoutInMinutes: 10

- script: |
call conda activate nightly_reco_gpu
echo "Smoke tests"
pytest tests/smoke -m "smoke and not spark and gpu" --junitxml=reports/test-smoke.xml
echo "Integration tests"
pytest tests/integration -m "integration and not spark and gpu" --junitxml=reports/test-integration.xml
call conda deactivate

displayName: 'Run python smoke and integration tests'
timeoutInMinutes: 180

- task: PublishTestResults@2
displayName: 'Publish Test Results **/test-*.xml'
inputs:
testResultsFiles: '**/test-*.xml'
failTaskOnFailedTests: true
condition: succeededOrFailed()

- script: |
call conda env remove -n nightly_reco_gpu -y
rmdir /s /q C:\Anaconda\envs\nightly_reco_gpu

workingDirectory: tests
displayName: 'Conda remove'
continueOnError: true
timeoutInMinutes: 10
jobs:
- job: nightly
displayName: 'Nightly tests Windows GPU'
timeoutInMinutes: 180 # how long to run the job before automatically cancelling
pool:
name: RecommendersAgentPoolWin

steps:
- script: |
call conda env remove -n nightly_reco_gpu -y
rmdir /s /q C:\Anaconda\envs\nightly_reco_gpu
displayName: 'Remove Conda Env if it exists'

- script: |
python ./scripts/generate_conda_file.py --gpu --name nightly_reco_gpu
conda env create --quiet -f nightly_reco_gpu.yaml --verbose
displayName: 'Setup Conda Env'

- script: |
call conda activate nightly_reco_gpu
echo "Smoke tests"
pytest tests/smoke --durations 0 -m "smoke and not spark and gpu" --junitxml=reports/test-smoke.xml
echo "Integration tests"
pytest tests/integration --durations 0 -m "integration and not spark and gpu" --junitxml=reports/test-integration.xml
displayName: 'Run python smoke and integration tests'

- task: PublishTestResults@2
displayName: 'Publish Test Results **/test-*.xml'
inputs:
testResultsFiles: '**/test-*.xml'
failTaskOnFailedTests: true
condition: succeededOrFailed()

- script: |
call conda env remove -n nightly_reco_gpu -y
rmdir /s /q C:\Anaconda\envs\nightly_reco_gpu
workingDirectory: tests
displayName: 'Conda remove'
continueOnError: true
88 changes: 44 additions & 44 deletions tests/ci/azure_pipeline_test/dsvm_nightly_win_pyspark.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,48 +12,48 @@ trigger: none

pr: none

pool:
name: RecommendersAgentPoolWin
timeoutInMinutes: 180
cancelTimeoutInMinutes: 180

steps:
- script: |
call conda env remove -n nightly_reco_pyspark
rmdir /s /q C:\Anaconda\envs\nightly_reco_pyspark
python ./scripts/generate_conda_file.py --pyspark --name nightly_reco_pyspark
conda env create --quiet -f nightly_reco_pyspark.yaml --verbose
displayName: 'Setup Conda Env'
timeoutInMinutes: 10

- script: |
call conda activate nightly_reco_pyspark
set SPARK_HOME=
echo "Smoke tests"
pytest tests/smoke -m "smoke and spark and not gpu" --junitxml=reports/test-smoke.xml
echo "Integration tests"
pytest tests/integration -m "integration and spark and not gpu" --junitxml=reports/test-integration.xml
conda deactivate
displayName: 'Run pyspark smoke and integration tests'
timeoutInMinutes: 180
env:
PYSPARK_PYTHON: c:\anaconda\envs\reco_pyspark\python.exe
PYSPARK_DRIVER_PYTHON: c:\anaconda\envs\reco_pyspark\python.exe

- task: PublishTestResults@2
displayName: 'Publish Test Results '
inputs:
testResultsFiles: '**/test-*.xml'
failTaskOnFailedTests: true
condition: succeededOrFailed()

- script: |
call conda env remove -n nightly_reco_pyspark -y
rmdir /s /q C:\Anaconda\envs\nightly_reco_pyspark

workingDirectory: tests
displayName: 'Conda remove'
continueOnError: true
condition: succeededOrFailed()
timeoutInMinutes: 10
jobs:
- job: nightly
displayName: 'Nightly tests Windows Pyspark'
timeoutInMinutes: 180 # how long to run the job before automatically cancelling
pool:
name: RecommendersAgentPoolWin

steps:
- script: |
call conda env remove -n nightly_reco_pyspark -y
rmdir /s /q C:\Anaconda\envs\nightly_reco_pyspark
displayName: 'Remove Conda Env if it exists'

- script: |
python ./scripts/generate_conda_file.py --pyspark --name nightly_reco_pyspark
conda env create --quiet -f nightly_reco_pyspark.yaml --verbose
displayName: 'Setup Conda Env'

- script: |
call conda activate nightly_reco_pyspark
set SPARK_HOME=
echo "Smoke tests"
pytest tests/smoke --durations 0 -m "smoke and spark and not gpu" --junitxml=reports/test-smoke.xml
echo "Integration tests"
pytest tests/integration --durations 0 -m "integration and spark and not gpu" --junitxml=reports/test-integration.xml
displayName: 'Run pyspark smoke and integration tests'
env:
PYSPARK_PYTHON: c:\anaconda\envs\reco_pyspark\python.exe
PYSPARK_DRIVER_PYTHON: c:\anaconda\envs\reco_pyspark\python.exe

- task: PublishTestResults@2
displayName: 'Publish Test Results '
inputs:
testResultsFiles: '**/test-*.xml'
failTaskOnFailedTests: true
condition: succeededOrFailed()

- script: |
call conda env remove -n nightly_reco_pyspark -y
rmdir /s /q C:\Anaconda\envs\nightly_reco_pyspark
workingDirectory: tests
displayName: 'Conda remove'
continueOnError: true
condition: succeededOrFailed()
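
Two details specific to the Spark job: set SPARK_HOME= (with nothing after the equals sign) clears the variable for the rest of the cmd script, presumably so pyspark resolves to the version installed in the Conda environment, and the env: mapping sets PYSPARK_PYTHON and PYSPARK_DRIVER_PYTHON only for that one step. In outline (interpreter paths as in the diff above):

- script: |
    call conda activate nightly_reco_pyspark
    set SPARK_HOME=
    pytest tests/smoke --durations 0 -m "smoke and spark and not gpu" --junitxml=reports/test-smoke.xml
  displayName: 'Run pyspark smoke and integration tests'
  env:  # step-scoped environment variables
    PYSPARK_PYTHON: c:\anaconda\envs\reco_pyspark\python.exe
    PYSPARK_DRIVER_PYTHON: c:\anaconda\envs\reco_pyspark\python.exe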
