Skip to content

Commit

Permalink
Add heavy unit test workflow and mark tests accordingly
Browse files Browse the repository at this point in the history
  • Loading branch information
eugene123tw committed Jan 6, 2025
1 parent 04f1205 commit b950267
Show file tree
Hide file tree
Showing 4 changed files with 72 additions and 2 deletions.
46 changes: 46 additions & 0 deletions .github/workflows/pre_merge.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,52 @@ jobs:
curl -Os https://uploader.codecov.io/latest/linux/codecov
chmod +x codecov
./codecov -t ${{ secrets.CODECOV_TOKEN }} --sha $COMMIT_ID -U $HTTP_PROXY -f .tox/coverage_unit-test-${{ matrix.tox-env }}.xml -F ${{ matrix.tox-env }}
# Heavy unit tests: runs only the pytest tests marked `heavy` (resource-hungry
# cases excluded from the regular unit-test job) on a self-hosted GPU runner,
# once per supported Python version, and uploads coverage to Codecov.
# NOTE: indentation restored — the extracted text had lost all YAML nesting.
Heavy-Unit-Test:
  runs-on: [otx-gpu-a10g-1]  # self-hosted runner label (A10G GPU machine)
  needs: Code-Quality-Checks
  timeout-minutes: 120  # heavy tests can be slow; generous ceiling
  strategy:
    fail-fast: false  # let both Python versions run to completion independently
    matrix:
      include:
        - python-version: "3.10"
          tox-env: "py310"
        - python-version: "3.11"
          tox-env: "py311"
  name: Heavy-Unit-Test-with-Python${{ matrix.python-version }}
  steps:
    - name: Checkout repository
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
    - name: Install Python
      uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install tox
      run: |
        # Hash-pinned, no-deps installs for supply-chain safety; tox deps are
        # compiled from pyproject's `ci_tox` extra at run time.
        python -m pip install --require-hashes --no-deps -r .ci/requirements.txt
        pip-compile --generate-hashes --output-file=/tmp/requirements.txt --extra=ci_tox pyproject.toml
        python -m pip install --require-hashes --no-deps -r /tmp/requirements.txt
        rm /tmp/requirements.txt
    - name: Run unit test
      run: tox -vv -e heavy-unit-test-${{ matrix.tox-env }}
    - name: Upload coverage reports to Codecov
      run: |
        # If the workflow is triggered from PR then it gets the commit id from the PR.
        # else it uses the commit id of the latest commit. This is because the commit
        # of the checked-out branch/commit does not exist in the tree as it is grafted.
        # Also note: GitHub does not pass secrets to pipelines triggered from a fork.
        # This means that upload will fail for PRs from forks.
        if [ -n "${{ github.event.pull_request.head.sha }}" ]
        then
          COMMIT_ID=${{ github.event.pull_request.head.sha }}
        else
          COMMIT_ID=${{ github.sha }}
        fi
        # current version of codecov-action does not support uploading reports through the proxy
        # so we use the latest version of codecov uploader binary
        curl -Os https://uploader.codecov.io/latest/linux/codecov
        chmod +x codecov
        ./codecov -t ${{ secrets.CODECOV_TOKEN }} --sha $COMMIT_ID -U $HTTP_PROXY -f .tox/coverage_heavy-unit-test-${{ matrix.tox-env }}.xml -F ${{ matrix.tox-env }}
Integration-Test:
if: |
github.event.pull_request.draft == false &&
Expand Down
3 changes: 2 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -388,6 +388,7 @@ convention = "google"
markers = [
"gpu", # mark tests which require NVIDIA GPU
"cpu",
"xpu", # mark tests which require Intel dGPU
"xpu", # mark tests which require Intel dGPU
"heavy", # heavy unit tests which require better CI machines
]
python_files = "tests/**/*.py"
11 changes: 11 additions & 0 deletions tests/unit/core/data/test_tiling.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,6 +124,7 @@ def fxt_data_config(self, fxt_data_roots) -> dict[dict]:
},
}

@pytest.mark.heavy()
def det_dummy_forward(self, x: DetBatchDataEntity) -> DetBatchPredEntity:
"""Dummy detection forward function for testing.
Expand Down Expand Up @@ -178,6 +179,7 @@ def det_dummy_forward(self, x: DetBatchDataEntity) -> DetBatchPredEntity:

return pred_entity

@pytest.mark.heavy()
def inst_seg_dummy_forward(self, x: InstanceSegBatchDataEntity) -> InstanceSegBatchPredEntity:
"""Dummy instance segmentation forward function for testing.
Expand Down Expand Up @@ -240,6 +242,7 @@ def inst_seg_dummy_forward(self, x: InstanceSegBatchDataEntity) -> InstanceSegBa

return pred_entity

@pytest.mark.heavy()
@pytest.mark.parametrize(
"task",
[OTXTaskType.DETECTION, OTXTaskType.INSTANCE_SEGMENTATION, OTXTaskType.SEMANTIC_SEGMENTATION],
Expand Down Expand Up @@ -381,6 +384,7 @@ def test_tile_sampler(self, fxt_data_config):

assert sampled_count == count, "Sampled count should be equal to the count of the dataloader batch size"

@pytest.mark.heavy()
def test_train_dataloader(self, fxt_data_config) -> None:
for task, data_config in fxt_data_config.items():
# Enable tile adapter
Expand All @@ -400,6 +404,7 @@ def test_train_dataloader(self, fxt_data_config) -> None:
else:
pytest.skip("Task not supported")

@pytest.mark.heavy()
def test_val_dataloader(self, fxt_data_config) -> None:
for task, data_config in fxt_data_config.items():
# Enable tile adapter
Expand All @@ -419,6 +424,7 @@ def test_val_dataloader(self, fxt_data_config) -> None:
else:
pytest.skip("Task not supported")

@pytest.mark.heavy()
def test_det_tile_merge(self, fxt_data_config):
data_config = fxt_data_config[OTXTaskType.DETECTION]
model = ATSS(
Expand All @@ -441,6 +447,7 @@ def test_det_tile_merge(self, fxt_data_config):
for batch in tile_datamodule.val_dataloader():
model.forward_tiles(batch)

@pytest.mark.heavy()
def test_explain_det_tile_merge(self, fxt_data_config):
data_config = fxt_data_config[OTXTaskType.DETECTION]
model = ATSS(
Expand All @@ -465,6 +472,7 @@ def test_explain_det_tile_merge(self, fxt_data_config):
assert prediction.saliency_map[0].ndim == 3
self.explain_mode = False

@pytest.mark.heavy()
def test_instseg_tile_merge(self, fxt_data_config):
data_config = fxt_data_config[OTXTaskType.INSTANCE_SEGMENTATION]
model = MaskRCNN(label_info=3, model_name="maskrcnn_efficientnet_b2b", input_size=(256, 256))
Expand All @@ -484,6 +492,7 @@ def test_instseg_tile_merge(self, fxt_data_config):
for batch in tile_datamodule.val_dataloader():
model.forward_tiles(batch)

@pytest.mark.heavy()
def test_explain_instseg_tile_merge(self, fxt_data_config):
data_config = fxt_data_config[OTXTaskType.INSTANCE_SEGMENTATION]
model = MaskRCNN(label_info=3, model_name="maskrcnn_efficientnet_b2b", input_size=(256, 256))
Expand All @@ -505,6 +514,7 @@ def test_explain_instseg_tile_merge(self, fxt_data_config):
assert prediction.saliency_map[0].ndim == 3
self.explain_mode = False

@pytest.mark.heavy()
def test_seg_tile_merge(self, fxt_data_config):
data_config = fxt_data_config[OTXTaskType.SEMANTIC_SEGMENTATION]
model = LiteHRNet(label_info=3, model_name="lite_hrnet_18")
Expand All @@ -523,6 +533,7 @@ def test_seg_tile_merge(self, fxt_data_config):
for batch in tile_datamodule.val_dataloader():
model.forward_tiles(batch)

@pytest.mark.heavy()
def test_seg_tiler(self, mocker):
rng = np.random.default_rng()
rnd_tile_size = rng.integers(low=100, high=500)
Expand Down
14 changes: 13 additions & 1 deletion tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,19 @@ deps =
.[base,dev]
commands =
; Run Unit-Test with coverage report.
pytest tests/unit \
pytest -m "not heavy" tests/unit \
--cov=otx \
--cov-report=xml:{toxworkdir}/coverage_{envname}.xml \
--cov-report=term-missing \
--cov-fail-under=0 \
{posargs}

[testenv:heavy-unit-test-{py310, py311}]
deps =
.[base,dev]
commands =
; Run heavy-marked unit tests with coverage report.
pytest -m "heavy" tests/unit \
--cov=otx \
--cov-report=xml:{toxworkdir}/coverage_{envname}.xml \
--cov-report=term-missing \
Expand Down

0 comments on commit b950267

Please sign in to comment.