name: CI

# Run on every pull request, and on pushes to main or to any tag
on:
  push:
    branches:
      - main
    tags:
      - '*'
  pull_request:

env:
  LATEST_PY_VERSION: '3.11'
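
# LATEST_PY_VERSION selects which matrix entry runs pre-commit and uploads
# coverage, and which Python the publish job installs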
jobs:
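  # Run the full test suite against every supported Python version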
  tests:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version:
          - '3.8'
          - '3.9'
          - '3.10'
          - '3.11'
          - '3.12'

    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install -e ".[test]"

      - name: Run pre-commit
        if: ${{ matrix.python-version == env.LATEST_PY_VERSION }}
        run: |
          python -m pip install pre-commit
          pre-commit run --all-files

      - name: Run tests
        run: python -m pytest --cov rio_tiler --cov-report xml --cov-report term-missing -s -vv

      - name: Upload Results
        if: ${{ matrix.python-version == env.LATEST_PY_VERSION }}
        uses: codecov/codecov-action@v4
        with:
          file: ./coverage.xml
          flags: unittests
          name: ${{ matrix.python-version }}
          fail_ci_if_error: false
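
  # Run performance benchmarks on the latest Python and store results with
  # github-action-benchmark so regressions can be flagged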
  benchmark:
    needs: [tests]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.11'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install -e ".[benchmark]"

      - name: Run Benchmark
        run: python -m pytest tests/benchmarks/benchmarks.py --benchmark-only --benchmark-columns 'min, max, mean, median' --benchmark-sort 'min' --benchmark-json output.json

      - name: Store and Compare benchmark result
        uses: benchmark-action/github-action-benchmark@v1
        with:
          name: rio-tiler Benchmarks
          tool: 'pytest'
          output-file-path: output.json
          # Warn (but don't fail CI) when a benchmark regresses past 150% of baseline
          alert-threshold: '150%'
          comment-on-alert: true
          fail-on-alert: false
          # GitHub API token to make a commit comment
          github-token: ${{ secrets.GITHUB_TOKEN }}
          gh-pages-branch: 'gh-benchmarks'
          # Push benchmark data to the `gh-benchmarks` branch only on main
          auto-push: ${{ github.ref == 'refs/heads/main' }}
          benchmark-data-dir-path: dev/benchmarks
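
  # Benchmark read/request behavior using tilebench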
  benchmark-requests:
    needs: [tests]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.11'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install -e ".[tilebench]"

      - name: Run Benchmark
        run: python -m pytest tests/benchmarks/requests.py -s -vv
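
  # Build and publish to PyPI on tag pushes or releases, but only when the
  # git tag matches the package version reported by hatch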
  publish:
    needs: [tests]
    runs-on: ubuntu-latest
    if: startsWith(github.event.ref, 'refs/tags') || github.event_name == 'release'
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: ${{ env.LATEST_PY_VERSION }}

      - name: Install dependencies and build package
        run: |
          python -m pip install --upgrade pip
          python -m pip install hatch
          python -m hatch build

      - name: Set tag version
        id: tag
        run: |
          echo "version=${GITHUB_REF#refs/*/}"
          echo "version=${GITHUB_REF#refs/*/}" >> $GITHUB_OUTPUT

      - name: Set module version
        id: module
        run: |
          hatch --quiet version
          echo "version=$(hatch --quiet version)" >> $GITHUB_OUTPUT

      - name: Build and publish
        # Compare both versions inside a single expression; two separately
        # interpolated expressions would render to a literal string that is
        # always truthy, so the guard would never actually gate the publish
        if: ${{ steps.tag.outputs.version == steps.module.outputs.version }}
        env:
          HATCH_INDEX_USER: ${{ secrets.PYPI_USERNAME }}
          HATCH_INDEX_AUTH: ${{ secrets.PYPI_PASSWORD }}
        run: |
          python -m hatch publish