-
Notifications
You must be signed in to change notification settings - Fork 354
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Install the rerun-sdk in CI using --no-index and split out linux whee…
…l build to run first. (#1838) * Install the rerun-sdk by the expected version * Fix comment * typo * Use --no-index when installing the rerun wheel * Use the cargo_version, not the new_version * Split dependency install into its own step * Don't use force-reinstall * Refactor setting of expected_version variable. * Use bash when setting env * Always run the linux job first and use its rrds for the other wheels
- Loading branch information
Showing
2 changed files
with
193 additions
and
39 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -63,7 +63,167 @@ jobs: | |
just py-requirements | ||
# --------------------------------------------------------------------------- | ||
# We need one wheel-build to be special so the other builds (namely mac arm) can use its rrd | ||
# This copy-paste is awful, but we'll refactor the build soon. | ||
wheels-linux: | ||
if: github.event_name == 'push' || github.event.inputs.force_build_wheel | ||
name: Build Python Wheels (Linux) | ||
runs-on: ubuntu-latest | ||
container: | ||
image: rerunio/ci_docker:0.6 | ||
steps: | ||
- uses: actions/checkout@v3 | ||
|
||
# These should already be in the docker container, but run for good measure. A no-op install | ||
# should be fast, and this way things don't break if we add new packages without rebuilding | ||
# docker | ||
- name: Cache APT Packages | ||
uses: awalsh128/[email protected] | ||
with: | ||
packages: ${{ env.UBUNTU_REQUIRED_PKGS }} | ||
version: 2.0 # Increment this to pull newer packages | ||
execute_install_scripts: true | ||
|
||
- name: Set up cargo cache | ||
uses: Swatinem/rust-cache@v2 | ||
with: | ||
env-vars: CARGO CC CFLAGS CXX CMAKE RUST CACHE_KEY | ||
# Don't update the cache -- it will be updated by the lint job | ||
# TODO(jleibs): this job will likely run before rust.yml updates | ||
# the cache. Better cross-job sequencing would be nice here | ||
save-if: False | ||
|
||
# These should already be in the docker container, but run for good measure. A no-op install | ||
# should be fast, and this way things don't break if we add new packages without rebuilding | ||
# docker | ||
- run: pip install -r rerun_py/requirements-build.txt | ||
|
||
# ---------------------------------------------------------------------------------- | ||
|
||
- name: Patch Cargo.toml for pre-release | ||
if: github.ref == 'refs/heads/main' | ||
# After patching the pre-release version, run cargo update. | ||
# This updates the cargo.lock file with the new version numbers and keeps the wheel build from failing | ||
run: | | ||
python3 scripts/version_util.py --patch_prerelease | ||
cargo update -w | ||
- name: Version check for tagged-release | ||
if: startsWith(github.ref, 'refs/tags/v') | ||
# This call to version_util.py will assert version from Cargo.toml matches git tagged version vX.Y.Z | ||
run: | | ||
python3 scripts/version_util.py --check_version | ||
- name: Store the expected version | ||
# Find the current cargo version and store it in the GITHUB_ENV var: `expected_version` | ||
shell: bash | ||
run: | | ||
echo "expected_version=$(python3 scripts/version_util.py --bare_cargo_version)" >> $GITHUB_ENV | ||
- name: Build Wheel | ||
uses: PyO3/maturin-action@v1 | ||
with: | ||
maturin-version: "0.14.10" | ||
manylinux: manylinux_2_31 | ||
container: off | ||
command: build | ||
args: | | ||
--manifest-path rerun_py/Cargo.toml | ||
--release | ||
--target x86_64-unknown-linux-gnu | ||
--no-default-features | ||
--features pypi | ||
--out pre-dist | ||
- name: Install wheel dependencies | ||
# First we install the dependencies manually so we can use `--no-index` when installing the wheel. | ||
# This needs to be a separate step for some reason or the following step fails | ||
# TODO(jleibs): pull these deps from pyproject.toml | ||
# TODO(jleibs): understand why deps can't be installed in the same step as the wheel | ||
shell: bash | ||
run: | | ||
pip install deprecated numpy>=1.23 pyarrow==10.0.1 | ||
- name: Install built wheel | ||
# Now install the wheel using a specific version and --no-index to guarantee we get the version from | ||
# the pre-dist folder. Note we don't use --force-reinstall here because --no-index means it wouldn't | ||
# find the dependencies to reinstall them. | ||
shell: bash | ||
run: | | ||
pip uninstall rerun-sdk | ||
pip install rerun-sdk==${{ env.expected_version }} --no-index --find-links pre-dist | ||
- name: Verify built wheel version | ||
shell: bash | ||
run: | | ||
python3 -m rerun --version | ||
which rerun | ||
rerun --version | ||
- name: Run unit tests | ||
shell: bash | ||
run: cd rerun_py/tests && pytest | ||
|
||
- name: Install requirements for e2e test | ||
run: | | ||
pip install -r examples/python/api_demo/requirements.txt | ||
pip install -r examples/python/car/requirements.txt | ||
pip install -r examples/python/multithreading/requirements.txt | ||
pip install -r examples/python/plots/requirements.txt | ||
pip install -r examples/python/text_logging/requirements.txt | ||
- name: Run e2e test | ||
shell: bash | ||
run: scripts/run_python_e2e_test.py --no-build # rerun-sdk is already built and installed | ||
|
||
- name: Unpack the wheel | ||
shell: bash | ||
run: | | ||
mkdir unpack-dist | ||
wheel unpack pre-dist/*.whl --dest unpack-dist | ||
- name: Get the folder name | ||
shell: bash | ||
run: | | ||
echo "pkg_folder=$(ls unpack-dist)" >> $GITHUB_ENV | ||
- name: Cache RRD dataset | ||
id: dataset | ||
uses: actions/cache@v3 | ||
with: | ||
path: examples/python/colmap/dataset/ | ||
# TODO(jleibs): Derive this key from the invocation below | ||
key: colmap-dataset-colmap-fiat-v0 | ||
|
||
- name: Generate Embedded RRD file | ||
shell: bash | ||
# If you change the line below you should almost definitely change the `key:` line above by giving it a new, unique name | ||
run: | | ||
mkdir rrd | ||
pip install -r examples/python/colmap/requirements.txt | ||
python3 examples/python/colmap/main.py --dataset colmap_fiat --resize 800x600 --save rrd/colmap_fiat.rrd | ||
cp rrd/colmap_fiat.rrd unpack-dist/${{ env.pkg_folder }}/rerun_sdk/rerun_demo/colmap_fiat.rrd | ||
- name: Repack the wheel | ||
shell: bash | ||
run: | | ||
mkdir dist | ||
wheel pack unpack-dist/${{ env.pkg_folder }} --dest dist/ | ||
- name: Upload wheels | ||
uses: actions/upload-artifact@v3 | ||
with: | ||
name: wheels | ||
path: dist | ||
|
||
# All platforms are currently creating the same rrd file, upload one of them | ||
- name: Save RRD artifact | ||
uses: actions/upload-artifact@v3 | ||
with: | ||
name: rrd | ||
path: rrd | ||
|
||
# --------------------------------------------------------------------------- | ||
matrix-setup: | ||
# Building all the wheels is expensive, so we only run this job when we push (to main or release tags), | ||
# or if the job was manually triggered with `force_build_wheel` set to true. | ||
|
@@ -87,40 +247,26 @@ jobs: | |
shell: bash | ||
run: | | ||
matrix=() | ||
matrix+=('{"platform": "macos", "target": "x86_64-apple-darwin", "wheel_suffix": "x86_64", "runs_on": "macos-latest" },') | ||
matrix+=('{"platform": "macos", "target": "aarch64-apple-darwin", "wheel_suffix": "x86_64", "runs_on": "macos-latest" },') # NOTE: we test the x86_64 wheel AGAIN, because the runner is x86_64 | ||
matrix+=('{"platform": "windows", "target": "x86_64-pc-windows-msvc", "wheel_suffix": "", "runs_on": "windows-latest-8-cores"},') | ||
matrix+=('{"platform": "linux", "target": "x86_64-unknown-linux-gnu", "wheel_suffix": "", "runs_on": "ubuntu-latest-16-cores", container: {"image": "rerunio/ci_docker:0.6"}}') | ||
matrix+=('{"platform": "macos", "target": "x86_64-apple-darwin", "run_tests": true, "runs_on": "macos-latest" },') | ||
matrix+=('{"platform": "macos", "target": "aarch64-apple-darwin", "run_tests": false, "runs_on": "macos-latest" },') # NOTE: we can't run tests on arm since our macos runner is x86_64 | ||
matrix+=('{"platform": "windows", "target": "x86_64-pc-windows-msvc", "run_tests": true, "runs_on": "windows-latest-8-cores"},') | ||
echo "Matrix values: ${matrix[@]}" | ||
echo "matrix={\"include\":[${matrix[@]}]}" >> $GITHUB_OUTPUT | ||
wheels: | ||
name: Build Python Wheels | ||
needs: [lint, matrix-setup] | ||
name: Build Remaining Python Wheels | ||
needs: [lint, matrix-setup, wheels-linux] | ||
|
||
strategy: | ||
matrix: ${{fromJson(needs.matrix-setup.outputs.matrix)}} | ||
|
||
runs-on: ${{ matrix.runs_on }} | ||
|
||
container: ${{ matrix.container }} | ||
|
||
steps: | ||
- uses: actions/checkout@v3 | ||
|
||
# These should already be in the docker container, but run for good measure. A no-op install | ||
# should be fast, and this way things don't break if we add new packages without rebuilding | ||
# docker | ||
- name: Cache APT Packages | ||
if: matrix.platform == 'linux' | ||
uses: awalsh128/[email protected] | ||
with: | ||
packages: ${{ env.UBUNTU_REQUIRED_PKGS }} | ||
version: 2.0 # Increment this to pull newer packages | ||
execute_install_scripts: true | ||
|
||
- name: Set up cargo cache | ||
uses: Swatinem/rust-cache@v2 | ||
with: | ||
|
@@ -133,7 +279,6 @@ jobs: | |
# The pip-cache setup logic doesn't work in the ubuntu docker container | ||
# That's probably fine since we bake these deps into the container already | ||
- name: Setup python | ||
if: matrix.platform != 'linux' | ||
uses: actions/setup-python@v4 | ||
with: | ||
python-version: ${{ env.PYTHON_VERSION }} | ||
|
@@ -194,6 +339,12 @@ jobs: | |
run: | | ||
python3 scripts/version_util.py --check_version | ||
- name: Store the expected version | ||
# Find the current cargo version and store it in the GITHUB_ENV var: `expected_version` | ||
shell: bash | ||
run: | | ||
echo "expected_version=$(python3 scripts/version_util.py --bare_cargo_version)" >> $GITHUB_ENV | ||
- name: Build Wheel | ||
uses: PyO3/maturin-action@v1 | ||
with: | ||
|
@@ -210,22 +361,33 @@ jobs: | |
--out pre-dist | ||
- name: Install built wheel | ||
if: ${{ matrix.run_tests }} | ||
# First we install the dependencies manually so we can use `--no-index` when installing the wheel. | ||
# Then install the wheel using a specific version and --no-index to guarantee we get the version from | ||
# the pre-dist folder. Note we don't use --force-reinstall here because --no-index means it wouldn't | ||
# find the dependencies to reinstall them. | ||
# TODO(jleibs): pull these deps from pyproject.toml | ||
shell: bash | ||
run: | | ||
pip install pre-dist/*${{ matrix.wheel_suffix }}.whl --force-reinstall | ||
pip uninstall rerun-sdk | ||
pip install deprecated numpy>=1.23 pyarrow==10.0.1 | ||
pip install rerun-sdk==${{ env.expected_version }} --no-index --find-links pre-dist | ||
- name: Verify built wheel version | ||
if: ${{ matrix.run_tests }} | ||
shell: bash | ||
run: | | ||
python3 -m rerun --version | ||
which rerun | ||
rerun --version | ||
- name: Run unit tests | ||
if: ${{ matrix.run_tests }} | ||
shell: bash | ||
run: cd rerun_py/tests && pytest | ||
|
||
- name: Install requirements for e2e test | ||
if: ${{ matrix.run_tests }} | ||
run: | | ||
pip install -r examples/python/api_demo/requirements.txt | ||
pip install -r examples/python/car/requirements.txt | ||
|
@@ -234,6 +396,7 @@ jobs: | |
pip install -r examples/python/text_logging/requirements.txt | ||
- name: Run e2e test | ||
if: ${{ matrix.run_tests }} | ||
shell: bash | ||
run: scripts/run_python_e2e_test.py --no-build # rerun-sdk is already built and installed | ||
|
||
|
@@ -248,21 +411,16 @@ jobs: | |
run: | | ||
echo "pkg_folder=$(ls unpack-dist)" >> $GITHUB_ENV | ||
- name: Cache RRD dataset | ||
id: dataset | ||
uses: actions/cache@v3 | ||
- name: Download RRD | ||
uses: actions/download-artifact@v3 | ||
with: | ||
path: examples/python/colmap/dataset/ | ||
# TODO(jleibs): Derive this key from the invocation below | ||
key: colmap-dataset-colmap-fiat-v0 | ||
name: rrd | ||
path: rrd | ||
|
||
- name: Generate Embedded RRD file | ||
- name: Insert the rrd | ||
shell: bash | ||
# If you change the line below you should almost definitely change the `key:` line above by giving it a new, unique name | ||
run: | | ||
mkdir rrd | ||
pip install -r examples/python/colmap/requirements.txt | ||
python3 examples/python/colmap/main.py --dataset colmap_fiat --resize 800x600 --save rrd/colmap_fiat.rrd | ||
cp rrd/colmap_fiat.rrd unpack-dist/${{ env.pkg_folder }}/rerun_sdk/rerun_demo/colmap_fiat.rrd | ||
- name: Repack the wheel | ||
|
@@ -277,14 +435,6 @@ jobs: | |
name: wheels | ||
path: dist | ||
|
||
# All platforms are currently creating the same rrd file, upload one of them | ||
- name: Save RRD artifact | ||
if: matrix.platform == 'linux' | ||
uses: actions/upload-artifact@v3 | ||
with: | ||
name: rrd | ||
path: rrd | ||
|
||
# --------------------------------------------------------------------------- | ||
|
||
upload_rrd: | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
e8e2d9b
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Rust Benchmark
datastore/num_rows=1000/num_instances=1000/packed=false/insert/default
2981165
ns/iter (± 108084
)2983729
ns/iter (± 236803
)1.00
datastore/num_rows=1000/num_instances=1000/packed=false/latest_at/default
371
ns/iter (± 8
)373
ns/iter (± 2
)0.99
datastore/num_rows=1000/num_instances=1000/packed=false/latest_at_missing/primary/default
262
ns/iter (± 0
)260
ns/iter (± 2
)1.01
datastore/num_rows=1000/num_instances=1000/packed=false/latest_at_missing/secondaries/default
422
ns/iter (± 0
)421
ns/iter (± 5
)1.00
datastore/num_rows=1000/num_instances=1000/packed=false/range/default
3017443
ns/iter (± 91276
)2906501
ns/iter (± 190324
)1.04
datastore/num_rows=1000/num_instances=1000/gc/default
2429381
ns/iter (± 6220
)2424905
ns/iter (± 30247
)1.00
mono_points_arrow/generate_message_bundles
30267924
ns/iter (± 944472
)25537096
ns/iter (± 1841190
)1.19
mono_points_arrow/generate_messages
124931098
ns/iter (± 964543
)112833858
ns/iter (± 2059820
)1.11
mono_points_arrow/encode_log_msg
158759587
ns/iter (± 1801739
)144913631
ns/iter (± 2403506
)1.10
mono_points_arrow/encode_total
314771503
ns/iter (± 1998647
)285462508
ns/iter (± 2631675
)1.10
mono_points_arrow/decode_log_msg
188383161
ns/iter (± 2069529
)177851559
ns/iter (± 1902386
)1.06
mono_points_arrow/decode_message_bundles
68310884
ns/iter (± 669304
)57587572
ns/iter (± 2094906
)1.19
mono_points_arrow/decode_total
255173400
ns/iter (± 1843914
)234335805
ns/iter (± 2962071
)1.09
mono_points_arrow_batched/generate_message_bundles
22826812
ns/iter (± 1815212
)20310483
ns/iter (± 1874770
)1.12
mono_points_arrow_batched/generate_messages
4458405
ns/iter (± 270867
)4107772
ns/iter (± 394221
)1.09
mono_points_arrow_batched/encode_log_msg
1341331
ns/iter (± 5088
)1374643
ns/iter (± 12370
)0.98
mono_points_arrow_batched/encode_total
30238386
ns/iter (± 1555436
)27449206
ns/iter (± 2329110
)1.10
mono_points_arrow_batched/decode_log_msg
781137
ns/iter (± 2263
)779914
ns/iter (± 7853
)1.00
mono_points_arrow_batched/decode_message_bundles
7744633
ns/iter (± 252068
)7658651
ns/iter (± 443227
)1.01
mono_points_arrow_batched/decode_total
8909336
ns/iter (± 438417
)8709255
ns/iter (± 732431
)1.02
batch_points_arrow/generate_message_bundles
195769
ns/iter (± 370
)194632
ns/iter (± 1078
)1.01
batch_points_arrow/generate_messages
5216
ns/iter (± 28
)5102
ns/iter (± 56
)1.02
batch_points_arrow/encode_log_msg
259608
ns/iter (± 2435
)260024
ns/iter (± 3017
)1.00
batch_points_arrow/encode_total
490557
ns/iter (± 2141
)488796
ns/iter (± 8339
)1.00
batch_points_arrow/decode_log_msg
212012
ns/iter (± 821
)213170
ns/iter (± 2039
)0.99
batch_points_arrow/decode_message_bundles
1904
ns/iter (± 8
)1924
ns/iter (± 19
)0.99
batch_points_arrow/decode_total
221271
ns/iter (± 860
)223283
ns/iter (± 2848
)0.99
arrow_mono_points/insert
2480208955
ns/iter (± 8613831
)2336885135
ns/iter (± 4319013
)1.06
arrow_mono_points/query
1194269
ns/iter (± 25834
)1189206
ns/iter (± 27197
)1.00
arrow_batch_points/insert
1146727
ns/iter (± 1648
)1155818
ns/iter (± 10281
)0.99
arrow_batch_points/query
14649
ns/iter (± 151
)14331
ns/iter (± 84
)1.02
arrow_batch_vecs/insert
26298
ns/iter (± 70
)26414
ns/iter (± 363
)1.00
arrow_batch_vecs/query
325545
ns/iter (± 1672
)325474
ns/iter (± 1206
)1.00
tuid/Tuid::random
34
ns/iter (± 0
)34
ns/iter (± 0
)1
This comment was automatically generated by workflow using github-action-benchmark.