Merge branch 'main' into validity-array
Richard Wesley committed Jul 11, 2024
2 parents e0de8ee + b3953c4 commit f2bd40d
Showing 245 changed files with 10,641 additions and 1,351 deletions.
3 changes: 2 additions & 1 deletion .github/workflows/Android.yml
@@ -52,6 +52,7 @@ jobs:
arch: [ armeabi-v7a, arm64-v8a ]

env:
ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
ENABLE_EXTENSION_AUTOLOADING: 1
ENABLE_EXTENSION_AUTOINSTALL: 1
GEN: ninja
@@ -98,4 +99,4 @@ jobs:
with:
name: duckdb-binaries-android-${{matrix.arch}}
path: |
libduckdb-android_${{matrix.arch}}.zip
libduckdb-android_${{matrix.arch}}.zip
15 changes: 8 additions & 7 deletions .github/workflows/LinuxRelease.yml
@@ -64,6 +64,7 @@ jobs:
DEBUG_STACKTRACE: 1
FORCE_WARN_UNUSED: 1
DUCKDB_RUN_PARALLEL_CSV_TESTS: 1
ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true

steps:
- name: Handrolled checkout
@@ -131,7 +132,6 @@ jobs:
./scripts/upload-assets-to-staging.sh github_release libduckdb-src.zip libduckdb-linux-amd64.zip duckdb_cli-linux-amd64.zip
- uses: actions/upload-artifact@v3
if: ${{ false }}
with:
name: duckdb-binaries-linux
path: |
@@ -141,8 +141,7 @@
linux-release-aarch64:
# Builds binaries for linux_arm64
name: Linux (aarch64)
# if: startsWith(github.ref, 'refs/tags/v') || github.ref == 'refs/heads/main' || github.repository != 'duckdb/duckdb'
if: ${{ false }}
if: startsWith(github.ref, 'refs/tags/v') || github.ref == 'refs/heads/main' || github.repository != 'duckdb/duckdb'
runs-on: ubuntu-latest
needs: linux-release-64
container: ubuntu:18.04
@@ -155,6 +154,7 @@
TREAT_WARNINGS_AS_ERRORS: 1
FORCE_WARN_UNUSED: 1
DUCKDB_PLATFORM: linux_arm64
ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true

steps:
- uses: actions/checkout@v3
@@ -184,7 +184,6 @@ jobs:
./scripts/upload-assets-to-staging.sh github_release libduckdb-linux-aarch64.zip duckdb_cli-linux-aarch64.zip
- uses: actions/upload-artifact@v3
if: ${{ false }}
with:
name: duckdb-binaries-linux-aarch64
path: |
@@ -197,9 +196,10 @@
# Builds extensions for linux_amd64
name: Linux Extensions (x64)
runs-on: ubuntu-latest
if: ${{ false }}
container: ubuntu:18.04
needs: linux-release-64
env:
ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true

steps:
- uses: actions/checkout@v3
@@ -236,11 +236,12 @@ jobs:
linux-extensions-64-aarch64:
# Builds extensions for linux_arm64
name: Linux Extensions (aarch64)
# if: startsWith(github.ref, 'refs/tags/v') || github.ref == 'refs/heads/main'
if: ${{ false }}
if: startsWith(github.ref, 'refs/tags/v') || github.ref == 'refs/heads/main'
runs-on: ubuntu-latest
container: ubuntu:18.04
needs: linux-release-64
env:
ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true

steps:
- uses: actions/checkout@v3
4 changes: 4 additions & 0 deletions .github/workflows/NightlyTests.yml
@@ -154,6 +154,8 @@ jobs:
needs: linux-memory-leaks
runs-on: ubuntu-latest
container: quay.io/pypa/manylinux2014_x86_64
env:
ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
steps:
- uses: actions/checkout@v3
with:
@@ -181,6 +183,7 @@ jobs:
env:
CC: /usr/bin/gcc
CXX: /usr/bin/g++
ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true

steps:
- uses: actions/checkout@v3
@@ -252,6 +255,7 @@ jobs:
GEN: ninja
DUCKDEBUG: 1
ASAN_OPTIONS: detect_leaks=0
ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true

steps:
- uses: actions/checkout@v3
10 changes: 8 additions & 2 deletions .github/workflows/Python.yml
@@ -117,11 +117,11 @@ jobs:
manylinux-extensions-x64:
name: Linux Extensions (linux_amd64_gcc4)
runs-on: ubuntu-latest
if: false
container: quay.io/pypa/manylinux2014_x86_64
needs: linux-python3-9
env:
GEN: ninja
ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true

steps:
- uses: actions/checkout@v3
@@ -185,6 +185,7 @@ jobs:
python_build: 'cp311-*'
- isRelease: false
arch: aarch64
needs: manylinux-extensions-x64
env:
CIBW_BUILD: ${{ matrix.python_build}}
CIBW_SKIP: '*-musllinux_aarch64'
@@ -231,6 +232,12 @@ jobs:
pip install 'cibuildwheel>=2.16.2' build
python -m pip install numpy --config-settings=setup-args="-Dallow-noblas=true"
- uses: actions/download-artifact@v3
if: ${{ matrix.arch == 'x86_64' }}
with:
name: manylinux-extensions-x64
path: tools/pythonpkg

- name: List extensions to be tested
shell: bash
run: |
@@ -454,7 +461,6 @@ jobs:
- osx-python3
- win-python3
- linux-python3
- manylinux-extensions-x64
# Note that we want to run this by default ONLY if no override_git_describe is provided
# This means we are not staging a release
if: (( startsWith(github.ref, 'refs/tags/v') || github.ref == 'refs/heads/main' )) && (( inputs.override_git_describe == '' )) && (( github.repository == 'duckdb/duckdb' ))
13 changes: 13 additions & 0 deletions benchmark/micro/join/join_order_optimizer_should_respect_limit.benchmark
@@ -0,0 +1,13 @@
# name: benchmark/micro/join/join_order_optimizer_should_respect_limit.benchmark
# description: If a constant value limit operator exists, it should influence the estimated cardinality
# group: [join]

name join limit
group join

load
create table t_left as select (random() * 1000000000)::INT a from range(400000);
create table t_right as select range b from range(1000000000);

run
select * from t_left, (select * from t_right limit 10000) where a = b;
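
A hedged illustration (not part of the commit): the point of this benchmark is that the constant LIMIT should cap the estimated cardinality of the subquery at 10000 rows, so the join order optimizer treats the limited side as small even though t_right contains a billion rows. Assuming the tables created in the load step above, the estimate can be inspected with

EXPLAIN select * from t_left, (select * from t_right limit 10000) where a = b;

If the limit is respected, the cardinality estimate shown for the subquery side should be at most 10000 rather than the full size of t_right.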
4 changes: 2 additions & 2 deletions benchmark/micro/limit/parallel_limit.benchmark
@@ -8,7 +8,7 @@ subgroup limit

load
CREATE TABLE integers AS SELECT * FROM range(100000000) tbl(i);
CREATE TABLE other_table AS SELECT 337 i UNION ALL SELECT 948247 UNION ALL SELECT 17797934 UNION ALL SELECT 99999998 UNION ALL SELECT 99999999
CREATE TABLE other_table AS SELECT 337 i UNION ALL SELECT 948247 UNION ALL SELECT 17797934 UNION ALL SELECT 99999998 UNION ALL SELECT 99999999;

run
SELECT * FROM integers WHERE i IN (SELECT * FROM other_table) LIMIT 4
SELECT * FROM integers WHERE i IN (SELECT * FROM other_table) LIMIT 4;
@@ -6,7 +6,7 @@ name batched_arrow_result
group micro
subgroup result_collection

resultmode arrow
resultmode arrow 10000

# By not disabling order preservation we make use of the batched collector
load
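
Hedged aside (not part of the diff): the comment above relies on DuckDB's default of preserving insertion order, which is what routes the result through the batched collector; a variant that wanted the non-batched path could disable it explicitly, for example

SET preserve_insertion_order = false;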
21 changes: 21 additions & 0 deletions benchmark/tpch/join/join_filter_pushdown.benchmark
@@ -0,0 +1,21 @@
# name: benchmark/tpch/join/join_filter_pushdown.benchmark
# description: Join filter pushdown
# group: [join]

name Join Filter Pushdown
group join
subgroup tpch

require tpch

cache tpch_sf1.duckdb

load
CALL dbgen(sf=1);

run
SELECT * from lineitem WHERE l_orderkey=(SELECT MAX(l_orderkey) FROM lineitem) ORDER BY ALL

result IIIIIIIIIIIIIIII
6000000 32255 2256 1 5.00 5936.25 0.04 0.03 N O 1996-11-02 1996-11-19 1996-12-01 TAKE BACK RETURN MAIL riously pe
6000000 96127 6128 2 28.00 31447.36 0.01 0.02 N O 1996-09-22 1996-10-01 1996-10-21 NONE AIR pecial excuses nag evenly f
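
A hedged sketch of what this benchmark exercises (illustration only, not part of the diff): the scalar subquery computing MAX(l_orderkey) is planned as a join, and join filter pushdown allows the single value it produces to be pushed into the scan of lineitem as a table filter, so row groups whose min/max statistics exclude that value can be skipped. Assuming TPC-H SF1 has been generated as in the load step, the effect can be inspected with

EXPLAIN ANALYZE SELECT * FROM lineitem WHERE l_orderkey=(SELECT MAX(l_orderkey) FROM lineitem) ORDER BY ALL;

With the pushdown active, the probe-side scan is expected to emit far fewer rows than the roughly six million rows of lineitem at SF1.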
21 changes: 21 additions & 0 deletions benchmark/tpch/join/partition_pushdown.benchmark
@@ -0,0 +1,21 @@
# name: benchmark/tpch/join/partition_pushdown.benchmark
# description: Join filter pushdown into hive partitions
# group: [join]

name Hive Filter Join Filter Pushdown
group join
subgroup tpch

require parquet

require tpch

load
CALL dbgen(sf=1);
COPY (FROM lineitem ORDER BY l_shipdate) TO 'lineitem_partitioned_shipdate' (FORMAT PARQUET, PARTITION_BY l_shipdate);

run
SELECT COUNT(*) from 'lineitem_partitioned_shipdate/**/*.parquet' WHERE l_shipdate=(SELECT MAX(l_shipdate) FROM lineitem)

result I
18
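
Hedged illustration (not part of the commit): the COPY in the load step writes one hive partition directory per distinct l_shipdate, and the filter derived from the MAX(l_shipdate) subquery is expected to prune all but the single matching partition, so only that partition's Parquet files are read instead of the whole partitioned dataset. Assuming the partitioned copy exists, the pruning can be checked with

EXPLAIN ANALYZE SELECT COUNT(*) FROM 'lineitem_partitioned_shipdate/**/*.parquet' WHERE l_shipdate=(SELECT MAX(l_shipdate) FROM lineitem);

where the Parquet scan should report reading only the files under the matching l_shipdate=... directory.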