diff --git a/.asf.yaml b/.asf.yaml index 4693631be38b3..d2522ecae0b43 100644 --- a/.asf.yaml +++ b/.asf.yaml @@ -25,6 +25,7 @@ github: - davisusanibar - felipecrv - js8544 + - amoeba notifications: commits: commits@arrow.apache.org diff --git a/.env b/.env index a551e2120a6fb..6746892fd4ed8 100644 --- a/.env +++ b/.env @@ -49,7 +49,7 @@ ULIMIT_CORE=-1 ALMALINUX=8 ALPINE_LINUX=3.16 DEBIAN=11 -FEDORA=35 +FEDORA=38 UBUNTU=20.04 # Default versions for various dependencies diff --git a/.gitattributes b/.gitattributes index 69f4139c4e4f4..70007c26c8b9b 100644 --- a/.gitattributes +++ b/.gitattributes @@ -3,6 +3,9 @@ cpp/src/generated/*.cpp linguist-generated=true cpp/src/generated/*.h linguist-generated=true go/**/*.s linguist-generated=true go/arrow/unionmode_string.go linguist-generated=true +go/arrow/internal/flatbuf/*.go linguist-generated=true +go/**/*.pb.go linguist-generated=true +go/parquet/internal/gen-go/parquet/*.go linguist-generated=true r/R/RcppExports.R linguist-generated=true r/R/arrowExports.R linguist-generated=true r/src/RcppExports.cpp linguist-generated=true diff --git a/.github/workflows/comment_bot.yml b/.github/workflows/comment_bot.yml index cc9e02d955afd..f27d95c4e8cd7 100644 --- a/.github/workflows/comment_bot.yml +++ b/.github/workflows/comment_bot.yml @@ -35,13 +35,13 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: path: arrow # fetch the tags for version number generation fetch-depth: 0 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: python-version: 3.8 - name: Install Archery and Crossbow dependencies @@ -60,8 +60,8 @@ jobs: if: startsWith(github.event.comment.body, '@github-actions autotune') runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - uses: r-lib/actions/pr-fetch@v2 + - uses: 
actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + - uses: r-lib/actions/pr-fetch@11a22a908006c25fe054c4ef0ac0436b1de3edbe # v2.6.4 with: repo-token: ${{ secrets.GITHUB_TOKEN }} - name: See what is different @@ -121,7 +121,7 @@ jobs: --clang_format_binary=clang-format-${CLANG_TOOLS} \ --exclude_glob=cpp/build-support/lint_exclusions.txt \ --source_dir=r/src --quiet --fix - - uses: r-lib/actions/setup-r@v2 + - uses: r-lib/actions/setup-r@11a22a908006c25fe054c4ef0ac0436b1de3edbe # v2.6.4 if: env.R_DOCS == 'true' || env.R_CODE == 'true' || endsWith(github.event.comment.body, 'everything') - name: Update R docs if: env.R_DOCS == 'true' || endsWith(github.event.comment.body, 'everything') @@ -149,7 +149,7 @@ jobs: git config user.name "$(git log -1 --pretty=format:%an)" git config user.email "$(git log -1 --pretty=format:%ae)" git commit -a -m 'Autoformat/render all the things [automated commit]' || echo "No changes to commit" - - uses: r-lib/actions/pr-push@v2 + - uses: r-lib/actions/pr-push@11a22a908006c25fe054c4ef0ac0436b1de3edbe # v2.6.4 with: repo-token: ${{ secrets.GITHUB_TOKEN }} @@ -158,8 +158,8 @@ jobs: if: startsWith(github.event.comment.body, '@github-actions rebase') runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - uses: r-lib/actions/pr-fetch@v2 + - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + - uses: r-lib/actions/pr-fetch@11a22a908006c25fe054c4ef0ac0436b1de3edbe # v2.6.4 with: repo-token: ${{ secrets.GITHUB_TOKEN }} - name: Rebase on ${{ github.repository }} default branch @@ -170,7 +170,7 @@ jobs: git remote add upstream https://github.com/${{ github.repository }} git fetch --unshallow upstream ${{ github.event.repository.default_branch }} git rebase upstream/${{ github.event.repository.default_branch }} - - uses: r-lib/actions/pr-push@v2 + - uses: r-lib/actions/pr-push@11a22a908006c25fe054c4ef0ac0436b1de3edbe # v2.6.4 with: repo-token: ${{ secrets.GITHUB_TOKEN }} args: "--force" @@ -182,7 
+182,7 @@ jobs: if: github.event.comment.body == 'take' runs-on: ubuntu-latest steps: - - uses: actions/github-script@v6 + - uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1 with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | diff --git a/.github/workflows/cpp.yml b/.github/workflows/cpp.yml index a9361f9f51378..e6ae6c60b0f4c 100644 --- a/.github/workflows/cpp.yml +++ b/.github/workflows/cpp.yml @@ -96,12 +96,12 @@ jobs: UBUNTU: ${{ matrix.ubuntu }} steps: - name: Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 submodules: recursive - name: Cache Docker Volumes - uses: actions/cache@v3 + uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 # v3.3.1 with: path: .docker key: ${{ matrix.image }}-${{ hashFiles('cpp/**') }} diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml index cfa9ffb49d7ad..df2b20a9e3c77 100644 --- a/.github/workflows/dev.yml +++ b/.github/workflows/dev.yml @@ -37,11 +37,11 @@ jobs: if: ${{ !contains(github.event.pull_request.title, 'WIP') }} steps: - name: Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: python-version: 3.8 - name: Setup Archery @@ -84,19 +84,19 @@ jobs: GIT_COMMITTER_EMAIL: "github-actions[bot]@users.noreply.github.com" steps: - name: Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 - name: Install Python - uses: actions/setup-python@v4 + uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: python-version: '3.8' - name: Install Ruby - uses: ruby/setup-ruby@v1 + uses: ruby/setup-ruby@250fcd6a742febb1123a77a841497ccaa8b9e939 # v1.152.0 with: 
ruby-version: '2.7' - name: Install .NET - uses: actions/setup-dotnet@v3 + uses: actions/setup-dotnet@3447fd6a9f9e57506b15f895c5b76d3b197dc7c2 # v3.2.0 with: dotnet-version: '7.0.x' - name: Install Dependencies diff --git a/.github/workflows/dev_pr.yml b/.github/workflows/dev_pr.yml index e5d2a77c5a8a2..78b01b561f3cb 100644 --- a/.github/workflows/dev_pr.yml +++ b/.github/workflows/dev_pr.yml @@ -43,7 +43,7 @@ jobs: name: Process runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: repository: apache/arrow ref: main @@ -53,7 +53,7 @@ jobs: if: | (github.event.action == 'opened' || github.event.action == 'edited') - uses: actions/github-script@v6 + uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1 with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | @@ -64,7 +64,7 @@ jobs: if: | (github.event.action == 'opened' || github.event.action == 'edited') - uses: actions/github-script@v6 + uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1 with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | @@ -75,7 +75,7 @@ jobs: if: | (github.event.action == 'opened' || github.event.action == 'edited') - uses: actions/github-script@v6 + uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1 with: debug: true github-token: ${{ secrets.GITHUB_TOKEN }} @@ -87,7 +87,7 @@ jobs: if: | (github.event.action == 'opened' || github.event.action == 'synchronize') - uses: actions/labeler@v4 + uses: actions/labeler@ac9175f8a1f3625fd0d4fb234536d26811351594 # v4.3.0 with: repo-token: ${{ secrets.GITHUB_TOKEN }} configuration-path: .github/workflows/dev_pr/labeler.yml diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index a1ac4c3067dae..b30e1eb8809db 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -38,20 +38,20 @@ jobs: UBUNTU: "22.04" steps: - name: Checkout Arrow - uses: 
actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 - name: Free up disk space run: | ci/scripts/util_free_space.sh - name: Cache Docker Volumes - uses: actions/cache@v3 + uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 # v3.3.1 with: path: .docker key: ubuntu-docs-${{ hashFiles('cpp/**') }} restore-keys: ubuntu-docs- - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: python-version: 3.8 - name: Setup Archery diff --git a/.github/workflows/docs_light.yml b/.github/workflows/docs_light.yml index 74e6eabe24795..e96ccecdff598 100644 --- a/.github/workflows/docs_light.yml +++ b/.github/workflows/docs_light.yml @@ -47,17 +47,17 @@ jobs: PYTHON: "3.9" steps: - name: Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 - name: Cache Docker Volumes - uses: actions/cache@v3 + uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 # v3.3.1 with: path: .docker key: conda-docs-${{ hashFiles('cpp/**') }} restore-keys: conda-docs- - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: python-version: 3.8 - name: Setup Archery diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index a0dfb9fea1673..11668aaf1b301 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -73,7 +73,7 @@ jobs: GO: ${{ matrix.go }} steps: - name: Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 submodules: recursive @@ -106,7 +106,7 @@ jobs: github.event_name == 'push' && github.repository == 'apache/arrow' && github.ref_name == 'main' - uses: actions/setup-go@v4 + uses: actions/setup-go@93397bea11091df50f3d7e59dc26a7711a8bcfbe # v4.1.0 with: go-version: 
${{ matrix.go }} cache: true @@ -162,12 +162,12 @@ jobs: GO: ${{ matrix.go }} steps: - name: Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 submodules: recursive - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: python-version: 3.8 - name: Setup Archery @@ -203,11 +203,11 @@ jobs: GO: ${{ matrix.go }} steps: - name: Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: python-version: 3.8 - name: Setup Archery @@ -240,12 +240,12 @@ jobs: go: [1.19, '1.20'] steps: - name: Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 submodules: recursive - name: Install go - uses: actions/setup-go@v4 + uses: actions/setup-go@93397bea11091df50f3d7e59dc26a7711a8bcfbe # v4.1.0 with: go-version: ${{ matrix.go }} cache: true @@ -273,12 +273,12 @@ jobs: go: [1.19, '1.20'] steps: - name: Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 submodules: recursive - name: Install go - uses: actions/setup-go@v4 + uses: actions/setup-go@93397bea11091df50f3d7e59dc26a7711a8bcfbe # v4.1.0 with: go-version: ${{ matrix.go }} cache: true @@ -299,7 +299,7 @@ jobs: github.event_name == 'push' && github.repository == 'apache/arrow' && github.ref_name == 'main' - uses: actions/setup-python@v4 + uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: python-version: '3.10' - name: Run Benchmarks diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 
430b0bb2822e7..bd99b62a2fe02 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -62,12 +62,12 @@ jobs: timeout-minutes: 60 steps: - name: Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 submodules: recursive - name: Checkout Arrow Rust - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: repository: apache/arrow-rs path: rust @@ -75,13 +75,13 @@ jobs: run: | ci/scripts/util_free_space.sh - name: Cache Docker Volumes - uses: actions/cache@v3 + uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 # v3.3.1 with: path: .docker key: conda-${{ hashFiles('cpp/**') }} restore-keys: conda- - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: python-version: 3.8 - name: Setup Archery diff --git a/.github/workflows/issue_bot.yml b/.github/workflows/issue_bot.yml index ae344a4c1eba9..86d1858c8c596 100644 --- a/.github/workflows/issue_bot.yml +++ b/.github/workflows/issue_bot.yml @@ -33,7 +33,7 @@ jobs: if: github.event.issue.pull_request == null runs-on: ubuntu-latest steps: - - uses: actions/github-script@v6 + - uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1 with: script: | let split_body = context.payload.issue.body.split('### Component(s)'); diff --git a/.github/workflows/java.yml b/.github/workflows/java.yml index 76bc57a6c712c..69adc184b7fe7 100644 --- a/.github/workflows/java.yml +++ b/.github/workflows/java.yml @@ -49,8 +49,8 @@ env: jobs: - debian: - name: ${{ matrix.title }} + ubuntu: + name: AMD64 Ubuntu 22.04 Java JDK ${{ matrix.jdk }} Maven ${{ matrix.maven }} runs-on: ubuntu-latest if: ${{ !contains(github.event.pull_request.title, 'WIP') }} timeout-minutes: 30 @@ -58,40 +58,25 @@ jobs: fail-fast: false matrix: jdk: [8, 11, 17, 21] - include: - - jdk: 8 - 
title: AMD64 Debian 9 Java JDK 8 Maven 3.5.4 - maven: 3.5.4 - image: debian-java - - jdk: 11 - title: AMD64 Debian 9 Java JDK 11 Maven 3.6.2 - maven: 3.6.2 - image: debian-java - - jdk: 17 - title: AMD64 Ubuntu 22.04 Java JDK 17 Maven 3.9.4 - maven: 3.9.4 - image: eclipse-java - - jdk: 21 - title: AMD64 Ubuntu 22.04 Java JDK 21 Maven 3.9.4 - maven: 3.9.4 - image: eclipse-java + maven: [3.9.5] + image: [java] env: JDK: ${{ matrix.jdk }} MAVEN: ${{ matrix.maven }} steps: - name: Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 submodules: recursive - name: Cache Docker Volumes - uses: actions/cache@v3 + uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 # v3.3.1 with: path: .docker key: maven-${{ hashFiles('java/**') }} restore-keys: maven- - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: python-version: 3.8 - name: Setup Archery diff --git a/.github/workflows/java_jni.yml b/.github/workflows/java_jni.yml index 467e8a88af5d3..76b10b828ee49 100644 --- a/.github/workflows/java_jni.yml +++ b/.github/workflows/java_jni.yml @@ -56,7 +56,7 @@ jobs: timeout-minutes: 500 steps: - name: Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 submodules: recursive @@ -64,13 +64,13 @@ jobs: run: | ci/scripts/util_free_space.sh - name: Cache Docker Volumes - uses: actions/cache@v3 + uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 # v3.3.1 with: path: .docker key: java-jni-manylinux-2014-${{ hashFiles('cpp/**', 'java/**') }} restore-keys: java-jni-manylinux-2014- - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: python-version: 3.8 - name: Setup Archery @@ -99,18 +99,18 @@ jobs: timeout-minutes: 90 steps: - name: 
Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 submodules: recursive - name: Cache Docker Volumes - uses: actions/cache@v3 + uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 # v3.3.1 with: path: .docker key: maven-${{ hashFiles('java/**') }} restore-keys: maven- - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: python-version: 3.8 - name: Setup Archery diff --git a/.github/workflows/java_nightly.yml b/.github/workflows/java_nightly.yml index 41843d663051a..11aa4e59beefd 100644 --- a/.github/workflows/java_nightly.yml +++ b/.github/workflows/java_nightly.yml @@ -43,7 +43,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 1 path: arrow @@ -51,14 +51,14 @@ jobs: ref: main submodules: recursive - name: Checkout Crossbow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 path: crossbow repository: ursacomputing/crossbow ref: main - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: cache: 'pip' python-version: 3.8 diff --git a/.github/workflows/js.yml b/.github/workflows/js.yml index 781b2023e2f42..b2040a76dec48 100644 --- a/.github/workflows/js.yml +++ b/.github/workflows/js.yml @@ -47,11 +47,11 @@ jobs: timeout-minutes: 60 steps: - name: Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: python-version: 3.8 - name: Setup Archery diff --git a/.github/workflows/pr_bot.yml 
b/.github/workflows/pr_bot.yml index 617f3f2e017a3..596d3511a543d 100644 --- a/.github/workflows/pr_bot.yml +++ b/.github/workflows/pr_bot.yml @@ -40,7 +40,7 @@ jobs: - name: 'Download PR review payload' id: 'download' if: github.event_name == 'workflow_run' - uses: actions/github-script@v6 + uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410 # v6.4.1 with: script: | const run_id = "${{ github.event.workflow_run.id }}"; @@ -73,7 +73,7 @@ jobs: curl -sL -o committers.yml $url echo "committers_path=$(pwd)/committers.yml" >> $GITHUB_OUTPUT - name: Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: path: arrow repository: apache/arrow @@ -82,7 +82,7 @@ jobs: # fetch the tags for version number generation fetch-depth: 0 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: python-version: 3.8 - name: Install Archery and Crossbow dependencies diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml index 7a8fd8d10c235..d201f90101de8 100644 --- a/.github/workflows/python.yml +++ b/.github/workflows/python.yml @@ -89,18 +89,18 @@ jobs: NUMPY: ${{ matrix.numpy || 'latest' }} steps: - name: Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 submodules: recursive - name: Cache Docker Volumes - uses: actions/cache@v3 + uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 # v3.3.1 with: path: .docker key: ${{ matrix.cache }}-${{ hashFiles('cpp/**') }} restore-keys: ${{ matrix.cache }}- - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: python-version: 3.8 - name: Setup Archery diff --git a/.github/workflows/r.yml b/.github/workflows/r.yml index a8680aea56d48..db10e6f28ce1c 100644 --- a/.github/workflows/r.yml 
+++ b/.github/workflows/r.yml @@ -68,12 +68,12 @@ jobs: UBUNTU: ${{ matrix.ubuntu }} steps: - name: Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 submodules: recursive - name: Cache Docker Volumes - uses: actions/cache@v3 + uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 # v3.3.1 with: path: .docker # As this key is identical on both matrix builds only one will be able to successfully cache, @@ -83,7 +83,7 @@ jobs: ubuntu-${{ matrix.ubuntu }}-r-${{ matrix.r }}-${{ hashFiles('cpp/src/**/*.cc','cpp/src/**/*.h)') }}- ubuntu-${{ matrix.ubuntu }}-r-${{ matrix.r }}- - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: python-version: 3.8 - name: Setup Archery @@ -106,7 +106,7 @@ jobs: if: always() - name: Save the test output if: always() - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 with: name: test-output path: r/check/arrow.Rcheck/tests/testthat.Rout* @@ -139,12 +139,12 @@ jobs: DEVTOOLSET_VERSION: ${{ matrix.config.devtoolset }} steps: - name: Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 submodules: recursive - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: python-version: 3.8 - name: Setup Archery @@ -168,7 +168,7 @@ jobs: if: always() - name: Save the test output if: always() - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 with: name: test-output path: r/check/arrow.Rcheck/tests/testthat.Rout* diff --git a/.github/workflows/r_nightly.yml b/.github/workflows/r_nightly.yml index 7f21d4658e007..5a34239721392 100644 --- a/.github/workflows/r_nightly.yml +++ 
b/.github/workflows/r_nightly.yml @@ -45,7 +45,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 1 path: arrow @@ -53,14 +53,14 @@ jobs: ref: main submodules: recursive - name: Checkout Crossbow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 path: crossbow repository: ursacomputing/crossbow ref: main - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: cache: 'pip' python-version: 3.8 @@ -86,7 +86,7 @@ jobs: exit 1 fi - name: Cache Repo - uses: actions/cache@v3 + uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 # v3.3.1 with: path: repo key: r-nightly-${{ github.run_id }} diff --git a/.github/workflows/ruby.yml b/.github/workflows/ruby.yml index 2e4b98c2428e9..b9a4ac03b6108 100644 --- a/.github/workflows/ruby.yml +++ b/.github/workflows/ruby.yml @@ -71,18 +71,18 @@ jobs: UBUNTU: ${{ matrix.ubuntu }} steps: - name: Checkout Arrow - uses: actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 submodules: recursive - name: Cache Docker Volumes - uses: actions/cache@v3 + uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 # v3.3.1 with: path: .docker key: ubuntu-${{ matrix.ubuntu }}-ruby-${{ hashFiles('cpp/**') }} restore-keys: ubuntu-${{ matrix.ubuntu }}-ruby- - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 with: python-version: 3.8 - name: Setup Archery diff --git a/.github/workflows/swift.yml b/.github/workflows/swift.yml index 825921ac6fa24..f55e9e77503c0 100644 --- a/.github/workflows/swift.yml +++ b/.github/workflows/swift.yml @@ -51,7 +51,7 @@ jobs: timeout-minutes: 15 steps: - name: Checkout Arrow - uses: 
actions/checkout@v4 + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 with: fetch-depth: 0 submodules: recursive diff --git a/c_glib/README.md b/c_glib/README.md index 23e3bd91b8a2a..d571053c3dce8 100644 --- a/c_glib/README.md +++ b/c_glib/README.md @@ -67,7 +67,8 @@ GLib (replace the version number in the following commands with the one you use): ```console -$ wget 'https://www.apache.org/dyn/closer.lua?action=download&filename=arrow/arrow-12.0.0/apache-arrow-12.0.0.tar.gz' +$ wget 'https://www.apache.org/dyn/closer.lua?action=download&filename=arrow/arrow-12.0.0/apache-arrow-12.0.0.tar.gz' \ + --output-document apache-arrow-12.0.0.tar.gz $ tar xf apache-arrow-12.0.0.tar.gz $ cd apache-arrow-12.0.0 ``` @@ -81,7 +82,7 @@ required packages. macOS: ```console -$ brew bundle +$ brew bundle --file=c_glib/Brewfile $ meson setup c_glib.build c_glib --buildtype=release $ meson compile -C c_glib.build $ sudo meson install -C c_glib.build @@ -127,7 +128,7 @@ $ sudo pip3 install meson On macOS with [Homebrew](https://brew.sh/): ```console -$ brew bundle +$ brew bundle --file=c_glib/Brewfile ``` You can build and install Arrow GLib by the followings: diff --git a/c_glib/arrow-glib/version.h.in b/c_glib/arrow-glib/version.h.in index 60c02936193bc..abb8ba08708de 100644 --- a/c_glib/arrow-glib/version.h.in +++ b/c_glib/arrow-glib/version.h.in @@ -110,6 +110,15 @@ # define GARROW_UNAVAILABLE(major, minor) G_UNAVAILABLE(major, minor) #endif +/** + * GARROW_VERSION_15_0: + * + * You can use this macro value for compile time API version check. 
+ * + * Since: 15.0.0 + */ +#define GARROW_VERSION_15_0 G_ENCODE_VERSION(15, 0) + /** * GARROW_VERSION_14_0: * @@ -346,6 +355,20 @@ #define GARROW_AVAILABLE_IN_ALL +#if GARROW_VERSION_MIN_REQUIRED >= GARROW_VERSION_15_0 +# define GARROW_DEPRECATED_IN_15_0 GARROW_DEPRECATED +# define GARROW_DEPRECATED_IN_15_0_FOR(function) GARROW_DEPRECATED_FOR(function) +#else +# define GARROW_DEPRECATED_IN_15_0 +# define GARROW_DEPRECATED_IN_15_0_FOR(function) +#endif + +#if GARROW_VERSION_MAX_ALLOWED < GARROW_VERSION_15_0 +# define GARROW_AVAILABLE_IN_15_0 GARROW_UNAVAILABLE(15, 0) +#else +# define GARROW_AVAILABLE_IN_15_0 +#endif + #if GARROW_VERSION_MIN_REQUIRED >= GARROW_VERSION_14_0 # define GARROW_DEPRECATED_IN_14_0 GARROW_DEPRECATED # define GARROW_DEPRECATED_IN_14_0_FOR(function) GARROW_DEPRECATED_FOR(function) diff --git a/c_glib/doc/gandiva-glib/gandiva-glib-docs.xml b/c_glib/doc/gandiva-glib/gandiva-glib-docs.xml index 182bbfb527eb2..a5c32f11337e8 100644 --- a/c_glib/doc/gandiva-glib/gandiva-glib-docs.xml +++ b/c_glib/doc/gandiva-glib/gandiva-glib-docs.xml @@ -100,6 +100,10 @@ Index of deprecated API + + Index of new symbols in 15.0.0 + + Index of new symbols in 4.0.0 diff --git a/c_glib/gandiva-glib/function-registry.cpp b/c_glib/gandiva-glib/function-registry.cpp index a95019bd62c2b..f47262986db82 100644 --- a/c_glib/gandiva-glib/function-registry.cpp +++ b/c_glib/gandiva-glib/function-registry.cpp @@ -18,8 +18,8 @@ */ #include -#include +#include #include #include @@ -34,18 +34,86 @@ G_BEGIN_DECLS * Since: 0.14.0 */ -G_DEFINE_TYPE(GGandivaFunctionRegistry, - ggandiva_function_registry, - G_TYPE_OBJECT) +struct GGandivaFunctionRegistryPrivate { + std::shared_ptr function_registry; +}; + +enum { + PROP_FUNCTION_REGISTRY = 1, +}; + +G_DEFINE_TYPE_WITH_PRIVATE(GGandivaFunctionRegistry, + ggandiva_function_registry, + G_TYPE_OBJECT) + +#define GGANDIVA_FUNCTION_REGISTRY_GET_PRIVATE(object) \ + static_cast( \ + ggandiva_function_registry_get_instance_private( \ + 
GGANDIVA_FUNCTION_REGISTRY(object))) + +static void +ggandiva_function_registry_finalize(GObject *object) +{ + auto priv = GGANDIVA_FUNCTION_REGISTRY_GET_PRIVATE(object); + priv->function_registry.~shared_ptr(); + G_OBJECT_CLASS(ggandiva_function_registry_parent_class)->finalize(object); +} + +static void +ggandiva_function_registry_set_property(GObject *object, + guint prop_id, + const GValue *value, + GParamSpec *pspec) +{ + auto priv = GGANDIVA_FUNCTION_REGISTRY_GET_PRIVATE(object); + + switch (prop_id) { + case PROP_FUNCTION_REGISTRY: + priv->function_registry = + *static_cast *>( + g_value_get_pointer(value)); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec); + break; + } +} static void ggandiva_function_registry_init(GGandivaFunctionRegistry *object) { + auto priv = GGANDIVA_FUNCTION_REGISTRY_GET_PRIVATE(object); + new(&priv->function_registry) std::shared_ptr; } static void ggandiva_function_registry_class_init(GGandivaFunctionRegistryClass *klass) { + auto gobject_class = G_OBJECT_CLASS(klass); + gobject_class->finalize = ggandiva_function_registry_finalize; + gobject_class->set_property = ggandiva_function_registry_set_property; + + GParamSpec *spec; + spec = g_param_spec_pointer("function-registry", + "Function registry", + "The raw std::shared_ptr *", + static_cast(G_PARAM_WRITABLE | + G_PARAM_CONSTRUCT_ONLY)); + g_object_class_install_property(gobject_class, PROP_FUNCTION_REGISTRY, spec); +} + +/** + * ggandiva_function_registry_default: + * + * Returns: (transfer full): The process-wide default function registry. 
+ * + * Since: 15.0.0 + */ +GGandivaFunctionRegistry * +ggandiva_function_registry_default(void) +{ + auto gandiva_function_registry = gandiva::default_function_registry(); + return ggandiva_function_registry_new_raw(&gandiva_function_registry); } /** @@ -58,7 +126,8 @@ ggandiva_function_registry_class_init(GGandivaFunctionRegistryClass *klass) GGandivaFunctionRegistry * ggandiva_function_registry_new(void) { - return GGANDIVA_FUNCTION_REGISTRY(g_object_new(GGANDIVA_TYPE_FUNCTION_REGISTRY, NULL)); + auto gandiva_function_registry = std::make_shared(); + return ggandiva_function_registry_new_raw(&gandiva_function_registry); } /** @@ -75,15 +144,16 @@ GGandivaNativeFunction * ggandiva_function_registry_lookup(GGandivaFunctionRegistry *function_registry, GGandivaFunctionSignature *function_signature) { - gandiva::FunctionRegistry gandiva_function_registry; + auto gandiva_function_registry = + ggandiva_function_registry_get_raw(function_registry); auto gandiva_function_signature = ggandiva_function_signature_get_raw(function_signature); auto gandiva_native_function = - gandiva_function_registry.LookupSignature(*gandiva_function_signature); + gandiva_function_registry->LookupSignature(*gandiva_function_signature); if (gandiva_native_function) { return ggandiva_native_function_new_raw(gandiva_native_function); } else { - return NULL; + return nullptr; } } @@ -99,18 +169,32 @@ ggandiva_function_registry_lookup(GGandivaFunctionRegistry *function_registry, GList * ggandiva_function_registry_get_native_functions(GGandivaFunctionRegistry *function_registry) { - gandiva::FunctionRegistry gandiva_function_registry; - + auto gandiva_function_registry = + ggandiva_function_registry_get_raw(function_registry); GList *native_functions = nullptr; - for (auto gandiva_native_function = gandiva_function_registry.begin(); - gandiva_native_function != gandiva_function_registry.end(); - ++gandiva_native_function) { - auto native_function = 
ggandiva_native_function_new_raw(gandiva_native_function); + for (const auto &gandiva_native_function : *gandiva_function_registry) { + auto native_function = ggandiva_native_function_new_raw(&gandiva_native_function); native_functions = g_list_prepend(native_functions, native_function); } - native_functions = g_list_reverse(native_functions); - - return native_functions; + return g_list_reverse(native_functions); } G_END_DECLS + +GGandivaFunctionRegistry * +ggandiva_function_registry_new_raw( + std::shared_ptr *gandiva_function_registry) +{ + return GGANDIVA_FUNCTION_REGISTRY( + g_object_new(GGANDIVA_TYPE_FUNCTION_REGISTRY, + "function-registry", gandiva_function_registry, + nullptr)); +} + +std::shared_ptr +ggandiva_function_registry_get_raw(GGandivaFunctionRegistry *function_registry) +{ + auto priv = GGANDIVA_FUNCTION_REGISTRY_GET_PRIVATE(function_registry); + return priv->function_registry; +} + diff --git a/c_glib/gandiva-glib/function-registry.h b/c_glib/gandiva-glib/function-registry.h index 1a0d767d45354..8ff6027cf1734 100644 --- a/c_glib/gandiva-glib/function-registry.h +++ b/c_glib/gandiva-glib/function-registry.h @@ -35,6 +35,8 @@ struct _GGandivaFunctionRegistryClass GObjectClass parent_class; }; +GARROW_AVAILABLE_IN_15_0 +GGandivaFunctionRegistry *ggandiva_function_registry_default(void); GGandivaFunctionRegistry *ggandiva_function_registry_new(void); GGandivaNativeFunction * ggandiva_function_registry_lookup(GGandivaFunctionRegistry *function_registry, diff --git a/c_glib/gandiva-glib/function-registry.hpp b/c_glib/gandiva-glib/function-registry.hpp new file mode 100644 index 0000000000000..0430fc57dead2 --- /dev/null +++ b/c_glib/gandiva-glib/function-registry.hpp @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +#pragma once + +#include + +#include + +GGandivaFunctionRegistry * +ggandiva_function_registry_new_raw( + std::shared_ptr *gandiva_function_registry); +std::shared_ptr +ggandiva_function_registry_get_raw(GGandivaFunctionRegistry *function_registry); diff --git a/c_glib/meson.build b/c_glib/meson.build index d9de6fcbf7e24..7c495d2567d72 100644 --- a/c_glib/meson.build +++ b/c_glib/meson.build @@ -24,7 +24,7 @@ project('arrow-glib', 'c', 'cpp', 'cpp_std=c++17', ]) -version = '14.0.0-SNAPSHOT' +version = '15.0.0-SNAPSHOT' if version.endswith('-SNAPSHOT') version_numbers = version.split('-')[0].split('.') version_tag = version.split('-')[1] diff --git a/c_glib/test/gandiva/test-function-registry.rb b/c_glib/test/gandiva/test-function-registry.rb index 25bac6673105e..d0f959a1c5f5f 100644 --- a/c_glib/test/gandiva/test-function-registry.rb +++ b/c_glib/test/gandiva/test-function-registry.rb @@ -20,7 +20,7 @@ class TestGandivaFunctionRegistry < Test::Unit::TestCase def setup omit("Gandiva is required") unless defined?(::Gandiva) - @registry = Gandiva::FunctionRegistry.new + @registry = Gandiva::FunctionRegistry.default end sub_test_case("lookup") do diff --git a/c_glib/test/gandiva/test-native-function.rb b/c_glib/test/gandiva/test-native-function.rb index 7888f96b678b7..630a1f7c32d2a 100644 --- a/c_glib/test/gandiva/test-native-function.rb +++ 
b/c_glib/test/gandiva/test-native-function.rb @@ -20,7 +20,7 @@ class TestGandivaNativeFunction < Test::Unit::TestCase def setup omit("Gandiva is required") unless defined?(::Gandiva) - @registry = Gandiva::FunctionRegistry.new + @registry = Gandiva::FunctionRegistry.default @not = lookup("not", [boolean_data_type], boolean_data_type) @isnull = lookup("isnull", [int8_data_type], boolean_data_type) end diff --git a/ci/conda_env_python.txt b/ci/conda_env_python.txt index da52b5ea689be..97203442129c4 100644 --- a/ci/conda_env_python.txt +++ b/ci/conda_env_python.txt @@ -26,6 +26,6 @@ numpy>=1.16.6 pytest pytest-faulthandler pytest-lazy-fixture -s3fs>=2021.8.0 +s3fs>=2023.10.0 setuptools setuptools_scm<8.0.0 diff --git a/ci/docker/fedora-35-cpp.dockerfile b/ci/docker/fedora-38-cpp.dockerfile similarity index 95% rename from ci/docker/fedora-35-cpp.dockerfile rename to ci/docker/fedora-38-cpp.dockerfile index aefa25663ba14..2dcc094ee20c5 100644 --- a/ci/docker/fedora-35-cpp.dockerfile +++ b/ci/docker/fedora-38-cpp.dockerfile @@ -16,7 +16,7 @@ # under the License. 
ARG arch -FROM ${arch}/fedora:35 +FROM ${arch}/fedora:38 ARG arch # install dependencies @@ -46,9 +46,9 @@ RUN dnf update -y && \ java-latest-openjdk-devel \ java-latest-openjdk-headless \ json-devel \ + liborc-devel \ libzstd-devel \ llvm-devel \ - llvm-static \ lz4-devel \ make \ ninja-build \ @@ -64,6 +64,7 @@ RUN dnf update -y && \ utf8proc-devel \ wget \ which \ + xsimd-devel \ zlib-devel COPY ci/scripts/install_minio.sh /arrow/ci/scripts/ @@ -100,8 +101,6 @@ ENV absl_SOURCE=BUNDLED \ CC=gcc \ CXX=g++ \ google_cloud_cpp_storage_SOURCE=BUNDLED \ - ORC_SOURCE=BUNDLED \ PARQUET_BUILD_EXAMPLES=ON \ PARQUET_BUILD_EXECUTABLES=ON \ - PATH=/usr/lib/ccache/:$PATH \ - xsimd_SOURCE=BUNDLED + PATH=/usr/lib/ccache/:$PATH diff --git a/ci/docker/ubuntu-swift.dockerfile b/ci/docker/ubuntu-swift.dockerfile index 5ef6bc433df38..4789c9188c226 100644 --- a/ci/docker/ubuntu-swift.dockerfile +++ b/ci/docker/ubuntu-swift.dockerfile @@ -17,8 +17,18 @@ FROM swift:5.7.3 -# Install golang +# Go is needed for generating test data RUN apt-get update -y -q && \ apt-get install -y -q --no-install-recommends \ - golang-go && \ - apt-get clean \ No newline at end of file + golang-go \ + unzip \ + wget && \ + apt-get clean + +ARG swift_lint=0.53.0 +RUN wget https://github.com/realm/SwiftLint/releases/download/${swift_lint}/swiftlint_linux.zip && \ + unzip swiftlint_linux.zip && \ + mv swiftlint /usr/local/bin/ && \ + mkdir -p /usr/local/share/doc/swiftlint/ && \ + mv LICENSE /usr/local/share/doc/swiftlint/ && \ + rm -rf swiftlint_linux.zip diff --git a/ci/scripts/PKGBUILD b/ci/scripts/PKGBUILD index dcd313087e966..95029d98f7a01 100644 --- a/ci/scripts/PKGBUILD +++ b/ci/scripts/PKGBUILD @@ -18,7 +18,7 @@ _realname=arrow pkgbase=mingw-w64-${_realname} pkgname="${MINGW_PACKAGE_PREFIX}-${_realname}" -pkgver=13.0.0.9000 +pkgver=14.0.0.9000 pkgrel=8000 pkgdesc="Apache Arrow is a cross-language development platform for in-memory data (mingw-w64)" arch=("any") diff --git a/ci/scripts/cpp_test.sh 
b/ci/scripts/cpp_test.sh index 3acf56bae0fe4..0c6e1c6ef7057 100755 --- a/ci/scripts/cpp_test.sh +++ b/ci/scripts/cpp_test.sh @@ -86,7 +86,7 @@ ctest \ --parallel ${n_jobs} \ --timeout ${ARROW_CTEST_TIMEOUT:-300} \ "${ctest_options[@]}" \ - $@ + "$@" if [ "${ARROW_BUILD_EXAMPLES}" == "ON" ]; then examples=$(find ${binary_output_dir} -executable -name "*example") diff --git a/ci/scripts/go_build.sh b/ci/scripts/go_build.sh index 7c5ca3230c96e..94f75e501ea0b 100755 --- a/ci/scripts/go_build.sh +++ b/ci/scripts/go_build.sh @@ -42,7 +42,9 @@ go install -v ./... popd -if [[ -n "${ARROW_GO_INTEGRATION}" ]]; then +: ${ARROW_INTEGRATION_GO:=ON} + +if [ "${ARROW_INTEGRATION_GO}" == "ON" ]; then pushd ${source_dir}/arrow/internal/cdata_integration case "$(uname)" in diff --git a/ci/scripts/integration_arrow.sh b/ci/scripts/integration_arrow.sh index 2861b1c09d479..b5a38f01412d4 100755 --- a/ci/scripts/integration_arrow.sh +++ b/ci/scripts/integration_arrow.sh @@ -20,11 +20,25 @@ set -ex arrow_dir=${1} +build_dir=${2} + gold_dir=$arrow_dir/testing/data/arrow-ipc-stream/integration +: ${ARROW_INTEGRATION_CPP:=ON} +: ${ARROW_INTEGRATION_CSHARP:=ON} +: ${ARROW_INTEGRATION_GO:=ON} +: ${ARROW_INTEGRATION_JAVA:=ON} +: ${ARROW_INTEGRATION_JS:=ON} + pip install -e $arrow_dir/dev/archery[integration] + # For C Data Interface testing -pip install jpype1 pythonnet +if [ "${ARROW_INTEGRATION_CSHARP}" == "ON" ]; then + pip install pythonnet +fi +if [ "${ARROW_INTEGRATION_JAVA}" == "ON" ]; then + pip install jpype1 +fi # Get more detailed context on crashes export PYTHONFAULTHANDLER=1 @@ -34,11 +48,11 @@ time archery integration \ --run-c-data \ --run-ipc \ --run-flight \ - --with-cpp=1 \ - --with-csharp=1 \ - --with-java=1 \ - --with-js=1 \ - --with-go=1 \ + --with-cpp=$([ "$ARROW_INTEGRATION_CPP" == "ON" ] && echo "1" || echo "0") \ + --with-csharp=$([ "$ARROW_INTEGRATION_CSHARP" == "ON" ] && echo "1" || echo "0") \ + --with-go=$([ "$ARROW_INTEGRATION_GO" == "ON" ] && echo "1" || echo 
"0") \ + --with-java=$([ "$ARROW_INTEGRATION_JAVA" == "ON" ] && echo "1" || echo "0") \ + --with-js=$([ "$ARROW_INTEGRATION_JS" == "ON" ] && echo "1" || echo "0") \ --gold-dirs=$gold_dir/0.14.1 \ --gold-dirs=$gold_dir/0.17.1 \ --gold-dirs=$gold_dir/1.0.0-bigendian \ diff --git a/ci/scripts/integration_arrow_build.sh b/ci/scripts/integration_arrow_build.sh new file mode 100755 index 0000000000000..02f593bf77b23 --- /dev/null +++ b/ci/scripts/integration_arrow_build.sh @@ -0,0 +1,55 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +set -ex + +arrow_dir=${1} +build_dir=${2} + +: ${ARROW_INTEGRATION_CPP:=ON} +: ${ARROW_INTEGRATION_CSHARP:=ON} +: ${ARROW_INTEGRATION_GO:=ON} +: ${ARROW_INTEGRATION_JAVA:=ON} +: ${ARROW_INTEGRATION_JS:=ON} + +${arrow_dir}/ci/scripts/rust_build.sh ${arrow_dir} ${build_dir} + +if [ "${ARROW_INTEGRATION_CPP}" == "ON" ]; then + ${arrow_dir}/ci/scripts/cpp_build.sh ${arrow_dir} ${build_dir} +fi + +if [ "${ARROW_INTEGRATION_CSHARP}" == "ON" ]; then + ${arrow_dir}/ci/scripts/csharp_build.sh ${arrow_dir} ${build_dir} +fi + +if [ "${ARROW_INTEGRATION_GO}" == "ON" ]; then + ${arrow_dir}/ci/scripts/go_build.sh ${arrow_dir} ${build_dir} +fi + +if [ "${ARROW_INTEGRATION_JAVA}" == "ON" ]; then + export ARROW_JAVA_CDATA="ON" + export JAVA_JNI_CMAKE_ARGS="-DARROW_JAVA_JNI_ENABLE_DEFAULT=OFF -DARROW_JAVA_JNI_ENABLE_C=ON" + + ${arrow_dir}/ci/scripts/java_jni_build.sh ${arrow_dir} ${ARROW_HOME} ${build_dir} /tmp/dist/java/$(arch) + ${arrow_dir}/ci/scripts/java_build.sh ${arrow_dir} ${build_dir} /tmp/dist/java +fi + +if [ "${ARROW_INTEGRATION_JS}" == "ON" ]; then + ${arrow_dir}/ci/scripts/js_build.sh ${arrow_dir} ${build_dir} +fi diff --git a/ci/scripts/python_wheel_macos_build.sh b/ci/scripts/python_wheel_macos_build.sh index 5a3c6fb6d1f6f..fd845c512dcdb 100755 --- a/ci/scripts/python_wheel_macos_build.sh +++ b/ci/scripts/python_wheel_macos_build.sh @@ -34,7 +34,7 @@ rm -rf ${source_dir}/python/pyarrow/*.so.* echo "=== (${PYTHON_VERSION}) Set SDK, C++ and Wheel flags ===" export _PYTHON_HOST_PLATFORM="macosx-${MACOSX_DEPLOYMENT_TARGET}-${arch}" -export MACOSX_DEPLOYMENT_TARGET=${MACOSX_DEPLOYMENT_TARGET:-10.14} +export MACOSX_DEPLOYMENT_TARGET=${MACOSX_DEPLOYMENT_TARGET:-10.15} export SDKROOT=${SDKROOT:-$(xcrun --sdk macosx --show-sdk-path)} if [ $arch = "arm64" ]; then diff --git a/ci/scripts/r_test.sh b/ci/scripts/r_test.sh index e0c2ce9efedd8..22ec551edb9fa 100755 --- a/ci/scripts/r_test.sh +++ b/ci/scripts/r_test.sh @@ -27,7 +27,7 @@ pushd ${source_dir} printenv # Run the 
nixlibs.R test suite, which is not included in the installed package -${R_BIN} -e 'setwd("tools"); testthat::test_dir(".")' +${R_BIN} -e 'setwd("tools"); testthat::test_dir(".", stop_on_warning = TRUE)' # Before release, we always copy the relevant parts of the cpp source into the # package. In some CI checks, we will use this version of the source: diff --git a/ci/scripts/swift_test.sh b/ci/scripts/swift_test.sh index b7ab37fd489c9..b523e3891d93c 100755 --- a/ci/scripts/swift_test.sh +++ b/ci/scripts/swift_test.sh @@ -20,12 +20,18 @@ set -ex data_gen_dir=${1}/swift/data-generator/swift-datagen +export GOPATH=/ pushd ${data_gen_dir} go get -d ./... -go run main.go +go run . cp *.arrow ../../Arrow popd +source_dir=${1}/swift +pushd ${source_dir} +swiftlint --strict +popd + source_dir=${1}/swift/Arrow pushd ${source_dir} swift test diff --git a/ci/scripts/util_free_space.sh b/ci/scripts/util_free_space.sh index 0518869d06993..dd6ba2c4600a9 100755 --- a/ci/scripts/util_free_space.sh +++ b/ci/scripts/util_free_space.sh @@ -25,7 +25,6 @@ du -hsc /usr/local/* echo "::endgroup::" # ~1GB sudo rm -rf \ - /usr/local/aws-cli \ /usr/local/aws-sam-cil \ /usr/local/julia* || : echo "::group::/usr/local/bin/*" @@ -34,8 +33,6 @@ echo "::endgroup::" # ~1GB (From 1.2GB to 214MB) sudo rm -rf \ /usr/local/bin/aliyun \ - /usr/local/bin/aws \ - /usr/local/bin/aws_completer \ /usr/local/bin/azcopy \ /usr/local/bin/bicep \ /usr/local/bin/cmake-gui \ diff --git a/ci/vcpkg/universal2-osx-static-debug.cmake b/ci/vcpkg/universal2-osx-static-debug.cmake index 580b4604d522f..8abc1ebf838f1 100644 --- a/ci/vcpkg/universal2-osx-static-debug.cmake +++ b/ci/vcpkg/universal2-osx-static-debug.cmake @@ -21,6 +21,6 @@ set(VCPKG_LIBRARY_LINKAGE static) set(VCPKG_CMAKE_SYSTEM_NAME Darwin) set(VCPKG_OSX_ARCHITECTURES "x86_64;arm64") -set(VCPKG_OSX_DEPLOYMENT_TARGET "10.14") +set(VCPKG_OSX_DEPLOYMENT_TARGET "10.15") set(VCPKG_BUILD_TYPE debug) diff --git a/ci/vcpkg/universal2-osx-static-release.cmake 
b/ci/vcpkg/universal2-osx-static-release.cmake index 7247d0af351c5..2eb36c15175b2 100644 --- a/ci/vcpkg/universal2-osx-static-release.cmake +++ b/ci/vcpkg/universal2-osx-static-release.cmake @@ -21,6 +21,6 @@ set(VCPKG_LIBRARY_LINKAGE static) set(VCPKG_CMAKE_SYSTEM_NAME Darwin) set(VCPKG_OSX_ARCHITECTURES "x86_64;arm64") -set(VCPKG_OSX_DEPLOYMENT_TARGET "10.14") +set(VCPKG_OSX_DEPLOYMENT_TARGET "10.15") set(VCPKG_BUILD_TYPE release) diff --git a/cpp/CMakeLists.txt b/cpp/CMakeLists.txt index 8566508406bd4..bcb298407bd8b 100644 --- a/cpp/CMakeLists.txt +++ b/cpp/CMakeLists.txt @@ -71,7 +71,7 @@ if(POLICY CMP0135) cmake_policy(SET CMP0135 NEW) endif() -set(ARROW_VERSION "14.0.0-SNAPSHOT") +set(ARROW_VERSION "15.0.0-SNAPSHOT") string(REGEX MATCH "^[0-9]+\\.[0-9]+\\.[0-9]+" ARROW_BASE_VERSION "${ARROW_VERSION}") @@ -770,10 +770,10 @@ if(ARROW_WITH_ZSTD) endif() if(ARROW_ORC) - list(APPEND ARROW_SHARED_LINK_LIBS orc::liborc ${ARROW_PROTOBUF_LIBPROTOBUF}) - list(APPEND ARROW_STATIC_LINK_LIBS orc::liborc ${ARROW_PROTOBUF_LIBPROTOBUF}) + list(APPEND ARROW_SHARED_LINK_LIBS orc::orc ${ARROW_PROTOBUF_LIBPROTOBUF}) + list(APPEND ARROW_STATIC_LINK_LIBS orc::orc ${ARROW_PROTOBUF_LIBPROTOBUF}) if(ORC_SOURCE STREQUAL "SYSTEM") - list(APPEND ARROW_STATIC_INSTALL_INTERFACE_LIBS orc::liborc + list(APPEND ARROW_STATIC_INSTALL_INTERFACE_LIBS orc::orc ${ARROW_PROTOBUF_LIBPROTOBUF}) endif() endif() @@ -889,8 +889,8 @@ if(NOT MSVC_TOOLCHAIN) list(APPEND ARROW_SHARED_LINK_LIBS ${CMAKE_DL_LIBS}) endif() -set(ARROW_TEST_LINK_TOOLCHAIN arrow::flatbuffers ${ARROW_GTEST_GTEST_MAIN} - ${ARROW_GTEST_GTEST} ${ARROW_GTEST_GMOCK}) +set(ARROW_TEST_LINK_TOOLCHAIN arrow::flatbuffers ${ARROW_GTEST_GMOCK} + ${ARROW_GTEST_GTEST_MAIN}) if(ARROW_BUILD_TESTS) add_dependencies(arrow_test_dependencies ${ARROW_TEST_LINK_TOOLCHAIN}) @@ -909,7 +909,7 @@ set(ARROW_TEST_SHARED_LINK_LIBS arrow_testing_shared arrow_shared ${ARROW_SHARED_LINK_LIBS} ${ARROW_TEST_LINK_TOOLCHAIN}) if(NOT MSVC) - 
set(ARROW_TEST_SHARED_LINK_LIBS ${ARROW_TEST_SHARED_LINK_LIBS} ${CMAKE_DL_LIBS}) + list(APPEND ARROW_TEST_SHARED_LINK_LIBS ${CMAKE_DL_LIBS}) endif() if("${ARROW_TEST_LINKAGE}" STREQUAL "shared") diff --git a/cpp/cmake_modules/FindGTestAlt.cmake b/cpp/cmake_modules/FindGTestAlt.cmake index 77d4f39d9e0bf..d1873d138e6c7 100644 --- a/cpp/cmake_modules/FindGTestAlt.cmake +++ b/cpp/cmake_modules/FindGTestAlt.cmake @@ -63,4 +63,7 @@ TEST(CXX_STANDARD, MatcherStringView) { find_package_handle_standard_args(GTestAlt REQUIRED_VARS GTestAlt_CXX_STANDARD_AVAILABLE) endif() + + target_link_libraries(GTest::gmock INTERFACE GTest::gtest) + target_link_libraries(GTest::gtest_main INTERFACE GTest::gtest) endif() diff --git a/cpp/cmake_modules/FindORC.cmake b/cpp/cmake_modules/FindorcAlt.cmake similarity index 68% rename from cpp/cmake_modules/FindORC.cmake rename to cpp/cmake_modules/FindorcAlt.cmake index aca915acc13d0..dc3b978cf4037 100644 --- a/cpp/cmake_modules/FindORC.cmake +++ b/cpp/cmake_modules/FindorcAlt.cmake @@ -15,13 +15,20 @@ # specific language governing permissions and limitations # under the License. 
-# - Find Apache ORC C++ (orc/orc-config.h, liborc.a) -# This module defines -# ORC_INCLUDE_DIR, directory containing headers -# ORC_STATIC_LIB, path to liborc.a -# ORC_FOUND, whether orc has been found +if(orcAlt_FOUND) + return() +endif() -if(ORC_FOUND) +set(find_package_args) +if(orcAlt_FIND_VERSION) + list(APPEND find_package_args ${orcAlt_FIND_VERSION}) +endif() +if(orcAlt_FIND_QUIETLY) + list(APPEND find_package_args QUIET) +endif() +find_package(orc ${find_package_args}) +if(orc_FOUND) + set(orcAlt_FOUND TRUE) return() endif() @@ -45,15 +52,13 @@ else() PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) endif() -if(ORC_STATIC_LIB AND ORC_INCLUDE_DIR) - set(ORC_FOUND TRUE) - add_library(orc::liborc STATIC IMPORTED) - set_target_properties(orc::liborc - PROPERTIES IMPORTED_LOCATION "${ORC_STATIC_LIB}" - INTERFACE_INCLUDE_DIRECTORIES "${ORC_INCLUDE_DIR}") -else() - if(ORC_FIND_REQUIRED) - message(FATAL_ERROR "ORC library was required in toolchain and unable to locate") +find_package_handle_standard_args(orcAlt REQUIRED_VARS ORC_STATIC_LIB ORC_INCLUDE_DIR) + +if(orcAlt_FOUND) + if(NOT TARGET orc::orc) + add_library(orc::orc STATIC IMPORTED) + set_target_properties(orc::orc + PROPERTIES IMPORTED_LOCATION "${ORC_STATIC_LIB}" + INTERFACE_INCLUDE_DIRECTORIES "${ORC_INCLUDE_DIR}") endif() - set(ORC_FOUND FALSE) endif() diff --git a/cpp/cmake_modules/GandivaAddBitcode.cmake b/cpp/cmake_modules/GandivaAddBitcode.cmake new file mode 100644 index 0000000000000..98847f8a186fe --- /dev/null +++ b/cpp/cmake_modules/GandivaAddBitcode.cmake @@ -0,0 +1,75 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# Create bitcode for the given source file. +function(gandiva_add_bitcode SOURCE) + set(CLANG_OPTIONS -std=c++17) + if(MSVC) + # "19.20" means that it's compatible with Visual Studio 16 2019. + # We can update this to "19.30" when we dropped support for Visual + # Studio 16 2019. + # + # See https://cmake.org/cmake/help/latest/variable/MSVC_VERSION.html + # for MSVC_VERSION and Visual Studio version. + set(FMS_COMPATIBILITY 19.20) + list(APPEND CLANG_OPTIONS -fms-compatibility + -fms-compatibility-version=${FMS_COMPATIBILITY}) + endif() + + get_filename_component(SOURCE_BASE ${SOURCE} NAME_WE) + get_filename_component(ABSOLUTE_SOURCE ${SOURCE} ABSOLUTE) + set(BC_FILE ${CMAKE_CURRENT_BINARY_DIR}/${SOURCE_BASE}.bc) + set(PRECOMPILE_COMMAND) + if(CMAKE_OSX_SYSROOT) + list(APPEND + PRECOMPILE_COMMAND + ${CMAKE_COMMAND} + -E + env + SDKROOT=${CMAKE_OSX_SYSROOT}) + endif() + list(APPEND + PRECOMPILE_COMMAND + ${CLANG_EXECUTABLE} + ${CLANG_OPTIONS} + -DGANDIVA_IR + -DNDEBUG # DCHECK macros not implemented in precompiled code + -DARROW_STATIC # Do not set __declspec(dllimport) on MSVC on Arrow symbols + -DGANDIVA_STATIC # Do not set __declspec(dllimport) on MSVC on Gandiva symbols + -fno-use-cxa-atexit # Workaround for unresolved __dso_handle + -emit-llvm + -O3 + -c + ${ABSOLUTE_SOURCE} + -o + ${BC_FILE} + ${ARROW_GANDIVA_PC_CXX_FLAGS}) + if(ARROW_BINARY_DIR) + list(APPEND PRECOMPILE_COMMAND -I${ARROW_BINARY_DIR}/src) + endif() + if(ARROW_SOURCE_DIR) + list(APPEND PRECOMPILE_COMMAND -I${ARROW_SOURCE_DIR}/src) + endif() + if(NOT ARROW_USE_NATIVE_INT128) + 
foreach(boost_include_dir ${Boost_INCLUDE_DIRS}) + list(APPEND PRECOMPILE_COMMAND -I${boost_include_dir}) + endforeach() + endif() + add_custom_command(OUTPUT ${BC_FILE} + COMMAND ${PRECOMPILE_COMMAND} + DEPENDS ${SOURCE_FILE}) +endfunction() diff --git a/cpp/cmake_modules/ThirdpartyToolchain.cmake b/cpp/cmake_modules/ThirdpartyToolchain.cmake index 559ddf14f6a91..52632d554aafb 100644 --- a/cpp/cmake_modules/ThirdpartyToolchain.cmake +++ b/cpp/cmake_modules/ThirdpartyToolchain.cmake @@ -65,7 +65,7 @@ set(ARROW_THIRDPARTY_DEPENDENCIES lz4 nlohmann_json opentelemetry-cpp - ORC + orc re2 Protobuf RapidJSON @@ -94,6 +94,14 @@ if("${re2_SOURCE}" STREQUAL "" AND NOT "${RE2_SOURCE}" STREQUAL "") set(re2_SOURCE ${RE2_SOURCE}) endif() +# For backward compatibility. We use "ORC_SOURCE" if "orc_SOURCE" +# isn't specified and "ORC_SOURCE" is specified. +# We renamed "ORC" dependency name to "orc" in 15.0.0 because +# upstream uses "orc" not "ORC" as package name. +if("${orc_SOURCE}" STREQUAL "" AND NOT "${ORC_SOURCE}" STREQUAL "") + set(orc_SOURCE ${ORC_SOURCE}) +endif() + # For backward compatibility. We use "RE2_ROOT" if "re2_ROOT" # isn't specified and "RE2_ROOT" is specified. if("${re2_ROOT}" STREQUAL "" AND NOT "${RE2_ROOT}" STREQUAL "") @@ -193,7 +201,7 @@ macro(build_dependency DEPENDENCY_NAME) build_nlohmann_json() elseif("${DEPENDENCY_NAME}" STREQUAL "opentelemetry-cpp") build_opentelemetry() - elseif("${DEPENDENCY_NAME}" STREQUAL "ORC") + elseif("${DEPENDENCY_NAME}" STREQUAL "orc") build_orc() elseif("${DEPENDENCY_NAME}" STREQUAL "Protobuf") build_protobuf() @@ -222,18 +230,21 @@ macro(build_dependency DEPENDENCY_NAME) endif() endmacro() -# Find modules are needed by the consumer in case of a static build, or if the -# linkage is PUBLIC or INTERFACE. 
-macro(provide_find_module PACKAGE_NAME ARROW_CMAKE_PACKAGE_NAME) - set(module_ "${CMAKE_SOURCE_DIR}/cmake_modules/Find${PACKAGE_NAME}.cmake") - if(EXISTS "${module_}") - message(STATUS "Providing CMake module for ${PACKAGE_NAME} as part of ${ARROW_CMAKE_PACKAGE_NAME} CMake package" +function(provide_cmake_module MODULE_NAME ARROW_CMAKE_PACKAGE_NAME) + set(module "${CMAKE_SOURCE_DIR}/cmake_modules/${MODULE_NAME}.cmake") + if(EXISTS "${module}") + message(STATUS "Providing CMake module for ${MODULE_NAME} as part of ${ARROW_CMAKE_PACKAGE_NAME} CMake package" ) - install(FILES "${module_}" + install(FILES "${module}" DESTINATION "${ARROW_CMAKE_DIR}/${ARROW_CMAKE_PACKAGE_NAME}") endif() - unset(module_) -endmacro() +endfunction() + +# Find modules are needed by the consumer in case of a static build, or if the +# linkage is PUBLIC or INTERFACE. +function(provide_find_module PACKAGE_NAME ARROW_CMAKE_PACKAGE_NAME) + provide_cmake_module("Find${PACKAGE_NAME}" ${ARROW_CMAKE_PACKAGE_NAME}) +endfunction() macro(resolve_dependency DEPENDENCY_NAME) set(options) @@ -4423,31 +4434,31 @@ macro(build_orc) set(ORC_VENDORED 1) - add_library(orc::liborc STATIC IMPORTED) - set_target_properties(orc::liborc PROPERTIES IMPORTED_LOCATION "${ORC_STATIC_LIB}") - target_include_directories(orc::liborc BEFORE INTERFACE "${ORC_INCLUDE_DIR}") - set(ORC_LINK_LIBRARIES LZ4::lz4 ZLIB::ZLIB ${ARROW_ZSTD_LIBZSTD} ${Snappy_TARGET}) + add_library(orc::orc STATIC IMPORTED) + set_target_properties(orc::orc PROPERTIES IMPORTED_LOCATION "${ORC_STATIC_LIB}") + target_include_directories(orc::orc BEFORE INTERFACE "${ORC_INCLUDE_DIR}") + target_link_libraries(orc::orc INTERFACE LZ4::lz4 ZLIB::ZLIB ${ARROW_ZSTD_LIBZSTD} + ${Snappy_TARGET}) # Protobuf generated files may use ABSL_DCHECK*() and # absl::log_internal_check_op is needed for them. 
if(TARGET absl::log_internal_check_op) - list(APPEND ORC_LINK_LIBRARIES absl::log_internal_check_op) + target_link_libraries(orc::orc INTERFACE absl::log_internal_check_op) endif() if(NOT MSVC) if(NOT APPLE AND ARROW_ENABLE_THREADING) - list(APPEND ORC_LINK_LIBRARIES Threads::Threads) + target_link_libraries(orc::orc INTERFACE Threads::Threads) endif() - list(APPEND ORC_LINK_LIBRARIES ${CMAKE_DL_LIBS}) + target_link_libraries(orc::orc INTERFACE ${CMAKE_DL_LIBS}) endif() - target_link_libraries(orc::liborc INTERFACE ${ORC_LINK_LIBRARIES}) add_dependencies(toolchain orc_ep) - add_dependencies(orc::liborc orc_ep) + add_dependencies(orc::orc orc_ep) - list(APPEND ARROW_BUNDLED_STATIC_LIBS orc::liborc) + list(APPEND ARROW_BUNDLED_STATIC_LIBS orc::orc) endmacro() if(ARROW_ORC) - resolve_dependency(ORC) + resolve_dependency(orc HAVE_ALT TRUE) message(STATUS "Found ORC static library: ${ORC_STATIC_LIB}") message(STATUS "Found ORC headers: ${ORC_INCLUDE_DIR}") endif() diff --git a/cpp/src/arrow/CMakeLists.txt b/cpp/src/arrow/CMakeLists.txt index 9a6117011535e..24e8eefad1523 100644 --- a/cpp/src/arrow/CMakeLists.txt +++ b/cpp/src/arrow/CMakeLists.txt @@ -223,6 +223,7 @@ set(ARROW_SRCS util/debug.cc util/decimal.cc util/delimiting.cc + util/float16.cc util/formatting.cc util/future.cc util/hashing.cc @@ -502,8 +503,8 @@ if(ARROW_FILESYSTEM) filesystem/util_internal.cc) if(ARROW_AZURE) - list(APPEND ARROW_SRCS filesystem/azurefs.cc) - set_source_files_properties(filesystem/azurefs.cc + list(APPEND ARROW_SRCS filesystem/azurefs.cc filesystem/azurefs_internal.cc) + set_source_files_properties(filesystem/azurefs.cc filesystem/azurefs_internal.cc PROPERTIES SKIP_PRECOMPILE_HEADERS ON SKIP_UNITY_BUILD_INCLUSION ON) endif() diff --git a/cpp/src/arrow/acero/CMakeLists.txt b/cpp/src/arrow/acero/CMakeLists.txt index 44fbb26f0814d..b77d52a23eedb 100644 --- a/cpp/src/arrow/acero/CMakeLists.txt +++ b/cpp/src/arrow/acero/CMakeLists.txt @@ -49,9 +49,11 @@ set(ARROW_ACERO_SRCS 
project_node.cc query_context.cc sink_node.cc + sorted_merge_node.cc source_node.cc swiss_join.cc task_util.cc + time_series_util.cc tpch_node.cc union_node.cc util.cc) @@ -123,8 +125,7 @@ if(ARROW_TESTING) add_library(arrow_acero_testing OBJECT test_util_internal.cc) # Even though this is still just an object library we still need to "link" our # dependencies so that include paths are configured correctly - target_link_libraries(arrow_acero_testing ${ARROW_ACERO_TEST_LINK_LIBS}) - target_link_libraries(arrow_acero_testing ${ARROW_GTEST_GTEST}) + target_link_libraries(arrow_acero_testing PRIVATE ${ARROW_ACERO_TEST_LINK_LIBS}) list(APPEND ARROW_ACERO_TEST_LINK_LIBS arrow_acero_testing) endif() @@ -174,11 +175,13 @@ add_arrow_acero_test(hash_join_node_test SOURCES hash_join_node_test.cc add_arrow_acero_test(pivot_longer_node_test SOURCES pivot_longer_node_test.cc test_nodes.cc) -# asof_join_node uses std::thread internally +# asof_join_node and sorted_merge_node use std::thread internally # and doesn't use ThreadPool so it will # be broken if threading is turned off if(ARROW_ENABLE_THREADING) add_arrow_acero_test(asof_join_node_test SOURCES asof_join_node_test.cc test_nodes.cc) + add_arrow_acero_test(sorted_merge_node_test SOURCES sorted_merge_node_test.cc + test_nodes.cc) endif() add_arrow_acero_test(tpch_node_test SOURCES tpch_node_test.cc) diff --git a/cpp/src/arrow/acero/asof_join_node.cc b/cpp/src/arrow/acero/asof_join_node.cc index d19d2db299cba..4a3b6b199c4c0 100644 --- a/cpp/src/arrow/acero/asof_join_node.cc +++ b/cpp/src/arrow/acero/asof_join_node.cc @@ -16,6 +16,8 @@ // under the License. 
#include "arrow/acero/asof_join_node.h" +#include "arrow/acero/backpressure_handler.h" +#include "arrow/acero/concurrent_queue_internal.h" #include #include @@ -30,6 +32,7 @@ #include "arrow/acero/exec_plan.h" #include "arrow/acero/options.h" +#include "arrow/acero/unmaterialized_table.h" #ifndef NDEBUG #include "arrow/acero/options_internal.h" #endif @@ -41,6 +44,7 @@ #ifndef NDEBUG #include "arrow/compute/function_internal.h" #endif +#include "arrow/acero/time_series_util.h" #include "arrow/compute/key_hash.h" #include "arrow/compute/light_array.h" #include "arrow/record_batch.h" @@ -122,92 +126,12 @@ struct TolType { typedef uint64_t row_index_t; typedef int col_index_t; -// normalize the value to 64-bits while preserving ordering of values -template ::value, bool> = true> -static inline uint64_t time_value(T t) { - uint64_t bias = std::is_signed::value ? (uint64_t)1 << (8 * sizeof(T) - 1) : 0; - return t < 0 ? static_cast(t + bias) : static_cast(t); -} - // indicates normalization of a key value template ::value, bool> = true> static inline uint64_t key_value(T t) { return static_cast(t); } -/** - * Simple implementation for an unbound concurrent queue - */ -template -class ConcurrentQueue { - public: - T Pop() { - std::unique_lock lock(mutex_); - cond_.wait(lock, [&] { return !queue_.empty(); }); - return PopUnlocked(); - } - - T PopUnlocked() { - auto item = queue_.front(); - queue_.pop(); - return item; - } - - void Push(const T& item) { - std::unique_lock lock(mutex_); - return PushUnlocked(item); - } - - void PushUnlocked(const T& item) { - queue_.push(item); - cond_.notify_one(); - } - - void Clear() { - std::unique_lock lock(mutex_); - ClearUnlocked(); - } - - void ClearUnlocked() { queue_ = std::queue(); } - - std::optional TryPop() { - std::unique_lock lock(mutex_); - return TryPopUnlocked(); - } - - std::optional TryPopUnlocked() { - // Try to pop the oldest value from the queue (or return nullopt if none) - if (queue_.empty()) { - return 
std::nullopt; - } else { - auto item = queue_.front(); - queue_.pop(); - return item; - } - } - - bool Empty() const { - std::unique_lock lock(mutex_); - return queue_.empty(); - } - - // Un-synchronized access to front - // For this to be "safe": - // 1) the caller logically guarantees that queue is not empty - // 2) pop/try_pop cannot be called concurrently with this - const T& UnsyncFront() const { return queue_.front(); } - - size_t UnsyncSize() const { return queue_.size(); } - - protected: - std::mutex& GetMutex() { return mutex_; } - - private: - std::queue queue_; - mutable std::mutex mutex_; - std::condition_variable cond_; -}; - class AsofJoinNode; #ifndef NDEBUG @@ -547,104 +471,6 @@ class BackpressureController : public BackpressureControl { std::atomic& backpressure_counter_; }; -class BackpressureHandler { - private: - BackpressureHandler(ExecNode* input, size_t low_threshold, size_t high_threshold, - std::unique_ptr backpressure_control) - : input_(input), - low_threshold_(low_threshold), - high_threshold_(high_threshold), - backpressure_control_(std::move(backpressure_control)) {} - - public: - static Result Make( - ExecNode* input, size_t low_threshold, size_t high_threshold, - std::unique_ptr backpressure_control) { - if (low_threshold >= high_threshold) { - return Status::Invalid("low threshold (", low_threshold, - ") must be less than high threshold (", high_threshold, ")"); - } - if (backpressure_control == NULLPTR) { - return Status::Invalid("null backpressure control parameter"); - } - BackpressureHandler backpressure_handler(input, low_threshold, high_threshold, - std::move(backpressure_control)); - return std::move(backpressure_handler); - } - - void Handle(size_t start_level, size_t end_level) { - if (start_level < high_threshold_ && end_level >= high_threshold_) { - backpressure_control_->Pause(); - } else if (start_level > low_threshold_ && end_level <= low_threshold_) { - backpressure_control_->Resume(); - } - } - - Status 
ForceShutdown() { - // It may be unintuitive to call Resume() here, but this is to avoid a deadlock. - // Since acero's executor won't terminate if any one node is paused, we need to - // force resume the node before stopping production. - backpressure_control_->Resume(); - return input_->StopProducing(); - } - - private: - ExecNode* input_; - size_t low_threshold_; - size_t high_threshold_; - std::unique_ptr backpressure_control_; -}; - -template -class BackpressureConcurrentQueue : public ConcurrentQueue { - private: - struct DoHandle { - explicit DoHandle(BackpressureConcurrentQueue& queue) - : queue_(queue), start_size_(queue_.UnsyncSize()) {} - - ~DoHandle() { - size_t end_size = queue_.UnsyncSize(); - queue_.handler_.Handle(start_size_, end_size); - } - - BackpressureConcurrentQueue& queue_; - size_t start_size_; - }; - - public: - explicit BackpressureConcurrentQueue(BackpressureHandler handler) - : handler_(std::move(handler)) {} - - T Pop() { - std::unique_lock lock(ConcurrentQueue::GetMutex()); - DoHandle do_handle(*this); - return ConcurrentQueue::PopUnlocked(); - } - - void Push(const T& item) { - std::unique_lock lock(ConcurrentQueue::GetMutex()); - DoHandle do_handle(*this); - ConcurrentQueue::PushUnlocked(item); - } - - void Clear() { - std::unique_lock lock(ConcurrentQueue::GetMutex()); - DoHandle do_handle(*this); - ConcurrentQueue::ClearUnlocked(); - } - - std::optional TryPop() { - std::unique_lock lock(ConcurrentQueue::GetMutex()); - DoHandle do_handle(*this); - return ConcurrentQueue::TryPopUnlocked(); - } - - Status ForceShutdown() { return handler_.ForceShutdown(); } - - private: - BackpressureHandler handler_; -}; - class InputState { // InputState correponds to an input // Input record batches are queued up in InputState until processed and @@ -783,29 +609,8 @@ class InputState { } inline OnType GetLatestTime() const { - return GetTime(GetLatestBatch().get(), latest_ref_row_); - } - - inline ByType GetTime(const RecordBatch* batch, 
row_index_t row) const { - auto data = batch->column_data(time_col_index_); - switch (time_type_id_) { - LATEST_VAL_CASE(INT8, time_value) - LATEST_VAL_CASE(INT16, time_value) - LATEST_VAL_CASE(INT32, time_value) - LATEST_VAL_CASE(INT64, time_value) - LATEST_VAL_CASE(UINT8, time_value) - LATEST_VAL_CASE(UINT16, time_value) - LATEST_VAL_CASE(UINT32, time_value) - LATEST_VAL_CASE(UINT64, time_value) - LATEST_VAL_CASE(DATE32, time_value) - LATEST_VAL_CASE(DATE64, time_value) - LATEST_VAL_CASE(TIME32, time_value) - LATEST_VAL_CASE(TIME64, time_value) - LATEST_VAL_CASE(TIMESTAMP, time_value) - default: - DCHECK(false); - return 0; // cannot happen - } + return GetTime(GetLatestBatch().get(), time_type_id_, time_col_index_, + latest_ref_row_); } #undef LATEST_VAL_CASE @@ -832,7 +637,9 @@ class InputState { have_active_batch &= !queue_.TryPop(); if (have_active_batch) { DCHECK_GT(queue_.UnsyncFront()->num_rows(), 0); // empty batches disallowed - memo_.UpdateTime(GetTime(queue_.UnsyncFront().get(), 0)); // time changed + memo_.UpdateTime(GetTime(queue_.UnsyncFront().get(), time_type_id_, + time_col_index_, + 0)); // time changed } } } @@ -988,35 +795,25 @@ class InputState { std::vector> src_to_dst_; }; +/// Wrapper around UnmaterializedCompositeTable that knows how to emplace +/// the join row-by-row template -struct CompositeReferenceRow { - struct Entry { - arrow::RecordBatch* batch; // can be NULL if there's no value - row_index_t row; - }; - Entry refs[MAX_TABLES]; -}; +class CompositeTableBuilder { + using SliceBuilder = UnmaterializedSliceBuilder; + using CompositeTable = UnmaterializedCompositeTable; -// A table of composite reference rows. Rows maintain pointers to the -// constituent record batches, but the overall table retains shared_ptr -// references to ensure memory remains resident while the table is live. -// -// The main reason for this is that, especially for wide tables, joins -// are effectively row-oriented, rather than column-oriented. 
Separating -// the join part from the columnar materialization part simplifies the -// logic around data types and increases efficiency. -// -// We don't put the shared_ptr's into the rows for efficiency reasons. -template -class CompositeReferenceTable { public: - NDEBUG_EXPLICIT CompositeReferenceTable(DEBUG_ADD(size_t n_tables, AsofJoinNode* node)) - : DEBUG_ADD(n_tables_(n_tables), node_(node)) { + NDEBUG_EXPLICIT CompositeTableBuilder( + const std::vector>& inputs, + const std::shared_ptr& schema, arrow::MemoryPool* pool, + DEBUG_ADD(size_t n_tables, AsofJoinNode* node)) + : unmaterialized_table(InitUnmaterializedTable(schema, inputs, pool)), + DEBUG_ADD(n_tables_(n_tables), node_(node)) { DCHECK_GE(n_tables_, 1); DCHECK_LE(n_tables_, MAX_TABLES); } - size_t n_rows() const { return rows_.size(); } + size_t n_rows() const { return unmaterialized_table.Size(); } // Adds the latest row from the input state as a new composite reference row // - LHS must have a valid key,timestep,and latest rows @@ -1037,14 +834,16 @@ class CompositeReferenceTable { // On the first row of the batch, we resize the destination. // The destination size is dictated by the size of the LHS batch. 
row_index_t new_batch_size = lhs_latest_batch->num_rows(); - row_index_t new_capacity = rows_.size() + new_batch_size; - if (rows_.capacity() < new_capacity) rows_.reserve(new_capacity); + row_index_t new_capacity = unmaterialized_table.Size() + new_batch_size; + if (unmaterialized_table.capacity() < new_capacity) { + unmaterialized_table.reserve(new_capacity); + } } - rows_.resize(rows_.size() + 1); - auto& row = rows_.back(); - row.refs[0].batch = lhs_latest_batch.get(); - row.refs[0].row = lhs_latest_row; - AddRecordBatchRef(lhs_latest_batch); + + SliceBuilder new_row{&unmaterialized_table}; + + // Each item represents a portion of the columns of the output table + new_row.AddEntry(lhs_latest_batch, lhs_latest_row, lhs_latest_row + 1); DEBUG_SYNC(node_, "Emplace: key=", key, " lhs_latest_row=", lhs_latest_row, " lhs_latest_time=", lhs_latest_time, DEBUG_MANIP(std::endl)); @@ -1068,100 +867,25 @@ class CompositeReferenceTable { if (tolerance.Accepts(lhs_latest_time, (*opt_entry)->time)) { // Have a valid entry const MemoStore::Entry* entry = *opt_entry; - row.refs[i].batch = entry->batch.get(); - row.refs[i].row = entry->row; - AddRecordBatchRef(entry->batch); + new_row.AddEntry(entry->batch, entry->row, entry->row + 1); continue; } } - row.refs[i].batch = NULL; - row.refs[i].row = 0; + new_row.AddEntry(nullptr, 0, 1); } + new_row.Finalize(); } // Materializes the current reference table into a target record batch - Result> Materialize( - MemoryPool* memory_pool, const std::shared_ptr& output_schema, - const std::vector>& state) { - DCHECK_EQ(state.size(), n_tables_); - - // Don't build empty batches - size_t n_rows = rows_.size(); - if (!n_rows) return NULLPTR; - - // Build the arrays column-by-column from the rows - std::vector> arrays(output_schema->num_fields()); - for (size_t i_table = 0; i_table < n_tables_; ++i_table) { - int n_src_cols = state.at(i_table)->get_schema()->num_fields(); - { - for (col_index_t i_src_col = 0; i_src_col < n_src_cols; 
++i_src_col) { - std::optional i_dst_col_opt = - state[i_table]->MapSrcToDst(i_src_col); - if (!i_dst_col_opt) continue; - col_index_t i_dst_col = *i_dst_col_opt; - const auto& src_field = state[i_table]->get_schema()->field(i_src_col); - const auto& dst_field = output_schema->field(i_dst_col); - DCHECK(src_field->type()->Equals(dst_field->type())); - DCHECK_EQ(src_field->name(), dst_field->name()); - const auto& field_type = src_field->type(); - -#define ASOFJOIN_MATERIALIZE_CASE(id) \ - case Type::id: { \ - using T = typename TypeIdTraits::Type; \ - ARROW_ASSIGN_OR_RAISE( \ - arrays.at(i_dst_col), \ - MaterializeColumn(memory_pool, field_type, i_table, i_src_col)); \ - break; \ - } - - switch (field_type->id()) { - ASOFJOIN_MATERIALIZE_CASE(BOOL) - ASOFJOIN_MATERIALIZE_CASE(INT8) - ASOFJOIN_MATERIALIZE_CASE(INT16) - ASOFJOIN_MATERIALIZE_CASE(INT32) - ASOFJOIN_MATERIALIZE_CASE(INT64) - ASOFJOIN_MATERIALIZE_CASE(UINT8) - ASOFJOIN_MATERIALIZE_CASE(UINT16) - ASOFJOIN_MATERIALIZE_CASE(UINT32) - ASOFJOIN_MATERIALIZE_CASE(UINT64) - ASOFJOIN_MATERIALIZE_CASE(FLOAT) - ASOFJOIN_MATERIALIZE_CASE(DOUBLE) - ASOFJOIN_MATERIALIZE_CASE(DATE32) - ASOFJOIN_MATERIALIZE_CASE(DATE64) - ASOFJOIN_MATERIALIZE_CASE(TIME32) - ASOFJOIN_MATERIALIZE_CASE(TIME64) - ASOFJOIN_MATERIALIZE_CASE(TIMESTAMP) - ASOFJOIN_MATERIALIZE_CASE(STRING) - ASOFJOIN_MATERIALIZE_CASE(LARGE_STRING) - ASOFJOIN_MATERIALIZE_CASE(BINARY) - ASOFJOIN_MATERIALIZE_CASE(LARGE_BINARY) - default: - return Status::Invalid("Unsupported data type ", - src_field->type()->ToString(), " for field ", - src_field->name()); - } - -#undef ASOFJOIN_MATERIALIZE_CASE - } - } - } - - // Build the result - DCHECK_LE(n_rows, (uint64_t)std::numeric_limits::max()); - std::shared_ptr r = - arrow::RecordBatch::Make(output_schema, (int64_t)n_rows, arrays); - return r; + Result>> Materialize() { + return unmaterialized_table.Materialize(); } // Returns true if there are no rows - bool empty() const { return rows_.empty(); } + bool empty() const 
{ return unmaterialized_table.Empty(); } private: - // Contains shared_ptr refs for all RecordBatches referred to by the contents of rows_ - std::unordered_map> _ptr2ref; - - // Row table references - std::vector> rows_; + CompositeTable unmaterialized_table; // Total number of tables in the composite table size_t n_tables_; @@ -1171,70 +895,20 @@ class CompositeReferenceTable { AsofJoinNode* node_; #endif - // Adds a RecordBatch ref to the mapping, if needed - void AddRecordBatchRef(const std::shared_ptr& ref) { - if (!_ptr2ref.count((uintptr_t)ref.get())) _ptr2ref[(uintptr_t)ref.get()] = ref; - } - - template ::BuilderType> - enable_if_boolean static BuilderAppend( - Builder& builder, const std::shared_ptr& source, row_index_t row) { - if (source->IsNull(row)) { - builder.UnsafeAppendNull(); - return Status::OK(); - } - builder.UnsafeAppend(bit_util::GetBit(source->template GetValues(1), row)); - return Status::OK(); - } - - template ::BuilderType> - enable_if_t::value && !is_boolean_type::value, - Status> static BuilderAppend(Builder& builder, - const std::shared_ptr& source, - row_index_t row) { - if (source->IsNull(row)) { - builder.UnsafeAppendNull(); - return Status::OK(); - } - using CType = typename TypeTraits::CType; - builder.UnsafeAppend(source->template GetValues(1)[row]); - return Status::OK(); - } - - template ::BuilderType> - enable_if_base_binary static BuilderAppend( - Builder& builder, const std::shared_ptr& source, row_index_t row) { - if (source->IsNull(row)) { - return builder.AppendNull(); - } - using offset_type = typename Type::offset_type; - const uint8_t* data = source->buffers[2]->data(); - const offset_type* offsets = source->GetValues(1); - const offset_type offset0 = offsets[row]; - const offset_type offset1 = offsets[row + 1]; - return builder.Append(data + offset0, offset1 - offset0); - } - - template ::BuilderType> - Result> MaterializeColumn(MemoryPool* memory_pool, - const std::shared_ptr& type, - size_t i_table, col_index_t 
i_col) { - ARROW_ASSIGN_OR_RAISE(auto a_builder, MakeBuilder(type, memory_pool)); - Builder& builder = *checked_cast(a_builder.get()); - ARROW_RETURN_NOT_OK(builder.Reserve(rows_.size())); - for (row_index_t i_row = 0; i_row < rows_.size(); ++i_row) { - const auto& ref = rows_[i_row].refs[i_table]; - if (ref.batch) { - Status st = - BuilderAppend(builder, ref.batch->column_data(i_col), ref.row); - ARROW_RETURN_NOT_OK(st); - } else { - builder.UnsafeAppendNull(); + static CompositeTable InitUnmaterializedTable( + const std::shared_ptr& schema, + const std::vector>& inputs, arrow::MemoryPool* pool) { + std::unordered_map> dst_to_src; + for (size_t i = 0; i < inputs.size(); i++) { + auto& input = inputs[i]; + for (int src = 0; src < input->get_schema()->num_fields(); src++) { + auto dst = input->MapSrcToDst(src); + if (dst.has_value()) { + dst_to_src[dst.value()] = std::make_pair(static_cast(i), src); + } } } - std::shared_ptr result; - ARROW_RETURN_NOT_OK(builder.Finish(&result)); - return result; + return CompositeTable{schema, inputs.size(), dst_to_src, pool}; } }; @@ -1279,7 +953,9 @@ class AsofJoinNode : public ExecNode { auto& lhs = *state_.at(0); // Construct new target table if needed - CompositeReferenceTable dst(DEBUG_ADD(state_.size(), this)); + CompositeTableBuilder dst(state_, output_schema_, + plan()->query_context()->memory_pool(), + DEBUG_ADD(state_.size(), this)); // Generate rows into the dst table until we either run out of data or hit the row // limit, or run out of input @@ -1318,8 +994,8 @@ class AsofJoinNode : public ExecNode { if (dst.empty()) { return NULLPTR; } else { - return dst.Materialize(plan()->query_context()->memory_pool(), output_schema(), - state_); + ARROW_ASSIGN_OR_RAISE(auto out, dst.Materialize()); + return out.has_value() ? 
out.value() : NULLPTR; } } diff --git a/cpp/src/arrow/acero/backpressure_handler.h b/cpp/src/arrow/acero/backpressure_handler.h new file mode 100644 index 0000000000000..178272315d7fb --- /dev/null +++ b/cpp/src/arrow/acero/backpressure_handler.h @@ -0,0 +1,74 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +#pragma once +#include "arrow/acero/exec_plan.h" +#include "arrow/acero/options.h" + +#include + +namespace arrow::acero { + +class BackpressureHandler { + private: + BackpressureHandler(ExecNode* input, size_t low_threshold, size_t high_threshold, + std::unique_ptr backpressure_control) + : input_(input), + low_threshold_(low_threshold), + high_threshold_(high_threshold), + backpressure_control_(std::move(backpressure_control)) {} + + public: + static Result Make( + ExecNode* input, size_t low_threshold, size_t high_threshold, + std::unique_ptr backpressure_control) { + if (low_threshold >= high_threshold) { + return Status::Invalid("low threshold (", low_threshold, + ") must be less than high threshold (", high_threshold, ")"); + } + if (backpressure_control == NULLPTR) { + return Status::Invalid("null backpressure control parameter"); + } + BackpressureHandler backpressure_handler(input, low_threshold, high_threshold, + std::move(backpressure_control)); + return std::move(backpressure_handler); + } + + void Handle(size_t start_level, size_t end_level) { + if (start_level < high_threshold_ && end_level >= high_threshold_) { + backpressure_control_->Pause(); + } else if (start_level > low_threshold_ && end_level <= low_threshold_) { + backpressure_control_->Resume(); + } + } + + Status ForceShutdown() { + // It may be unintuitive to call Resume() here, but this is to avoid a deadlock. + // Since acero's executor won't terminate if any one node is paused, we need to + // force resume the node before stopping production. 
+ backpressure_control_->Resume(); + return input_->StopProducing(); + } + + private: + ExecNode* input_; + size_t low_threshold_; + size_t high_threshold_; + std::unique_ptr backpressure_control_; +}; + +} // namespace arrow::acero diff --git a/cpp/src/arrow/acero/concurrent_queue_internal.h b/cpp/src/arrow/acero/concurrent_queue_internal.h new file mode 100644 index 0000000000000..f530394187299 --- /dev/null +++ b/cpp/src/arrow/acero/concurrent_queue_internal.h @@ -0,0 +1,161 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#pragma once + +#include +#include +#include +#include "arrow/acero/backpressure_handler.h" + +namespace arrow::acero { + +/** + * Simple implementation for a thread safe blocking unbound multi-consumer / + * multi-producer concurrent queue + */ +template +class ConcurrentQueue { + public: + // Pops the last item from the queue. 
Must be called on a non-empty queue + // + T Pop() { + std::unique_lock lock(mutex_); + cond_.wait(lock, [&] { return !queue_.empty(); }); + return PopUnlocked(); + } + + // Pops the last item from the queue, or returns a nullopt if empty + // + std::optional TryPop() { + std::unique_lock lock(mutex_); + return TryPopUnlocked(); + } + + // Pushes an item to the queue + // + void Push(const T& item) { + std::unique_lock lock(mutex_); + return PushUnlocked(item); + } + + // Clears the queue + // + void Clear() { + std::unique_lock lock(mutex_); + ClearUnlocked(); + } + + bool Empty() const { + std::unique_lock lock(mutex_); + return queue_.empty(); + } + + // Un-synchronized access to front + // For this to be "safe": + // 1) the caller logically guarantees that queue is not empty + // 2) pop/try_pop cannot be called concurrently with this + const T& UnsyncFront() const { return queue_.front(); } + + size_t UnsyncSize() const { return queue_.size(); } + + protected: + std::mutex& GetMutex() { return mutex_; } + + T PopUnlocked() { + auto item = queue_.front(); + queue_.pop(); + return item; + } + + void PushUnlocked(const T& item) { + queue_.push(item); + cond_.notify_one(); + } + + void ClearUnlocked() { queue_ = std::queue(); } + + std::optional TryPopUnlocked() { + // Try to pop the oldest value from the queue (or return nullopt if none) + if (queue_.empty()) { + return std::nullopt; + } else { + auto item = queue_.front(); + queue_.pop(); + return item; + } + } + std::queue queue_; + + private: + mutable std::mutex mutex_; + std::condition_variable cond_; +}; + +template +class BackpressureConcurrentQueue : public ConcurrentQueue { + private: + struct DoHandle { + explicit DoHandle(BackpressureConcurrentQueue& queue) + : queue_(queue), start_size_(queue_.UnsyncSize()) {} + + ~DoHandle() { + // unsynced access is safe since DoHandle is internally only used when the + // lock is held + size_t end_size = queue_.UnsyncSize(); + queue_.handler_.Handle(start_size_, 
end_size); + } + + BackpressureConcurrentQueue& queue_; + size_t start_size_; + }; + + public: + explicit BackpressureConcurrentQueue(BackpressureHandler handler) + : handler_(std::move(handler)) {} + + T Pop() { + std::unique_lock lock(ConcurrentQueue::GetMutex()); + DoHandle do_handle(*this); + return ConcurrentQueue::PopUnlocked(); + } + + void Push(const T& item) { + std::unique_lock lock(ConcurrentQueue::GetMutex()); + DoHandle do_handle(*this); + ConcurrentQueue::PushUnlocked(item); + } + + void Clear() { + std::unique_lock lock(ConcurrentQueue::GetMutex()); + DoHandle do_handle(*this); + ConcurrentQueue::ClearUnlocked(); + } + + std::optional TryPop() { + std::unique_lock lock(ConcurrentQueue::GetMutex()); + DoHandle do_handle(*this); + return ConcurrentQueue::TryPopUnlocked(); + } + + Status ForceShutdown() { return handler_.ForceShutdown(); } + + private: + BackpressureHandler handler_; +}; + +} // namespace arrow::acero diff --git a/cpp/src/arrow/acero/exec_plan.cc b/cpp/src/arrow/acero/exec_plan.cc index 541e5fed6206b..97119726d4b17 100644 --- a/cpp/src/arrow/acero/exec_plan.cc +++ b/cpp/src/arrow/acero/exec_plan.cc @@ -1114,6 +1114,7 @@ void RegisterAggregateNode(ExecFactoryRegistry*); void RegisterSinkNode(ExecFactoryRegistry*); void RegisterHashJoinNode(ExecFactoryRegistry*); void RegisterAsofJoinNode(ExecFactoryRegistry*); +void RegisterSortedMergeNode(ExecFactoryRegistry*); } // namespace internal @@ -1132,6 +1133,7 @@ ExecFactoryRegistry* default_exec_factory_registry() { internal::RegisterSinkNode(this); internal::RegisterHashJoinNode(this); internal::RegisterAsofJoinNode(this); + internal::RegisterSortedMergeNode(this); } Result GetFactory(const std::string& factory_name) override { diff --git a/cpp/src/arrow/acero/sorted_merge_node.cc b/cpp/src/arrow/acero/sorted_merge_node.cc new file mode 100644 index 0000000000000..f3b934eda186b --- /dev/null +++ b/cpp/src/arrow/acero/sorted_merge_node.cc @@ -0,0 +1,609 @@ +// Licensed to the Apache Software 
Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#include +#include +#include +#include +#include +#include +#include +#include +#include "arrow/acero/concurrent_queue_internal.h" +#include "arrow/acero/exec_plan.h" +#include "arrow/acero/options.h" +#include "arrow/acero/query_context.h" +#include "arrow/acero/time_series_util.h" +#include "arrow/acero/unmaterialized_table.h" +#include "arrow/acero/util.h" +#include "arrow/array/builder_base.h" +#include "arrow/result.h" +#include "arrow/type_fwd.h" +#include "arrow/util/logging.h" + +namespace { +template +struct Defer { + Callable callable; + explicit Defer(Callable callable_) : callable(std::move(callable_)) {} + ~Defer() noexcept { callable(); } +}; + +std::vector GetInputLabels( + const arrow::acero::ExecNode::NodeVector& inputs) { + std::vector labels(inputs.size()); + for (size_t i = 0; i < inputs.size(); i++) { + labels[i] = "input_" + std::to_string(i) + "_label"; + } + return labels; +} + +template +inline typename T::const_iterator std_find(const T& container, const V& val) { + return std::find(container.begin(), container.end(), val); +} + +template +inline bool std_has(const T& container, const V& val) { + return container.end() != std_find(container, val); +} + +} // namespace + 
+namespace arrow::acero { + +namespace { + +// Each slice is associated with a single input source, so we only need 1 record +// batch per slice +using SingleRecordBatchSliceBuilder = arrow::acero::UnmaterializedSliceBuilder<1>; +using SingleRecordBatchCompositeTable = arrow::acero::UnmaterializedCompositeTable<1>; + +using row_index_t = uint64_t; +using time_unit_t = uint64_t; +using col_index_t = int; + +constexpr bool kNewTask = true; +constexpr bool kPoisonPill = false; + +class BackpressureController : public BackpressureControl { + public: + BackpressureController(ExecNode* node, ExecNode* output, + std::atomic& backpressure_counter) + : node_(node), output_(output), backpressure_counter_(backpressure_counter) {} + + void Pause() override { node_->PauseProducing(output_, ++backpressure_counter_); } + void Resume() override { node_->ResumeProducing(output_, ++backpressure_counter_); } + + private: + ExecNode* node_; + ExecNode* output_; + std::atomic& backpressure_counter_; +}; + +/// InputState correponds to an input. Input record batches are queued up in InputState +/// until processed and turned into output record batches. 
+class InputState { + public: + InputState(size_t index, BackpressureHandler handler, + const std::shared_ptr& schema, const int time_col_index) + : index_(index), + queue_(std::move(handler)), + schema_(schema), + time_col_index_(time_col_index), + time_type_id_(schema_->fields()[time_col_index_]->type()->id()) {} + + template + static arrow::Result Make(size_t index, arrow::acero::ExecNode* input, + arrow::acero::ExecNode* output, + std::atomic& backpressure_counter, + const std::shared_ptr& schema, + const col_index_t time_col_index) { + constexpr size_t low_threshold = 4, high_threshold = 8; + std::unique_ptr backpressure_control = + std::make_unique(input, output, backpressure_counter); + ARROW_ASSIGN_OR_RAISE(auto handler, + BackpressureHandler::Make(input, low_threshold, high_threshold, + std::move(backpressure_control))); + return PtrType(new InputState(index, std::move(handler), schema, time_col_index)); + } + + bool IsTimeColumn(col_index_t i) const { + DCHECK_LT(i, schema_->num_fields()); + return (i == time_col_index_); + } + + // Gets the latest row index, assuming the queue isn't empty + row_index_t GetLatestRow() const { return latest_ref_row_; } + + bool Empty() const { + // cannot be empty if ref row is >0 -- can avoid slow queue lock + // below + if (latest_ref_row_ > 0) { + return false; + } + return queue_.Empty(); + } + + size_t index() const { return index_; } + + int total_batches() const { return total_batches_; } + + // Gets latest batch (precondition: must not be empty) + const std::shared_ptr& GetLatestBatch() const { + return queue_.UnsyncFront(); + } + +#define LATEST_VAL_CASE(id, val) \ + case arrow::Type::id: { \ + using T = typename arrow::TypeIdTraits::Type; \ + using CType = typename arrow::TypeTraits::CType; \ + return val(data->GetValues(1)[row]); \ + } + + inline time_unit_t GetLatestTime() const { + return GetTime(GetLatestBatch().get(), time_type_id_, time_col_index_, + latest_ref_row_); + } + +#undef LATEST_VAL_CASE + + bool 
Finished() const { return batches_processed_ == total_batches_; } + + void Advance(SingleRecordBatchSliceBuilder& builder) { + // Advance the row until a new time is encountered or the record batch + // ends. This will return a range of {-1, -1} and a nullptr if there is + // no input + bool active = + (latest_ref_row_ > 0 /*short circuit the lock on the queue*/) || !queue_.Empty(); + + if (!active) { + return; + } + + row_index_t start = latest_ref_row_; + row_index_t end = latest_ref_row_; + time_unit_t startTime = GetLatestTime(); + std::shared_ptr batch = queue_.UnsyncFront(); + auto rows_in_batch = (row_index_t)batch->num_rows(); + + while (GetLatestTime() == startTime) { + end = ++latest_ref_row_; + if (latest_ref_row_ >= rows_in_batch) { + // hit the end of the batch, need to get the next batch if + // possible. + ++batches_processed_; + latest_ref_row_ = 0; + active &= !queue_.TryPop(); + if (active) { + DCHECK_GT(queue_.UnsyncFront()->num_rows(), + 0); // empty batches disallowed, sanity check + } + break; + } + } + builder.AddEntry(batch, start, end); + } + + arrow::Status Push(const std::shared_ptr& rb) { + if (rb->num_rows() > 0) { + queue_.Push(rb); + } else { + ++batches_processed_; // don't enqueue empty batches, just record + // as processed + } + return arrow::Status::OK(); + } + + const std::shared_ptr& get_schema() const { return schema_; } + + void set_total_batches(int n) { total_batches_ = n; } + + private: + size_t index_; + // Pending record batches. The latest is the front. Batches cannot be empty. 
+ BackpressureConcurrentQueue> queue_; + // Schema associated with the input + std::shared_ptr schema_; + // Total number of batches (only int because InputFinished uses int) + std::atomic total_batches_{-1}; + // Number of batches processed so far (only int because InputFinished uses + // int) + std::atomic batches_processed_{0}; + // Index of the time col + col_index_t time_col_index_; + // Type id of the time column + arrow::Type::type time_type_id_; + // Index of the latest row reference within; if >0 then queue_ cannot be + // empty Must be < queue_.front()->num_rows() if queue_ is non-empty + row_index_t latest_ref_row_ = 0; + // Time of latest row + time_unit_t latest_time_ = std::numeric_limits::lowest(); +}; + +struct InputStateComparator { + bool operator()(const std::shared_ptr& lhs, + const std::shared_ptr& rhs) const { + // True if lhs is ahead of time of rhs + if (lhs->Finished()) { + return false; + } + if (rhs->Finished()) { + return false; + } + time_unit_t lFirst = lhs->GetLatestTime(); + time_unit_t rFirst = rhs->GetLatestTime(); + return lFirst > rFirst; + } +}; + +class SortedMergeNode : public ExecNode { + static constexpr int64_t kTargetOutputBatchSize = 1024 * 1024; + + public: + SortedMergeNode(arrow::acero::ExecPlan* plan, + std::vector inputs, + std::shared_ptr output_schema, + arrow::Ordering new_ordering) + : ExecNode(plan, inputs, GetInputLabels(inputs), std::move(output_schema)), + ordering_(std::move(new_ordering)), + input_counter(inputs_.size()), + output_counter(inputs_.size()), + process_thread() { + SetLabel("sorted_merge"); + } + + ~SortedMergeNode() override { + process_queue.Push( + kPoisonPill); // poison pill + // We might create a temporary (such as to inspect the output + // schema), in which case there isn't anything to join + if (process_thread.joinable()) { + process_thread.join(); + } + } + + static arrow::Result Make( + arrow::acero::ExecPlan* plan, std::vector inputs, + const arrow::acero::ExecNodeOptions& options) 
{ + RETURN_NOT_OK(ValidateExecNodeInputs(plan, inputs, static_cast(inputs.size()), + "SortedMergeNode")); + + if (inputs.size() < 1) { + return Status::Invalid("Constructing a `SortedMergeNode` with < 1 inputs"); + } + + const auto schema = inputs.at(0)->output_schema(); + for (const auto& input : inputs) { + if (!input->output_schema()->Equals(schema)) { + return Status::Invalid( + "SortedMergeNode input schemas must all " + "match, first schema " + "was: ", + schema->ToString(), " got schema: ", input->output_schema()->ToString()); + } + } + + const auto& order_options = + arrow::internal::checked_cast(options); + + if (order_options.ordering.is_implicit() || order_options.ordering.is_unordered()) { + return Status::Invalid("`ordering` must be an explicit non-empty ordering"); + } + + std::shared_ptr output_schema = inputs[0]->output_schema(); + return plan->EmplaceNode( + plan, std::move(inputs), std::move(output_schema), order_options.ordering); + } + + const char* kind_name() const override { return "SortedMergeNode"; } + + const arrow::Ordering& ordering() const override { return ordering_; } + + arrow::Status Init() override { + ARROW_CHECK(ordering_.sort_keys().size() == 1) << "Only one sort key supported"; + + auto inputs = this->inputs(); + for (size_t i = 0; i < inputs.size(); i++) { + ExecNode* input = inputs[i]; + const auto& schema = input->output_schema(); + + const auto& sort_key = ordering_.sort_keys()[0]; + if (sort_key.order != arrow::compute::SortOrder::Ascending) { + return Status::NotImplemented("Only ascending sort order is supported"); + } + + const FieldRef& ref = sort_key.target; + auto match_res = ref.FindOne(*schema); + if (!match_res.ok()) { + return Status::Invalid("Bad sort key : ", match_res.status().message()); + } + ARROW_ASSIGN_OR_RAISE(auto match, match_res); + ARROW_DCHECK(match.indices().size() == 1); + + ARROW_ASSIGN_OR_RAISE(auto input_state, + InputState::Make>( + i, input, this, backpressure_counter, schema, + 
std::move(match.indices()[0]))); + state.push_back(std::move(input_state)); + } + return Status::OK(); + } + + arrow::Status InputReceived(arrow::acero::ExecNode* input, + arrow::ExecBatch batch) override { + ARROW_DCHECK(std_has(inputs_, input)); + const size_t index = std_find(inputs_, input) - inputs_.begin(); + ARROW_ASSIGN_OR_RAISE(std::shared_ptr rb, + batch.ToRecordBatch(output_schema_)); + + // Push into the queue. Note that we don't need to lock since + // InputState's ConcurrentQueue manages locking + input_counter[index] += rb->num_rows(); + ARROW_RETURN_NOT_OK(state[index]->Push(rb)); + process_queue.Push(kNewTask); + return Status::OK(); + } + + arrow::Status InputFinished(arrow::acero::ExecNode* input, int total_batches) override { + ARROW_DCHECK(std_has(inputs_, input)); + { + std::lock_guard guard(gate); + ARROW_DCHECK(std_has(inputs_, input)); + size_t k = std_find(inputs_, input) - inputs_.begin(); + state.at(k)->set_total_batches(total_batches); + } + // Trigger a final process call for stragglers + process_queue.Push(kNewTask); + return Status::OK(); + } + + arrow::Status StartProducing() override { + ARROW_ASSIGN_OR_RAISE(process_task, plan_->query_context()->BeginExternalTask( + "SortedMergeNode::ProcessThread")); + if (!process_task.is_valid()) { + // Plan has already aborted. 
Do not start process thread + return Status::OK(); + } + process_thread = std::thread(&SortedMergeNode::StartPoller, this); + return Status::OK(); + } + + arrow::Status StopProducingImpl() override { + process_queue.Clear(); + process_queue.Push(kPoisonPill); + return Status::OK(); + } + + // handled by the backpressure controller + void PauseProducing(arrow::acero::ExecNode* output, int32_t counter) override {} + void ResumeProducing(arrow::acero::ExecNode* output, int32_t counter) override {} + + protected: + std::string ToStringExtra(int indent) const override { + std::stringstream ss; + ss << "ordering=" << ordering_.ToString(); + return ss.str(); + } + + private: + void EndFromProcessThread(arrow::Status st = arrow::Status::OK()) { + ARROW_CHECK(!cleanup_started); + for (size_t i = 0; i < input_counter.size(); ++i) { + ARROW_CHECK(input_counter[i] == output_counter[i]) + << input_counter[i] << " != " << output_counter[i]; + } + + ARROW_UNUSED( + plan_->query_context()->executor()->Spawn([this, st = std::move(st)]() mutable { + Defer cleanup([this, &st]() { process_task.MarkFinished(st); }); + if (st.ok()) { + st = output_->InputFinished(this, batches_produced); + } + })); + } + + bool CheckEnded() { + bool all_finished = true; + for (const auto& s : state) { + all_finished &= s->Finished(); + } + if (all_finished) { + EndFromProcessThread(); + return false; + } + return true; + } + + /// Streams the input states in sorted order until we run out of input + arrow::Result> getNextBatch() { + DCHECK(!state.empty()); + for (const auto& s : state) { + if (s->Empty() && !s->Finished()) { + return nullptr; // not enough data, wait + } + } + + std::vector> heap = state; + // filter out finished states + heap.erase(std::remove_if( + heap.begin(), heap.end(), + [](const std::shared_ptr& s) { return s->Finished(); }), + heap.end()); + + // If any are Empty(), then return early since we don't have enough data + if (std::any_of(heap.begin(), heap.end(), + [](const 
std::shared_ptr& s) { return s->Empty(); })) { + return nullptr; + } + + // Currently we only support one sort key + const auto sort_col = *ordering_.sort_keys().at(0).target.name(); + const auto comp = InputStateComparator(); + std::make_heap(heap.begin(), heap.end(), comp); + + // Each slice only has one record batch with the same schema as the output + std::unordered_map> output_col_to_src; + for (int i = 0; i < output_schema_->num_fields(); i++) { + output_col_to_src[i] = std::make_pair(0, i); + } + SingleRecordBatchCompositeTable output(output_schema(), 1, + std::move(output_col_to_src), + plan()->query_context()->memory_pool()); + + // Generate rows until we run out of data or we exceed the target output + // size + bool waiting_for_more_data = false; + while (!waiting_for_more_data && !heap.empty() && + output.Size() < kTargetOutputBatchSize) { + std::pop_heap(heap.begin(), heap.end(), comp); + + auto& next_item = heap.back(); + time_unit_t latest_time = std::numeric_limits::min(); + time_unit_t new_time = next_item->GetLatestTime(); + ARROW_CHECK(new_time >= latest_time) + << "Input state " << next_item->index() + << " has out of order data. newTime=" << new_time + << " latestTime=" << latest_time; + + latest_time = new_time; + SingleRecordBatchSliceBuilder builder{&output}; + next_item->Advance(builder); + + if (builder.Size() > 0) { + output_counter[next_item->index()] += builder.Size(); + builder.Finalize(); + } + if (next_item->Finished()) { + heap.pop_back(); + } else if (next_item->Empty()) { + // We've run out of data on one of the inputs + waiting_for_more_data = true; + continue; // skip the unnecessary make_heap + } + std::make_heap(heap.begin(), heap.end(), comp); + } + + // Emit the batch + if (output.Size() == 0) { + return nullptr; + } + + ARROW_ASSIGN_OR_RAISE(auto maybe_rb, output.Materialize()); + return maybe_rb.value_or(nullptr); + } + /// Gets a batch. 
Returns true if there is more data to process, false if we + /// are done or an error occurred + bool PollOnce() { + std::lock_guard guard(gate); + if (!CheckEnded()) { + return false; + } + + // Process batches while we have data + for (;;) { + Result> result = getNextBatch(); + + if (result.ok()) { + auto out_rb = *result; + if (!out_rb) { + break; + } + ExecBatch out_b(*out_rb); + out_b.index = batches_produced++; + Status st = output_->InputReceived(this, std::move(out_b)); + if (!st.ok()) { + ARROW_LOG(FATAL) << "Error in output_::InputReceived: " << st.ToString(); + EndFromProcessThread(std::move(st)); + } + } else { + EndFromProcessThread(result.status()); + return false; + } + } + + // Report to the output the total batch count, if we've already + // finished everything (there are two places where this can happen: + // here and InputFinished) + // + // It may happen here in cases where InputFinished was called before + // we were finished producing results (so we didn't know the output + // size at that time) + if (!CheckEnded()) { + return false; + } + + // There is no more we can do now but there is still work remaining + // for later when more data arrives. + return true; + } + + void EmitBatches() { + while (true) { + // Implementation note: If the queue is empty, we will block here + if (process_queue.Pop() == kPoisonPill) { + EndFromProcessThread(); + } + // Either we're out of data or something went wrong + if (!PollOnce()) { + return; + } + } + } + + /// The entry point for processThread + static void StartPoller(SortedMergeNode* node) { node->EmitBatches(); } + + arrow::Ordering ordering_; + + // Each input state corresponds to an input (e.g. 
a parquet data file) + std::vector> state; + std::vector input_counter; + std::vector output_counter; + std::mutex gate; + + std::atomic cleanup_started{false}; + + // Backpressure counter common to all input states + std::atomic backpressure_counter; + + std::atomic batches_produced{0}; + + // Queue to trigger processing of a given input. False acts as a poison pill + ConcurrentQueue process_queue; + // Once StartProducing is called, we initialize this thread to poll the + // input states and emit batches + std::thread process_thread; + arrow::Future<> process_task; + + // Map arg index --> completion counter + std::vector counter_; + // Map arg index --> data + std::vector accumulation_queue_; + std::mutex mutex_; + std::atomic total_batches_{0}; +}; + +} // namespace + +namespace internal { +void RegisterSortedMergeNode(ExecFactoryRegistry* registry) { + DCHECK_OK(registry->AddFactory("sorted_merge", SortedMergeNode::Make)); +} +} // namespace internal + +} // namespace arrow::acero diff --git a/cpp/src/arrow/acero/sorted_merge_node_test.cc b/cpp/src/arrow/acero/sorted_merge_node_test.cc new file mode 100644 index 0000000000000..55446d631d90c --- /dev/null +++ b/cpp/src/arrow/acero/sorted_merge_node_test.cc @@ -0,0 +1,87 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations +// under the License. + +#include + +#include "arrow/acero/exec_plan.h" +#include "arrow/acero/map_node.h" +#include "arrow/acero/options.h" +#include "arrow/acero/test_nodes.h" +#include "arrow/array/builder_base.h" +#include "arrow/array/concatenate.h" +#include "arrow/compute/ordering.h" +#include "arrow/result.h" +#include "arrow/scalar.h" +#include "arrow/table.h" +#include "arrow/testing/generator.h" +#include "arrow/testing/gtest_util.h" +#include "arrow/type.h" +#include "arrow/type_fwd.h" + +namespace arrow::acero { + +std::shared_ptr TestTable(int start, int step, int rows_per_batch, + int num_batches) { + return gen::Gen({{"timestamp", gen::Step(start, step, /*signed_int=*/true)}, + {"str", gen::Random(utf8())}}) + ->FailOnError() + ->Table(rows_per_batch, num_batches); +} + +TEST(SortedMergeNode, Basic) { + auto table1 = TestTable( + /*start=*/0, + /*step=*/2, + /*rows_per_batch=*/2, + /*num_batches=*/3); + auto table2 = TestTable( + /*start=*/1, + /*step=*/2, + /*rows_per_batch=*/3, + /*num_batches=*/2); + auto table3 = TestTable( + /*start=*/3, + /*step=*/3, + /*rows_per_batch=*/6, + /*num_batches=*/1); + std::vector src_decls; + src_decls.emplace_back(Declaration("table_source", TableSourceNodeOptions(table1))); + src_decls.emplace_back(Declaration("table_source", TableSourceNodeOptions(table2))); + src_decls.emplace_back(Declaration("table_source", TableSourceNodeOptions(table3))); + + auto ops = OrderByNodeOptions(compute::Ordering({compute::SortKey("timestamp")})); + + Declaration sorted_merge{"sorted_merge", src_decls, ops}; + // We can't use threads for sorted merging since it relies on + // ascending deterministic order of timestamps + ASSERT_OK_AND_ASSIGN(auto output, + DeclarationToTable(sorted_merge, /*use_threads=*/false)); + ASSERT_EQ(output->num_rows(), 18); + + ASSERT_OK_AND_ASSIGN(auto expected_ts_builder, + MakeBuilder(int32(), default_memory_pool())); 
+ for (auto i : {0, 1, 2, 3, 3, 4, 5, 6, 6, 7, 8, 9, 9, 10, 11, 12, 15, 18}) { + ASSERT_OK(expected_ts_builder->AppendScalar(*MakeScalar(i))); + } + ASSERT_OK_AND_ASSIGN(auto expected_ts, expected_ts_builder->Finish()); + auto output_col = output->column(0); + ASSERT_OK_AND_ASSIGN(auto output_ts, Concatenate(output_col->chunks())); + + AssertArraysEqual(*expected_ts, *output_ts); +} + +} // namespace arrow::acero diff --git a/cpp/src/arrow/acero/time_series_util.cc b/cpp/src/arrow/acero/time_series_util.cc new file mode 100644 index 0000000000000..71133fef47306 --- /dev/null +++ b/cpp/src/arrow/acero/time_series_util.cc @@ -0,0 +1,63 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#include "arrow/array/data.h" + +#include "arrow/acero/time_series_util.h" +#include "arrow/util/logging.h" + +namespace arrow::acero { + +template ::value, bool>> +inline uint64_t NormalizeTime(T t) { + uint64_t bias = + std::is_signed::value ? static_cast(1) << (8 * sizeof(T) - 1) : 0; + return t < 0 ? 
static_cast(t + bias) : static_cast(t); +} + +uint64_t GetTime(const RecordBatch* batch, Type::type time_type, int col, uint64_t row) { +#define LATEST_VAL_CASE(id, val) \ + case Type::id: { \ + using T = typename TypeIdTraits::Type; \ + using CType = typename TypeTraits::CType; \ + return val(data->GetValues(1)[row]); \ + } + + auto data = batch->column_data(col); + switch (time_type) { + LATEST_VAL_CASE(INT8, NormalizeTime) + LATEST_VAL_CASE(INT16, NormalizeTime) + LATEST_VAL_CASE(INT32, NormalizeTime) + LATEST_VAL_CASE(INT64, NormalizeTime) + LATEST_VAL_CASE(UINT8, NormalizeTime) + LATEST_VAL_CASE(UINT16, NormalizeTime) + LATEST_VAL_CASE(UINT32, NormalizeTime) + LATEST_VAL_CASE(UINT64, NormalizeTime) + LATEST_VAL_CASE(DATE32, NormalizeTime) + LATEST_VAL_CASE(DATE64, NormalizeTime) + LATEST_VAL_CASE(TIME32, NormalizeTime) + LATEST_VAL_CASE(TIME64, NormalizeTime) + LATEST_VAL_CASE(TIMESTAMP, NormalizeTime) + default: + DCHECK(false); + return 0; // cannot happen + } + +#undef LATEST_VAL_CASE +} + +} // namespace arrow::acero diff --git a/cpp/src/arrow/acero/time_series_util.h b/cpp/src/arrow/acero/time_series_util.h new file mode 100644 index 0000000000000..97707f43bf20b --- /dev/null +++ b/cpp/src/arrow/acero/time_series_util.h @@ -0,0 +1,31 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations +// under the License. + +#pragma once + +#include "arrow/record_batch.h" +#include "arrow/type_traits.h" + +namespace arrow::acero { + +// normalize the value to unsigned 64-bits while preserving ordering of values +template ::value, bool> = true> +uint64_t NormalizeTime(T t); + +uint64_t GetTime(const RecordBatch* batch, Type::type time_type, int col, uint64_t row); + +} // namespace arrow::acero diff --git a/cpp/src/arrow/acero/unmaterialized_table.h b/cpp/src/arrow/acero/unmaterialized_table.h new file mode 100644 index 0000000000000..05d6c866936e0 --- /dev/null +++ b/cpp/src/arrow/acero/unmaterialized_table.h @@ -0,0 +1,271 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#pragma once + +#include +#include +#include "arrow/array/builder_base.h" +#include "arrow/array/builder_binary.h" +#include "arrow/array/builder_primitive.h" +#include "arrow/memory_pool.h" +#include "arrow/record_batch.h" +#include "arrow/type_traits.h" +#include "arrow/util/logging.h" + +namespace arrow::acero { + +/// Lightweight representation of a cell of an unmaterialized table. 
+/// +struct CompositeEntry { + RecordBatch* batch; + uint64_t start; + uint64_t end; +}; + +// Forward declare the builder +template +class UnmaterializedSliceBuilder; + +/// A table of composite reference rows. Rows maintain pointers to the +/// constituent record batches, but the overall table retains shared_ptr +/// references to ensure memory remains resident while the table is live. +/// +/// The main reason for this is that, especially for wide tables, some operations +/// such as sorted_merge or asof_join are effectively row-oriented, rather than +/// column-oriented. Separating the join part from the columnar materialization +/// part simplifies the logic around data types and increases efficiency. +/// +/// We don't put the shared_ptr's into the rows for efficiency reasons. Use +/// UnmaterializedSliceBuilder to add ranges of record batches to this table +template +class UnmaterializedCompositeTable { + public: + UnmaterializedCompositeTable( + const std::shared_ptr& output_schema, size_t num_composite_tables, + std::unordered_map> output_col_to_src_, + arrow::MemoryPool* pool_ = arrow::default_memory_pool()) + : schema(output_schema), + num_composite_tables(num_composite_tables), + output_col_to_src(std::move(output_col_to_src_)), + pool{pool_} {} + + // Shallow wrappers around std::vector for performance + inline size_t capacity() { return slices.capacity(); } + inline void reserve(size_t num_slices) { slices.reserve(num_slices); } + + inline size_t Size() const { return num_rows; } + inline size_t Empty() const { return num_rows == 0; } + + Result>> Materialize() { + // Don't build empty batches + if (Empty()) { + return std::nullopt; + } + DCHECK_LE(Size(), (uint64_t)std::numeric_limits::max()); + std::vector> arrays(schema->num_fields()); + +#define MATERIALIZE_CASE(id) \ + case arrow::Type::id: { \ + using T = typename arrow::TypeIdTraits::Type; \ + ARROW_ASSIGN_OR_RAISE(arrays.at(i_col), materializeColumn(field_type, i_col)); \ + break; \ + } + + 
// Build the arrays column-by-column from the rows + for (int i_col = 0; i_col < schema->num_fields(); ++i_col) { + const std::shared_ptr& field = schema->field(i_col); + const auto& field_type = field->type(); + + switch (field_type->id()) { + MATERIALIZE_CASE(BOOL) + MATERIALIZE_CASE(INT8) + MATERIALIZE_CASE(INT16) + MATERIALIZE_CASE(INT32) + MATERIALIZE_CASE(INT64) + MATERIALIZE_CASE(UINT8) + MATERIALIZE_CASE(UINT16) + MATERIALIZE_CASE(UINT32) + MATERIALIZE_CASE(UINT64) + MATERIALIZE_CASE(FLOAT) + MATERIALIZE_CASE(DOUBLE) + MATERIALIZE_CASE(DATE32) + MATERIALIZE_CASE(DATE64) + MATERIALIZE_CASE(TIME32) + MATERIALIZE_CASE(TIME64) + MATERIALIZE_CASE(TIMESTAMP) + MATERIALIZE_CASE(STRING) + MATERIALIZE_CASE(LARGE_STRING) + MATERIALIZE_CASE(BINARY) + MATERIALIZE_CASE(LARGE_BINARY) + default: + return arrow::Status::Invalid("Unsupported data type ", + field->type()->ToString(), " for field ", + field->name()); + } + } + +#undef MATERIALIZE_CASE + + std::shared_ptr r = + arrow::RecordBatch::Make(schema, (int64_t)num_rows, arrays); + return r; + } + + private: + struct UnmaterializedSlice { + CompositeEntry components[MAX_COMPOSITE_TABLES]; + size_t num_components; + + inline int64_t Size() const { + if (num_components == 0) { + return 0; + } + return components[0].end - components[0].start; + } + }; + + // Mapping from an output column ID to a source table ID and column ID + std::shared_ptr schema; + size_t num_composite_tables; + std::unordered_map> output_col_to_src; + + arrow::MemoryPool* pool; + + /// A map from address of a record batch to the record batch. 
Used to + /// maintain the lifetime of the record batch in case it goes out of scope + /// by the main exec node thread + std::unordered_map> ptr2Ref = {}; + std::vector slices; + + size_t num_rows = 0; + + // for AddRecordBatchRef/AddSlice and access to UnmaterializedSlice + friend class UnmaterializedSliceBuilder; + + void AddRecordBatchRef(const std::shared_ptr& ref) { + ptr2Ref[(uintptr_t)ref.get()] = ref; + } + void AddSlice(const UnmaterializedSlice& slice) { + slices.push_back(slice); + num_rows += slice.Size(); + } + + template ::BuilderType> + enable_if_boolean static BuilderAppend( + Builder& builder, const std::shared_ptr& source, uint64_t row) { + if (source->IsNull(row)) { + builder.UnsafeAppendNull(); + return Status::OK(); + } + builder.UnsafeAppend(bit_util::GetBit(source->template GetValues(1), row)); + return Status::OK(); + } + + template ::BuilderType> + enable_if_t::value && !is_boolean_type::value, + Status> static BuilderAppend(Builder& builder, + const std::shared_ptr& source, + uint64_t row) { + if (source->IsNull(row)) { + builder.UnsafeAppendNull(); + return Status::OK(); + } + using CType = typename TypeTraits::CType; + builder.UnsafeAppend(source->template GetValues(1)[row]); + return Status::OK(); + } + + template ::BuilderType> + enable_if_base_binary static BuilderAppend( + Builder& builder, const std::shared_ptr& source, uint64_t row) { + if (source->IsNull(row)) { + return builder.AppendNull(); + } + using offset_type = typename Type::offset_type; + const uint8_t* data = source->buffers[2]->data(); + const offset_type* offsets = source->GetValues(1); + const offset_type offset0 = offsets[row]; + const offset_type offset1 = offsets[row + 1]; + return builder.Append(data + offset0, offset1 - offset0); + } + + template ::BuilderType> + arrow::Result> materializeColumn( + const std::shared_ptr& type, int i_col) { + ARROW_ASSIGN_OR_RAISE(auto builderPtr, arrow::MakeBuilder(type, pool)); + Builder& builder = 
*arrow::internal::checked_cast(builderPtr.get()); + ARROW_RETURN_NOT_OK(builder.Reserve(num_rows)); + + const auto& [table_index, column_index] = output_col_to_src[i_col]; + + for (const auto& unmaterialized_slice : slices) { + const auto& [batch, start, end] = unmaterialized_slice.components[table_index]; + if (batch) { + for (uint64_t rowNum = start; rowNum < end; ++rowNum) { + arrow::Status st = BuilderAppend( + builder, batch->column_data(column_index), rowNum); + ARROW_RETURN_NOT_OK(st); + } + } else { + for (uint64_t rowNum = start; rowNum < end; ++rowNum) { + ARROW_RETURN_NOT_OK(builder.AppendNull()); + } + } + } + std::shared_ptr result; + ARROW_RETURN_NOT_OK(builder.Finish(&result)); + return Result{std::move(result)}; + } +}; + +/// A builder class that can append blocks of data to a row. A "slice" +/// is built by horizontally concatenating record batches. +template +class UnmaterializedSliceBuilder { + public: + explicit UnmaterializedSliceBuilder( + UnmaterializedCompositeTable* table_) + : table(table_) {} + + void AddEntry(std::shared_ptr rb, uint64_t start, uint64_t end) { + if (rb) { + table->AddRecordBatchRef(rb); + } + if (slice.num_components) { + size_t last_index = slice.num_components - 1; + DCHECK_EQ(slice.components[last_index].end - slice.components[last_index].start, + end - start) + << "Slices should be the same length. 
"; + } + slice.components[slice.num_components++] = CompositeEntry{rb.get(), start, end}; + } + + void Finalize() { table->AddSlice(slice); } + int64_t Size() { return slice.Size(); } + + private: + using TUnmaterializedCompositeTable = + UnmaterializedCompositeTable; + using TUnmaterializedSlice = + typename TUnmaterializedCompositeTable::UnmaterializedSlice; + + TUnmaterializedCompositeTable* table; + TUnmaterializedSlice slice{}; +}; + +} // namespace arrow::acero diff --git a/cpp/src/arrow/adapters/orc/CMakeLists.txt b/cpp/src/arrow/adapters/orc/CMakeLists.txt index e8ff69c191fb1..4d66151cd38c9 100644 --- a/cpp/src/arrow/adapters/orc/CMakeLists.txt +++ b/cpp/src/arrow/adapters/orc/CMakeLists.txt @@ -27,19 +27,15 @@ install(FILES adapter.h options.h arrow_add_pkg_config("arrow-orc") if(ARROW_BUILD_STATIC) - set(ARROW_LIBRARIES_FOR_STATIC_TESTS arrow_testing_static arrow_static) + set(ARROW_ORC_STATIC_LINK_LIBS ${ARROW_TEST_STATIC_LINK_LIBS}) else() - set(ARROW_LIBRARIES_FOR_STATIC_TESTS arrow_testing_shared arrow_shared) + set(ARROW_ORC_STATIC_LINK_LIBS ${ARROW_TEST_SHARED_LINK_LIBS}) endif() - -set(ORC_STATIC_TEST_LINK_LIBS orc::liborc ${ARROW_LIBRARIES_FOR_STATIC_TESTS} - ${ARROW_GTEST_GTEST_MAIN} ${ARROW_GTEST_GTEST}) - add_arrow_test(adapter_test PREFIX "arrow-orc" STATIC_LINK_LIBS - ${ORC_STATIC_TEST_LINK_LIBS}) + ${ARROW_ORC_STATIC_LINK_LIBS}) set_source_files_properties(adapter_test.cc PROPERTIES SKIP_PRECOMPILE_HEADERS ON SKIP_UNITY_BUILD_INCLUSION ON) diff --git a/cpp/src/arrow/array/array_base.cc b/cpp/src/arrow/array/array_base.cc index f7b8d7954e1cf..eab71de27b11a 100644 --- a/cpp/src/arrow/array/array_base.cc +++ b/cpp/src/arrow/array/array_base.cc @@ -87,6 +87,8 @@ struct ScalarFromArraySlotImpl { return Finish(a.GetString(index_)); } + Status Visit(const BinaryViewArray& a) { return Finish(a.GetString(index_)); } + Status Visit(const FixedSizeBinaryArray& a) { return Finish(a.GetString(index_)); } Status Visit(const DayTimeIntervalArray& a) { 
return Finish(a.Value(index_)); } diff --git a/cpp/src/arrow/array/array_binary.cc b/cpp/src/arrow/array/array_binary.cc index 9466b5a48f9d7..d83ba0ca8936d 100644 --- a/cpp/src/arrow/array/array_binary.cc +++ b/cpp/src/arrow/array/array_binary.cc @@ -24,6 +24,7 @@ #include "arrow/array/validate.h" #include "arrow/type.h" #include "arrow/type_traits.h" +#include "arrow/util/binary_view_util.h" #include "arrow/util/checked_cast.h" #include "arrow/util/logging.h" @@ -89,6 +90,33 @@ LargeStringArray::LargeStringArray(int64_t length, Status LargeStringArray::ValidateUTF8() const { return internal::ValidateUTF8(*data_); } +BinaryViewArray::BinaryViewArray(std::shared_ptr data) { + ARROW_CHECK_EQ(data->type->id(), Type::BINARY_VIEW); + SetData(std::move(data)); +} + +BinaryViewArray::BinaryViewArray(std::shared_ptr type, int64_t length, + std::shared_ptr views, BufferVector buffers, + std::shared_ptr null_bitmap, int64_t null_count, + int64_t offset) { + buffers.insert(buffers.begin(), std::move(views)); + buffers.insert(buffers.begin(), std::move(null_bitmap)); + SetData( + ArrayData::Make(std::move(type), length, std::move(buffers), null_count, offset)); +} + +std::string_view BinaryViewArray::GetView(int64_t i) const { + const std::shared_ptr* data_buffers = data_->buffers.data() + 2; + return util::FromBinaryView(raw_values_[i], data_buffers); +} + +StringViewArray::StringViewArray(std::shared_ptr data) { + ARROW_CHECK_EQ(data->type->id(), Type::STRING_VIEW); + SetData(std::move(data)); +} + +Status StringViewArray::ValidateUTF8() const { return internal::ValidateUTF8(*data_); } + FixedSizeBinaryArray::FixedSizeBinaryArray(const std::shared_ptr& data) { SetData(data); } diff --git a/cpp/src/arrow/array/array_binary.h b/cpp/src/arrow/array/array_binary.h index 7e58a96ff841a..fd68a379ddbfb 100644 --- a/cpp/src/arrow/array/array_binary.h +++ b/cpp/src/arrow/array/array_binary.h @@ -22,6 +22,7 @@ #include #include +#include #include #include #include @@ -217,6 +218,65 @@ 
class ARROW_EXPORT LargeStringArray : public LargeBinaryArray { Status ValidateUTF8() const; }; +// ---------------------------------------------------------------------- +// BinaryView and StringView + +/// Concrete Array class for variable-size binary view data using the +/// BinaryViewType::c_type struct to reference in-line or out-of-line string values +class ARROW_EXPORT BinaryViewArray : public FlatArray { + public: + using TypeClass = BinaryViewType; + using IteratorType = stl::ArrayIterator; + using c_type = BinaryViewType::c_type; + + explicit BinaryViewArray(std::shared_ptr data); + + BinaryViewArray(std::shared_ptr type, int64_t length, + std::shared_ptr views, BufferVector data_buffers, + std::shared_ptr null_bitmap = NULLPTR, + int64_t null_count = kUnknownNullCount, int64_t offset = 0); + + // For API compatibility with BinaryArray etc. + std::string_view GetView(int64_t i) const; + std::string GetString(int64_t i) const { return std::string{GetView(i)}; } + + const auto& values() const { return data_->buffers[1]; } + const c_type* raw_values() const { return raw_values_; } + + std::optional operator[](int64_t i) const { + return *IteratorType(*this, i); + } + + IteratorType begin() const { return IteratorType(*this); } + IteratorType end() const { return IteratorType(*this, length()); } + + protected: + using FlatArray::FlatArray; + + void SetData(std::shared_ptr data) { + FlatArray::SetData(std::move(data)); + raw_values_ = data_->GetValuesSafe(1); + } + + const c_type* raw_values_; +}; + +/// Concrete Array class for variable-size string view (utf-8) data using +/// BinaryViewType::c_type to reference in-line or out-of-line string values +class ARROW_EXPORT StringViewArray : public BinaryViewArray { + public: + using TypeClass = StringViewType; + + explicit StringViewArray(std::shared_ptr data); + + using BinaryViewArray::BinaryViewArray; + + /// \brief Validate that this array contains only valid UTF8 entries + /// + /// This check is also implied 
by ValidateFull() + Status ValidateUTF8() const; +}; + // ---------------------------------------------------------------------- // Fixed width binary diff --git a/cpp/src/arrow/array/array_binary_test.cc b/cpp/src/arrow/array/array_binary_test.cc index 3bc9bb91a022a..04391be0ac789 100644 --- a/cpp/src/arrow/array/array_binary_test.cc +++ b/cpp/src/arrow/array/array_binary_test.cc @@ -27,17 +27,21 @@ #include "arrow/array.h" #include "arrow/array/builder_binary.h" +#include "arrow/array/validate.h" #include "arrow/buffer.h" #include "arrow/memory_pool.h" #include "arrow/status.h" #include "arrow/testing/builder.h" #include "arrow/testing/gtest_util.h" +#include "arrow/testing/matchers.h" #include "arrow/testing/util.h" #include "arrow/type.h" #include "arrow/type_traits.h" #include "arrow/util/bit_util.h" #include "arrow/util/bitmap_builders.h" #include "arrow/util/checked_cast.h" +#include "arrow/util/key_value_metadata.h" +#include "arrow/util/logging.h" #include "arrow/visit_data_inline.h" namespace arrow { @@ -365,38 +369,134 @@ TYPED_TEST(TestStringArray, TestValidateOffsets) { this->TestValidateOffsets(); TYPED_TEST(TestStringArray, TestValidateData) { this->TestValidateData(); } +// Produce an Array of index/offset views from a std::vector of index/offset +// BinaryViewType::c_type +Result> MakeBinaryViewArray( + BufferVector data_buffers, const std::vector& views, + bool validate = true) { + auto length = static_cast(views.size()); + auto arr = std::make_shared( + utf8_view(), length, Buffer::FromVector(views), std::move(data_buffers)); + if (validate) { + RETURN_NOT_OK(arr->ValidateFull()); + } + return arr; +} + +TEST(StringViewArray, Validate) { + // Since this is a test of validation, we need to be able to construct invalid arrays. 
+ auto buffer_s = Buffer::FromString("supercalifragilistic(sp?)"); + auto buffer_y = Buffer::FromString("yyyyyyyyyyyyyyyyyyyyyyyyy"); + + // empty array is valid + EXPECT_THAT(MakeBinaryViewArray({}, {}), Ok()); + + // empty array with some data buffers is valid + EXPECT_THAT(MakeBinaryViewArray({buffer_s, buffer_y}, {}), Ok()); + + // inline views need not have a corresponding buffer + EXPECT_THAT(MakeBinaryViewArray({}, + { + util::ToInlineBinaryView("hello"), + util::ToInlineBinaryView("world"), + util::ToInlineBinaryView("inline me"), + }), + Ok()); + + // non-inline views are expected to reference only buffers managed by the array + EXPECT_THAT( + MakeBinaryViewArray( + {buffer_s, buffer_y}, + {util::ToBinaryView("supe", static_cast(buffer_s->size()), 0, 0), + util::ToBinaryView("yyyy", static_cast(buffer_y->size()), 1, 0)}), + Ok()); + + // views may not reference data buffers not present in the array + EXPECT_THAT( + MakeBinaryViewArray( + {}, {util::ToBinaryView("supe", static_cast(buffer_s->size()), 0, 0)}), + Raises(StatusCode::IndexError)); + // ... 
or ranges which overflow the referenced data buffer + EXPECT_THAT( + MakeBinaryViewArray( + {buffer_s}, {util::ToBinaryView( + "supe", static_cast(buffer_s->size() + 50), 0, 0)}), + Raises(StatusCode::IndexError)); + + // Additionally, the prefixes of non-inline views must match the data buffer + EXPECT_THAT( + MakeBinaryViewArray( + {buffer_s, buffer_y}, + {util::ToBinaryView("SUPE", static_cast(buffer_s->size()), 0, 0), + util::ToBinaryView("yyyy", static_cast(buffer_y->size()), 1, 0)}), + Raises(StatusCode::Invalid)); + + // Invalid string views which are masked by a null bit do not cause validation to fail + auto invalid_but_masked = + MakeBinaryViewArray( + {buffer_s}, + {util::ToBinaryView("SUPE", static_cast(buffer_s->size()), 0, 0), + util::ToBinaryView("yyyy", 50, 40, 30)}, + /*validate=*/false) + .ValueOrDie() + ->data(); + invalid_but_masked->null_count = 2; + invalid_but_masked->buffers[0] = *AllocateEmptyBitmap(2); + EXPECT_THAT(internal::ValidateArrayFull(*invalid_but_masked), Ok()); + + // overlapping views are allowed + EXPECT_THAT( + MakeBinaryViewArray( + {buffer_s}, + { + util::ToBinaryView("supe", static_cast(buffer_s->size()), 0, 0), + util::ToBinaryView("uper", static_cast(buffer_s->size() - 1), 0, + 1), + util::ToBinaryView("perc", static_cast(buffer_s->size() - 2), 0, + 2), + util::ToBinaryView("erca", static_cast(buffer_s->size() - 3), 0, + 3), + }), + Ok()); +} + template class TestUTF8Array : public ::testing::Test { public: using TypeClass = T; - using offset_type = typename TypeClass::offset_type; using ArrayType = typename TypeTraits::ArrayType; - Status ValidateUTF8(int64_t length, std::vector offsets, - std::string_view data, int64_t offset = 0) { - ArrayType arr(length, Buffer::Wrap(offsets), std::make_shared(data), - /*null_bitmap=*/nullptr, /*null_count=*/0, offset); - return arr.ValidateUTF8(); + std::shared_ptr type() const { + if constexpr (is_binary_view_like_type::value) { + return TypeClass::is_utf8 ? 
utf8_view() : binary_view(); + } else { + return TypeTraits::type_singleton(); + } } - Status ValidateUTF8(const std::string& json) { - auto ty = TypeTraits::type_singleton(); - auto arr = ArrayFromJSON(ty, json); - return checked_cast(*arr).ValidateUTF8(); + Status ValidateUTF8(const Array& arr) { + return checked_cast(arr).ValidateUTF8(); + } + + Status ValidateUTF8(std::vector values) { + std::shared_ptr arr; + ArrayFromVector(type(), values, &arr); + return ValidateUTF8(*arr); } void TestValidateUTF8() { - ASSERT_OK(ValidateUTF8(R"(["Voix", "ambiguë", "d’un", "cœur"])")); - ASSERT_OK(ValidateUTF8(1, {0, 4}, "\xf4\x8f\xbf\xbf")); // \U0010ffff + ASSERT_OK( + ValidateUTF8(*ArrayFromJSON(type(), R"(["Voix", "ambiguë", "d’un", "cœur"])"))); + ASSERT_OK(ValidateUTF8({"\xf4\x8f\xbf\xbf"})); // \U0010ffff - ASSERT_RAISES(Invalid, ValidateUTF8(1, {0, 1}, "\xf4")); + ASSERT_RAISES(Invalid, ValidateUTF8({"\xf4"})); // More tests in TestValidateData() above // (ValidateFull() calls ValidateUTF8() internally) } }; -TYPED_TEST_SUITE(TestUTF8Array, StringArrowTypes); +TYPED_TEST_SUITE(TestUTF8Array, StringOrStringViewArrowTypes); TYPED_TEST(TestUTF8Array, TestValidateUTF8) { this->TestValidateUTF8(); } @@ -883,11 +983,15 @@ class TestBaseBinaryDataVisitor : public ::testing::Test { void SetUp() override { type_ = TypeTraits::type_singleton(); } void TestBasics() { - auto array = ArrayFromJSON(type_, R"(["foo", null, "bar"])"); + auto array = ArrayFromJSON( + type_, + R"(["foo", null, "bar", "inline_me", "allocate_me_aaaaa", "allocate_me_bbbb"])"); BinaryAppender appender; ArraySpanVisitor visitor; ASSERT_OK(visitor.Visit(*array->data(), &appender)); - ASSERT_THAT(appender.data, ::testing::ElementsAreArray({"foo", "(null)", "bar"})); + ASSERT_THAT(appender.data, + ::testing::ElementsAreArray({"foo", "(null)", "bar", "inline_me", + "allocate_me_aaaaa", "allocate_me_bbbb"})); ARROW_UNUSED(visitor); // Workaround weird MSVC warning } @@ -904,7 +1008,7 @@ class 
TestBaseBinaryDataVisitor : public ::testing::Test { std::shared_ptr type_; }; -TYPED_TEST_SUITE(TestBaseBinaryDataVisitor, BaseBinaryArrowTypes); +TYPED_TEST_SUITE(TestBaseBinaryDataVisitor, BaseBinaryOrBinaryViewLikeArrowTypes); TYPED_TEST(TestBaseBinaryDataVisitor, Basics) { this->TestBasics(); } diff --git a/cpp/src/arrow/array/array_test.cc b/cpp/src/arrow/array/array_test.cc index 2bef9d725d37f..46908439ef5f0 100644 --- a/cpp/src/arrow/array/array_test.cc +++ b/cpp/src/arrow/array/array_test.cc @@ -382,10 +382,12 @@ static std::vector> TestArrayUtilitiesAgainstTheseType float64(), binary(), large_binary(), + binary_view(), fixed_size_binary(3), decimal(16, 4), utf8(), large_utf8(), + utf8_view(), list(utf8()), list(int64()), // NOTE: Regression case for ARROW-9071/MakeArrayOfNull list(large_utf8()), @@ -601,12 +603,15 @@ static ScalarVector GetScalars() { std::make_shared(60, duration(TimeUnit::SECOND)), std::make_shared(hello), std::make_shared(hello), + std::make_shared(hello), std::make_shared( hello, fixed_size_binary(static_cast(hello->size()))), std::make_shared(Decimal128(10), decimal(16, 4)), std::make_shared(Decimal256(10), decimal(76, 38)), std::make_shared(hello), std::make_shared(hello), + std::make_shared(hello), + std::make_shared(Buffer::FromString("long string; not inlined")), std::make_shared(ArrayFromJSON(int8(), "[1, 2, 3]")), ScalarFromJSON(map(int8(), utf8()), R"([[1, "foo"], [2, "bar"]])"), std::make_shared(ArrayFromJSON(int8(), "[1, 1, 2, 2, 3, 3]")), @@ -647,13 +652,14 @@ TEST_F(TestArray, TestMakeArrayFromScalar) { for (int64_t length : {16}) { for (auto scalar : scalars) { + ARROW_SCOPED_TRACE("scalar type: ", scalar->type->ToString()); ASSERT_OK_AND_ASSIGN(auto array, MakeArrayFromScalar(*scalar, length)); ASSERT_OK(array->ValidateFull()); ASSERT_EQ(array->length(), length); ASSERT_EQ(array->null_count(), 0); // test case for ARROW-13321 - for (int64_t i : std::vector{0, length / 2, length - 1}) { + for (int64_t i : {int64_t{0}, 
length / 2, length - 1}) { ASSERT_OK_AND_ASSIGN(auto s, array->GetScalar(i)); AssertScalarsEqual(*s, *scalar, /*verbose=*/true); } diff --git a/cpp/src/arrow/array/array_view_test.cc b/cpp/src/arrow/array/array_view_test.cc index 07dc3014e4029..97110ea97f3fc 100644 --- a/cpp/src/arrow/array/array_view_test.cc +++ b/cpp/src/arrow/array/array_view_test.cc @@ -126,6 +126,38 @@ TEST(TestArrayView, StringAsBinary) { CheckView(expected, arr); } +TEST(TestArrayView, StringViewAsBinaryView) { + for (auto json : { + R"(["foox", "barz", null])", + R"(["foox", "barz_not_inlined", null])", + }) { + auto arr = ArrayFromJSON(utf8_view(), json); + auto expected = ArrayFromJSON(binary_view(), json); + CheckView(arr, expected); + CheckView(expected, arr); + } +} + +TEST(TestArrayView, StringViewAsBinaryViewInStruct) { + auto padl = ArrayFromJSON(list(int16()), "[[0, -1], [], [42]]"); + auto padr = ArrayFromJSON(utf8(), R"(["foox", "barz", null])"); + + for (auto json : { + R"(["foox", "barz", null])", + R"(["foox", "barz_not_inlined", null])", + }) { + auto arr = + StructArray::Make({padl, ArrayFromJSON(utf8_view(), json), padr}, {"", "", ""}) + .ValueOrDie(); + auto expected = + StructArray::Make({padl, ArrayFromJSON(binary_view(), json), padr}, {"", "", ""}) + .ValueOrDie(); + + CheckView(arr, expected); + CheckView(expected, arr); + } +} + TEST(TestArrayView, PrimitiveWrongSize) { auto arr = ArrayFromJSON(int16(), "[0, -1, 42]"); CheckViewFails(arr, int8()); diff --git a/cpp/src/arrow/array/builder_base.cc b/cpp/src/arrow/array/builder_base.cc index 3000aea3e189a..d3502a0ab645a 100644 --- a/cpp/src/arrow/array/builder_base.cc +++ b/cpp/src/arrow/array/builder_base.cc @@ -96,10 +96,7 @@ namespace { template struct AppendScalarImpl { template - enable_if_t::value || is_decimal_type::value || - is_fixed_size_binary_type::value, - Status> - Visit(const T&) { + Status HandleFixedWidth(const T&) { auto builder = checked_cast::BuilderType*>(builder_); 
RETURN_NOT_OK(builder->Reserve(n_repeats_ * (scalars_end_ - scalars_begin_))); @@ -117,7 +114,16 @@ struct AppendScalarImpl { } template - enable_if_base_binary Visit(const T&) { + enable_if_t::value, Status> Visit(const T& t) { + return HandleFixedWidth(t); + } + + Status Visit(const FixedSizeBinaryType& t) { return HandleFixedWidth(t); } + Status Visit(const Decimal128Type& t) { return HandleFixedWidth(t); } + Status Visit(const Decimal256Type& t) { return HandleFixedWidth(t); } + + template + enable_if_has_string_view Visit(const T&) { int64_t data_size = 0; for (auto it = scalars_begin_; it != scalars_end_; ++it) { const auto& scalar = checked_cast::ScalarType&>(*it); diff --git a/cpp/src/arrow/array/builder_binary.cc b/cpp/src/arrow/array/builder_binary.cc index 571f450aab9c1..3ff22d4a3feeb 100644 --- a/cpp/src/arrow/array/builder_binary.cc +++ b/cpp/src/arrow/array/builder_binary.cc @@ -35,11 +35,70 @@ #include "arrow/util/checked_cast.h" #include "arrow/util/decimal.h" #include "arrow/util/logging.h" +#include "arrow/visit_data_inline.h" namespace arrow { using internal::checked_cast; +// ---------------------------------------------------------------------- +// Binary/StringView +BinaryViewBuilder::BinaryViewBuilder(const std::shared_ptr& type, + MemoryPool* pool) + : BinaryViewBuilder(pool) {} + +Status BinaryViewBuilder::AppendArraySlice(const ArraySpan& array, int64_t offset, + int64_t length) { + auto bitmap = array.GetValues(0, 0); + auto values = array.GetValues(1) + offset; + + int64_t out_of_line_total = 0, i = 0; + VisitNullBitmapInline( + array.buffers[0].data, array.offset, array.length, array.null_count, + [&] { + if (!values[i].is_inline()) { + out_of_line_total += static_cast(values[i].size()); + } + ++i; + }, + [&] { ++i; }); + + RETURN_NOT_OK(Reserve(length)); + RETURN_NOT_OK(ReserveData(out_of_line_total)); + + for (int64_t i = 0; i < length; i++) { + if (bitmap && !bit_util::GetBit(bitmap, array.offset + offset + i)) { + 
UnsafeAppendNull(); + continue; + } + + UnsafeAppend(util::FromBinaryView(values[i], array.GetVariadicBuffers().data())); + } + return Status::OK(); +} + +Status BinaryViewBuilder::FinishInternal(std::shared_ptr* out) { + ARROW_ASSIGN_OR_RAISE(auto null_bitmap, null_bitmap_builder_.FinishWithLength(length_)); + ARROW_ASSIGN_OR_RAISE(auto data, data_builder_.FinishWithLength(length_)); + BufferVector buffers = {null_bitmap, data}; + for (auto&& buffer : data_heap_builder_.Finish()) { + buffers.push_back(std::move(buffer)); + } + *out = ArrayData::Make(type(), length_, std::move(buffers), null_count_); + Reset(); + return Status::OK(); +} + +Status BinaryViewBuilder::ReserveData(int64_t length) { + return data_heap_builder_.Reserve(length); +} + +void BinaryViewBuilder::Reset() { + ArrayBuilder::Reset(); + data_builder_.Reset(); + data_heap_builder_.Reset(); +} + // ---------------------------------------------------------------------- // Fixed width binary @@ -125,8 +184,8 @@ const uint8_t* FixedSizeBinaryBuilder::GetValue(int64_t i) const { std::string_view FixedSizeBinaryBuilder::GetView(int64_t i) const { const uint8_t* data_ptr = byte_builder_.data(); - return std::string_view(reinterpret_cast(data_ptr + i * byte_width_), - byte_width_); + return {reinterpret_cast(data_ptr + i * byte_width_), + static_cast(byte_width_)}; } // ---------------------------------------------------------------------- @@ -173,10 +232,10 @@ Status ChunkedStringBuilder::Finish(ArrayVector* out) { RETURN_NOT_OK(ChunkedBinaryBuilder::Finish(out)); // Change data type to string/utf8 - for (size_t i = 0; i < out->size(); ++i) { - std::shared_ptr data = (*out)[i]->data(); + for (auto& chunk : *out) { + std::shared_ptr data = chunk->data()->Copy(); data->type = ::arrow::utf8(); - (*out)[i] = std::make_shared(data); + chunk = std::make_shared(std::move(data)); } return Status::OK(); } diff --git a/cpp/src/arrow/array/builder_binary.h b/cpp/src/arrow/array/builder_binary.h index 
b0c4fe2fc81fd..3e87cf2403610 100644 --- a/cpp/src/arrow/array/builder_binary.h +++ b/cpp/src/arrow/array/builder_binary.h @@ -36,6 +36,7 @@ #include "arrow/buffer_builder.h" #include "arrow/status.h" #include "arrow/type.h" +#include "arrow/util/binary_view_util.h" #include "arrow/util/macros.h" #include "arrow/util/visibility.h" @@ -204,10 +205,10 @@ class BaseBinaryBuilder } } } else { - for (std::size_t i = 0; i < values.size(); ++i) { + for (const auto& value : values) { UnsafeAppendNextOffset(); - value_data_builder_.UnsafeAppend( - reinterpret_cast(values[i].data()), values[i].size()); + value_data_builder_.UnsafeAppend(reinterpret_cast(value.data()), + value.size()); } } @@ -463,6 +464,256 @@ class ARROW_EXPORT LargeStringBuilder : public LargeBinaryBuilder { std::shared_ptr type() const override { return large_utf8(); } }; +// ---------------------------------------------------------------------- +// BinaryViewBuilder, StringViewBuilder +// +// These builders do not support building raw pointer view arrays. + +namespace internal { + +// We allocate medium-sized memory chunks and accumulate data in those, which +// may result in some waste if there are many large-ish strings. If a string +// comes along that does not fit into a block, we allocate a new block and +// write into that. +// +// Later we can implement optimizations to continuing filling underfull blocks +// after encountering a large string that required allocating a new block. 
+class ARROW_EXPORT StringHeapBuilder { + public: + static constexpr int64_t kDefaultBlocksize = 32 << 10; // 32KB + + StringHeapBuilder(MemoryPool* pool, int64_t alignment) + : pool_(pool), alignment_(alignment) {} + + void SetBlockSize(int64_t blocksize) { blocksize_ = blocksize; } + + using c_type = BinaryViewType::c_type; + + template + std::conditional_t, c_type> Append(const uint8_t* value, + int64_t length) { + if (length <= BinaryViewType::kInlineSize) { + return util::ToInlineBinaryView(value, static_cast(length)); + } + + if constexpr (Safe) { + ARROW_RETURN_NOT_OK(Reserve(length)); + } + + auto v = + util::ToBinaryView(value, static_cast(length), + static_cast(blocks_.size() - 1), current_offset_); + + memcpy(current_out_buffer_, value, static_cast(length)); + current_out_buffer_ += length; + current_remaining_bytes_ -= length; + current_offset_ += static_cast(length); + return v; + } + + static constexpr int64_t ValueSizeLimit() { + return std::numeric_limits::max(); + } + + /// \brief Ensure that the indicated number of bytes can be appended via + /// UnsafeAppend operations without the need to allocate more memory + Status Reserve(int64_t num_bytes) { + if (ARROW_PREDICT_FALSE(num_bytes > ValueSizeLimit())) { + return Status::CapacityError( + "BinaryView or StringView elements cannot reference " + "strings larger than 2GB"); + } + if (num_bytes > current_remaining_bytes_) { + // Ensure the buffer is fully overwritten to avoid leaking uninitialized + // bytes from the allocator + if (current_remaining_bytes_ > 0) { + std::memset(current_out_buffer_, 0, current_remaining_bytes_); + blocks_.back() = SliceBuffer(blocks_.back(), 0, + blocks_.back()->size() - current_remaining_bytes_); + } + current_remaining_bytes_ = num_bytes > blocksize_ ? 
num_bytes : blocksize_; + ARROW_ASSIGN_OR_RAISE(std::shared_ptr new_block, + AllocateBuffer(current_remaining_bytes_, alignment_, pool_)); + current_offset_ = 0; + current_out_buffer_ = new_block->mutable_data(); + blocks_.emplace_back(std::move(new_block)); + } + return Status::OK(); + } + + void Reset() { + current_offset_ = 0; + current_out_buffer_ = NULLPTR; + current_remaining_bytes_ = 0; + blocks_.clear(); + } + + int64_t current_remaining_bytes() const { return current_remaining_bytes_; } + + std::vector> Finish() { + current_offset_ = 0; + current_out_buffer_ = NULLPTR; + current_remaining_bytes_ = 0; + return std::move(blocks_); + } + + private: + MemoryPool* pool_; + int64_t alignment_; + int64_t blocksize_ = kDefaultBlocksize; + std::vector> blocks_; + + int32_t current_offset_ = 0; + uint8_t* current_out_buffer_ = NULLPTR; + int64_t current_remaining_bytes_ = 0; +}; + +} // namespace internal + +class ARROW_EXPORT BinaryViewBuilder : public ArrayBuilder { + public: + using TypeClass = BinaryViewType; + + // this constructor provided for MakeBuilder compatibility + BinaryViewBuilder(const std::shared_ptr&, MemoryPool* pool); + + explicit BinaryViewBuilder(MemoryPool* pool = default_memory_pool(), + int64_t alignment = kDefaultBufferAlignment) + : ArrayBuilder(pool, alignment), + data_builder_(pool, alignment), + data_heap_builder_(pool, alignment) {} + + /// Set the size for future preallocated data buffers. + /// + /// The default size is 32KB, so after each 32KB of string data appended to the builder + /// a new data buffer will be allocated. Adjust this to a larger value to decrease the + /// frequency of allocation, or to a smaller value to lower the overhead of each + /// allocation. + void SetBlockSize(int64_t blocksize) { data_heap_builder_.SetBlockSize(blocksize); } + + /// The number of bytes which can be appended to this builder without allocating another + /// data buffer. 
+ int64_t current_block_bytes_remaining() const { + return data_heap_builder_.current_remaining_bytes(); + } + + Status Append(const uint8_t* value, int64_t length) { + ARROW_RETURN_NOT_OK(Reserve(1)); + UnsafeAppendToBitmap(true); + ARROW_ASSIGN_OR_RAISE(auto v, + data_heap_builder_.Append(value, length)); + data_builder_.UnsafeAppend(v); + return Status::OK(); + } + + Status Append(const char* value, int64_t length) { + return Append(reinterpret_cast(value), length); + } + + Status Append(std::string_view value) { + return Append(value.data(), static_cast(value.size())); + } + + /// \brief Append without checking capacity + /// + /// Builder should have been presized using Reserve() and ReserveData(), + /// respectively, and the value must not be larger than 2GB + void UnsafeAppend(const uint8_t* value, int64_t length) { + UnsafeAppendToBitmap(true); + auto v = data_heap_builder_.Append(value, length); + data_builder_.UnsafeAppend(v); + } + + void UnsafeAppend(const char* value, int64_t length) { + UnsafeAppend(reinterpret_cast(value), length); + } + + void UnsafeAppend(const std::string& value) { + UnsafeAppend(value.c_str(), static_cast(value.size())); + } + + void UnsafeAppend(std::string_view value) { + UnsafeAppend(value.data(), static_cast(value.size())); + } + + /// \brief Ensures there is enough allocated available capacity in the + /// out-of-line data heap to append the indicated number of bytes without + /// additional allocations + Status ReserveData(int64_t length); + + Status AppendNulls(int64_t length) final { + ARROW_RETURN_NOT_OK(Reserve(length)); + data_builder_.UnsafeAppend(length, BinaryViewType::c_type{}); + UnsafeSetNull(length); + return Status::OK(); + } + + /// \brief Append a single null element + Status AppendNull() final { + ARROW_RETURN_NOT_OK(Reserve(1)); + data_builder_.UnsafeAppend(BinaryViewType::c_type{}); + UnsafeAppendToBitmap(false); + return Status::OK(); + } + + /// \brief Append a empty element (length-0 inline string) + 
Status AppendEmptyValue() final { + ARROW_RETURN_NOT_OK(Reserve(1)); + data_builder_.UnsafeAppend(BinaryViewType::c_type{}); + UnsafeAppendToBitmap(true); + return Status::OK(); + } + + /// \brief Append several empty elements + Status AppendEmptyValues(int64_t length) final { + ARROW_RETURN_NOT_OK(Reserve(length)); + data_builder_.UnsafeAppend(length, BinaryViewType::c_type{}); + UnsafeSetNotNull(length); + return Status::OK(); + } + + void UnsafeAppendNull() { + data_builder_.UnsafeAppend(BinaryViewType::c_type{}); + UnsafeAppendToBitmap(false); + } + + void UnsafeAppendEmptyValue() { + data_builder_.UnsafeAppend(BinaryViewType::c_type{}); + UnsafeAppendToBitmap(true); + } + + /// \brief Append a slice of a BinaryViewArray passed as an ArraySpan. Copies + /// the underlying out-of-line string memory to avoid memory lifetime issues + Status AppendArraySlice(const ArraySpan& array, int64_t offset, + int64_t length) override; + + void Reset() override; + + Status Resize(int64_t capacity) override { + ARROW_RETURN_NOT_OK(CheckCapacity(capacity)); + capacity = std::max(capacity, kMinBuilderCapacity); + ARROW_RETURN_NOT_OK(data_builder_.Resize(capacity)); + return ArrayBuilder::Resize(capacity); + } + + Status FinishInternal(std::shared_ptr* out) override; + + std::shared_ptr type() const override { return binary_view(); } + + protected: + TypedBufferBuilder data_builder_; + + // Accumulates out-of-line data in fixed-size chunks which are then attached + // to the resulting ArrayData + internal::StringHeapBuilder data_heap_builder_; +}; + +class ARROW_EXPORT StringViewBuilder : public BinaryViewBuilder { + public: + using BinaryViewBuilder::BinaryViewBuilder; + std::shared_ptr type() const override { return utf8_view(); } +}; + // ---------------------------------------------------------------------- // FixedSizeBinaryBuilder @@ -498,7 +749,7 @@ class ARROW_EXPORT FixedSizeBinaryBuilder : public ArrayBuilder { Status Append(const Buffer& s) { 
ARROW_RETURN_NOT_OK(Reserve(1)); - UnsafeAppend(std::string_view(s)); + UnsafeAppend(s); return Status::OK(); } @@ -549,7 +800,7 @@ class ARROW_EXPORT FixedSizeBinaryBuilder : public ArrayBuilder { UnsafeAppend(reinterpret_cast(value.data())); } - void UnsafeAppend(const Buffer& s) { UnsafeAppend(std::string_view(s)); } + void UnsafeAppend(const Buffer& s) { UnsafeAppend(std::string_view{s}); } void UnsafeAppend(const std::shared_ptr& s) { UnsafeAppend(*s); } diff --git a/cpp/src/arrow/array/builder_dict.cc b/cpp/src/arrow/array/builder_dict.cc index 525b0afbc908a..7a96463ec3c43 100644 --- a/cpp/src/arrow/array/builder_dict.cc +++ b/cpp/src/arrow/array/builder_dict.cc @@ -194,6 +194,12 @@ Status DictionaryMemoTable::GetOrInsert(const BinaryType*, std::string_view valu return impl_->GetOrInsert(value, out); } +Status DictionaryMemoTable::GetOrInsert(const BinaryViewType*, std::string_view value, + int32_t* out) { + // Create BinaryArray dictionary for now + return impl_->GetOrInsert(value, out); +} + Status DictionaryMemoTable::GetOrInsert(const LargeBinaryType*, std::string_view value, int32_t* out) { return impl_->GetOrInsert(value, out); diff --git a/cpp/src/arrow/array/builder_dict.h b/cpp/src/arrow/array/builder_dict.h index cb0aaf309915b..3f0d711dc5bb5 100644 --- a/cpp/src/arrow/array/builder_dict.h +++ b/cpp/src/arrow/array/builder_dict.h @@ -60,6 +60,12 @@ struct DictionaryValue> { BinaryType, LargeBinaryType>::type; }; +template +struct DictionaryValue> { + using type = std::string_view; + using PhysicalType = BinaryViewType; +}; + template struct DictionaryValue> { using type = std::string_view; @@ -114,6 +120,7 @@ class ARROW_EXPORT DictionaryMemoTable { Status GetOrInsert(const BinaryType*, std::string_view value, int32_t* out); Status GetOrInsert(const LargeBinaryType*, std::string_view value, int32_t* out); + Status GetOrInsert(const BinaryViewType*, std::string_view value, int32_t* out); class DictionaryMemoTableImpl; std::unique_ptr impl_; diff --git 
a/cpp/src/arrow/array/concatenate.cc b/cpp/src/arrow/array/concatenate.cc index f7549fa9d1d1a..37c7271b5b95c 100644 --- a/cpp/src/arrow/array/concatenate.cc +++ b/cpp/src/arrow/array/concatenate.cc @@ -43,6 +43,7 @@ #include "arrow/util/int_util_overflow.h" #include "arrow/util/logging.h" #include "arrow/util/ree_util.h" +#include "arrow/visit_data_inline.h" #include "arrow/visit_type_inline.h" namespace arrow { @@ -230,6 +231,45 @@ class ConcatenateImpl { return ConcatenateBuffers(value_buffers, pool_).Value(&out_->buffers[2]); } + Status Visit(const BinaryViewType& type) { + out_->buffers.resize(2); + + for (const auto& in_data : in_) { + for (const auto& buf : util::span(in_data->buffers).subspan(2)) { + out_->buffers.push_back(buf); + } + } + + ARROW_ASSIGN_OR_RAISE(auto view_buffers, Buffers(1, BinaryViewType::kSize)); + ARROW_ASSIGN_OR_RAISE(auto view_buffer, ConcatenateBuffers(view_buffers, pool_)); + + auto* views = view_buffer->mutable_data_as(); + size_t preceding_buffer_count = 0; + + int64_t i = in_[0]->length; + for (size_t in_index = 1; in_index < in_.size(); ++in_index) { + preceding_buffer_count += in_[in_index - 1]->buffers.size() - 2; + + for (int64_t end_i = i + in_[in_index]->length; i < end_i; ++i) { + if (views[i].is_inline()) continue; + views[i].ref.buffer_index = SafeSignedAdd( + views[i].ref.buffer_index, static_cast(preceding_buffer_count)); + } + } + + if (out_->buffers[0] != nullptr) { + i = in_[0]->length; + VisitNullBitmapInline( + out_->buffers[0]->data(), i, out_->length - i, out_->null_count, [&] { ++i; }, + [&] { + views[i++] = {}; // overwrite views under null bits with an empty view + }); + } + + out_->buffers[1] = std::move(view_buffer); + return Status::OK(); + } + Status Visit(const ListType&) { std::vector value_ranges; ARROW_ASSIGN_OR_RAISE(auto index_buffers, Buffers(1, sizeof(int32_t))); diff --git a/cpp/src/arrow/array/concatenate_test.cc b/cpp/src/arrow/array/concatenate_test.cc index 4c03fab731ffe..0ef1136ea78f8 100644 
--- a/cpp/src/arrow/array/concatenate_test.cc +++ b/cpp/src/arrow/array/concatenate_test.cc @@ -92,9 +92,15 @@ class ConcatenateTest : public ::testing::Test { for (auto null_probability : this->null_probabilities_) { std::shared_ptr array; factory(size, null_probability, &array); + ASSERT_OK(array->ValidateFull()); auto expected = array->Slice(offsets.front(), offsets.back() - offsets.front()); + ASSERT_OK(expected->ValidateFull()); auto slices = this->Slices(array, offsets); + for (auto slice : slices) { + ASSERT_OK(slice->ValidateFull()); + } ASSERT_OK_AND_ASSIGN(auto actual, Concatenate(slices)); + ASSERT_OK(actual->ValidateFull()); AssertArraysEqual(*expected, *actual); if (actual->data()->buffers[0]) { CheckTrailingBitsAreZeroed(actual->data()->buffers[0], actual->length()); @@ -155,6 +161,14 @@ TEST_F(ConcatenateTest, StringType) { }); } +TEST_F(ConcatenateTest, StringViewType) { + Check([this](int32_t size, double null_probability, std::shared_ptr* out) { + *out = rng_.StringView(size, /*min_length =*/0, /*max_length =*/40, null_probability, + /*max_buffer_length=*/200); + ASSERT_OK((**out).ValidateFull()); + }); +} + TEST_F(ConcatenateTest, LargeStringType) { Check([this](int32_t size, double null_probability, std::shared_ptr* out) { *out = diff --git a/cpp/src/arrow/array/data.cc b/cpp/src/arrow/array/data.cc index 79595ab7c7c31..186682be3009e 100644 --- a/cpp/src/arrow/array/data.cc +++ b/cpp/src/arrow/array/data.cc @@ -31,6 +31,7 @@ #include "arrow/status.h" #include "arrow/type.h" #include "arrow/type_traits.h" +#include "arrow/util/binary_view_util.h" #include "arrow/util/bitmap_ops.h" #include "arrow/util/logging.h" #include "arrow/util/macros.h" @@ -92,6 +93,11 @@ bool RunEndEncodedMayHaveLogicalNulls(const ArrayData& data) { return ArraySpan(data).MayHaveLogicalNulls(); } +BufferSpan PackVariadicBuffers(util::span> buffers) { + return {const_cast(reinterpret_cast(buffers.data())), + static_cast(buffers.size() * sizeof(std::shared_ptr))}; +} + } // 
namespace internal std::shared_ptr ArrayData::Make(std::shared_ptr type, int64_t length, @@ -187,7 +193,7 @@ void ArraySpan::SetMembers(const ArrayData& data) { } this->offset = data.offset; - for (int i = 0; i < static_cast(data.buffers.size()); ++i) { + for (int i = 0; i < std::min(static_cast(data.buffers.size()), 3); ++i) { const std::shared_ptr& buffer = data.buffers[i]; // It is the invoker-of-kernels's responsibility to ensure that // const buffers are not written to accidentally. @@ -200,7 +206,7 @@ void ArraySpan::SetMembers(const ArrayData& data) { Type::type type_id = this->type->id(); if (type_id == Type::EXTENSION) { - const ExtensionType* ext_type = checked_cast(this->type); + auto* ext_type = checked_cast(this->type); type_id = ext_type->storage_type()->id(); } @@ -215,6 +221,11 @@ void ArraySpan::SetMembers(const ArrayData& data) { this->buffers[i] = {}; } + if (type_id == Type::STRING_VIEW || type_id == Type::BINARY_VIEW) { + // store the span of data buffers in the third buffer + this->buffers[2] = internal::PackVariadicBuffers(util::span(data.buffers).subspan(2)); + } + if (type_id == Type::DICTIONARY) { this->child_data.resize(1); this->child_data[0].SetMembers(*data.dictionary); @@ -247,6 +258,8 @@ int GetNumBuffers(const DataType& type) { case Type::LARGE_BINARY: case Type::STRING: case Type::LARGE_STRING: + case Type::STRING_VIEW: + case Type::BINARY_VIEW: case Type::DENSE_UNION: return 3; case Type::EXTENSION: @@ -351,6 +364,19 @@ void ArraySpan::FillFromScalar(const Scalar& value) { } this->buffers[2].data = const_cast(data_buffer); this->buffers[2].size = data_size; + } else if (type_id == Type::BINARY_VIEW || type_id == Type::STRING_VIEW) { + const auto& scalar = checked_cast(value); + + this->buffers[1].size = BinaryViewType::kSize; + this->buffers[1].data = scalar.scratch_space_; + static_assert(sizeof(BinaryViewType::c_type) <= sizeof(scalar.scratch_space_)); + auto* view = new (&scalar.scratch_space_) BinaryViewType::c_type; + if 
(scalar.is_valid) { + *view = util::ToBinaryView(std::string_view{*scalar.value}, 0, 0); + this->buffers[2] = internal::PackVariadicBuffers({&scalar.value, 1}); + } else { + *view = {}; + } } else if (type_id == Type::FIXED_SIZE_BINARY) { const auto& scalar = checked_cast(value); this->buffers[1].data = const_cast(scalar.value->data()); @@ -513,6 +539,14 @@ std::shared_ptr ArraySpan::ToArrayData() const { type_id = ext_type->storage_type()->id(); } + if (HasVariadicBuffers()) { + DCHECK_EQ(result->buffers.size(), 3); + result->buffers.pop_back(); + for (const auto& data_buffer : GetVariadicBuffers()) { + result->buffers.push_back(data_buffer); + } + } + if (type_id == Type::NA) { result->null_count = this->length; } else if (this->buffers[0].data == nullptr) { @@ -531,6 +565,16 @@ std::shared_ptr ArraySpan::ToArrayData() const { return result; } +util::span> ArraySpan::GetVariadicBuffers() const { + DCHECK(HasVariadicBuffers()); + return {buffers[2].data_as>(), + static_cast(buffers[2].size) / sizeof(std::shared_ptr)}; +} + +bool ArraySpan::HasVariadicBuffers() const { + return type->id() == Type::BINARY_VIEW || type->id() == Type::STRING_VIEW; +} + std::shared_ptr ArraySpan::ToArray() const { return MakeArray(this->ToArrayData()); } @@ -722,7 +766,8 @@ struct ViewDataImpl { } RETURN_NOT_OK(CheckInputAvailable()); - const auto& in_spec = in_layouts[in_layout_idx].buffers[in_buffer_idx]; + const auto& in_layout = in_layouts[in_layout_idx]; + const auto& in_spec = in_layout.buffers[in_buffer_idx]; if (out_spec != in_spec) { return InvalidView("incompatible layouts"); } @@ -733,6 +778,18 @@ struct ViewDataImpl { DCHECK_GT(in_data_item->buffers.size(), in_buffer_idx); out_buffers.push_back(in_data_item->buffers[in_buffer_idx]); ++in_buffer_idx; + + if (in_buffer_idx == in_layout.buffers.size()) { + if (out_layout.variadic_spec != in_layout.variadic_spec) { + return InvalidView("incompatible layouts"); + } + + if (in_layout.variadic_spec) { + for (; in_buffer_idx < 
in_data_item->buffers.size(); ++in_buffer_idx) { + out_buffers.push_back(in_data_item->buffers[in_buffer_idx]); + } + } + } AdjustInputPointer(); } diff --git a/cpp/src/arrow/array/data.h b/cpp/src/arrow/array/data.h index 8c6b250b71adf..40a77640cd1e5 100644 --- a/cpp/src/arrow/array/data.h +++ b/cpp/src/arrow/array/data.h @@ -28,6 +28,7 @@ #include "arrow/type.h" #include "arrow/util/bit_util.h" #include "arrow/util/macros.h" +#include "arrow/util/span.h" #include "arrow/util/visibility.h" namespace arrow { @@ -472,10 +473,12 @@ struct ARROW_EXPORT ArraySpan { void SetSlice(int64_t offset, int64_t length) { this->offset = offset; this->length = length; - if (this->type->id() != Type::NA) { + if (this->type->id() == Type::NA) { + this->null_count = this->length; + } else if (this->MayHaveNulls()) { this->null_count = kUnknownNullCount; } else { - this->null_count = this->length; + this->null_count = 0; } } @@ -530,6 +533,16 @@ struct ARROW_EXPORT ArraySpan { /// \see GetNullCount int64_t ComputeLogicalNullCount() const; + /// Some DataTypes (StringView, BinaryView) may have an arbitrary number of variadic + /// buffers. Since ArraySpan only has 3 buffers, we pack the variadic buffers into + /// buffers[2]; IE buffers[2].data points to the first shared_ptr of the + /// variadic set and buffers[2].size is the number of variadic buffers times + /// sizeof(shared_ptr). 
+ /// + /// \see HasVariadicBuffers + util::span> GetVariadicBuffers() const; + bool HasVariadicBuffers() const; + private: ARROW_FRIEND_EXPORT friend bool internal::IsNullRunEndEncoded(const ArrayData& span, int64_t i); diff --git a/cpp/src/arrow/array/dict_internal.h b/cpp/src/arrow/array/dict_internal.h index 3c1c8c453d1e7..63d8583b17324 100644 --- a/cpp/src/arrow/array/dict_internal.h +++ b/cpp/src/arrow/array/dict_internal.h @@ -150,6 +150,32 @@ struct DictionaryTraits> { } }; +template +struct DictionaryTraits> { + using MemoTableType = typename HashTraits::MemoTableType; + + static_assert(std::is_same_v>); + + // Instead of defining a custom memo table for StringView we reuse BinaryType's, + // then convert to views when we copy data out of the memo table. + static Result> GetDictionaryArrayData( + MemoryPool* pool, const std::shared_ptr& type, + const MemoTableType& memo_table, int64_t start_offset) { + DCHECK(type->id() == Type::STRING_VIEW || type->id() == Type::BINARY_VIEW); + + BinaryViewBuilder builder(pool); + RETURN_NOT_OK(builder.Resize(memo_table.size() - start_offset)); + RETURN_NOT_OK(builder.ReserveData(memo_table.values_size())); + memo_table.VisitValues(static_cast(start_offset), + [&](std::string_view s) { builder.UnsafeAppend(s); }); + + std::shared_ptr out; + RETURN_NOT_OK(builder.FinishInternal(&out)); + out->type = type; + return out; + } +}; + template struct DictionaryTraits> { using MemoTableType = typename HashTraits::MemoTableType; diff --git a/cpp/src/arrow/array/diff.cc b/cpp/src/arrow/array/diff.cc index 800f19b752726..be9597e59b378 100644 --- a/cpp/src/arrow/array/diff.cc +++ b/cpp/src/arrow/array/diff.cc @@ -21,6 +21,7 @@ #include #include #include +#include #include #include #include @@ -32,6 +33,7 @@ #include "arrow/array/array_decimal.h" #include "arrow/array/array_nested.h" #include "arrow/array/array_primitive.h" +#include "arrow/array/array_run_end.h" #include "arrow/buffer.h" #include "arrow/buffer_builder.h" #include 
"arrow/extension_type.h" @@ -43,7 +45,9 @@ #include "arrow/util/checked_cast.h" #include "arrow/util/logging.h" #include "arrow/util/range.h" +#include "arrow/util/ree_util.h" #include "arrow/util/string.h" +#include "arrow/util/unreachable.h" #include "arrow/vendored/datetime.h" #include "arrow/visit_type_inline.h" @@ -96,45 +100,247 @@ static UnitSlice GetView(const UnionArray& array, int64_t index) { return UnitSlice{&array, index}; } -using ValueComparator = std::function; +/// \brief A simple virtual comparator interface for two arrays. +/// +/// The base and target array ara bound at construction time. Then +/// Equals(base_index, target_index) should return true if the values +/// at the given indices are equal. +struct ValueComparator { + virtual ~ValueComparator() = default; + + /// \brief Compare the validity and values at the given indices in the base and target + /// arrays. + /// + /// \param base_index The index in the base array. + /// \param target_index The index in the target array. + /// \return true if the values at the given indices are equal, false otherwise. + /// \pre base_index and target_index are valid indices in their respective arrays. + virtual bool Equals(int64_t base_index, int64_t target_index) = 0; + + /// \brief Return the run length of equal values starting at the given indices in the + /// base and target arrays. + /// + /// \param base_index The starting index in the base array. + /// \param base_length The length of the base array. + /// \param target_index The starting index in the target array. + /// \param target_length The length of the target array. + /// \return The run length of equal values starting at the given indices in the base + /// and target arrays. 
+ virtual int64_t RunLengthOfEqualsFrom(int64_t base_index, int64_t base_length, + int64_t target_index, int64_t target_length) { + int64_t run_length_of_equals = 0; + while (base_index < base_length && target_index < target_length) { + if (!Equals(base_index, target_index)) { + break; + } + base_index += 1; + target_index += 1; + run_length_of_equals += 1; + } + return run_length_of_equals; + } +}; + +template +struct DefaultValueComparator : public ValueComparator { + const ArrayType& base; + const ArrayType& target; + + DefaultValueComparator(const ArrayType& base, const ArrayType& target) + : base(base), target(target) {} + + ~DefaultValueComparator() override = default; + + bool Equals(int64_t base_index, int64_t target_index) override { + const bool base_valid = base.IsValid(base_index); + const bool target_valid = target.IsValid(target_index); + if (base_valid && target_valid) { + return GetView(base, base_index) == GetView(target, target_index); + } + return base_valid == target_valid; + } +}; + +template +class REEValueComparator : public ValueComparator { + private: + const RunEndEncodedArray& base_; + const RunEndEncodedArray& target_; + std::unique_ptr inner_value_comparator_; + ree_util::PhysicalIndexFinder base_physical_index_finder_; + ree_util::PhysicalIndexFinder target_physical_index_finder_; + + public: + REEValueComparator(const RunEndEncodedArray& base, const RunEndEncodedArray& target, + std::unique_ptr&& inner_value_comparator) + : base_(base), + target_(target), + inner_value_comparator_(std::move(inner_value_comparator)), + base_physical_index_finder_(*base_.data()), + target_physical_index_finder_(*target_.data()) { + DCHECK_EQ(*base_.type(), *target_.type()); + } + + ~REEValueComparator() override = default; + + private: + /// \pre 0 <= i < base_.length() + inline int64_t FindPhysicalIndexOnBase(int64_t i) { + return base_physical_index_finder_.FindPhysicalIndex(i); + } + + /// \pre 0 <= i < target_.length() + inline int64_t 
FindPhysicalIndexOnTarget(int64_t i) { + return target_physical_index_finder_.FindPhysicalIndex(i); + } + + const RunEndCType* base_run_ends() { return base_physical_index_finder_.run_ends; } + + const RunEndCType* target_run_ends() { return target_physical_index_finder_.run_ends; } + + public: + int64_t RunLengthOfEqualsFrom(int64_t base_index, int64_t base_length, + int64_t target_index, int64_t target_length) override { + // Ensure the first search for physical index on the values arrays is safe. + if (base_index >= base_length || target_index >= target_length) { + // Without values on either side, there is no run of equal values. + return 0; + } + + // Translate the two logical indices into physical indices. + int64_t physical_base_index = FindPhysicalIndexOnBase(base_index); + int64_t physical_target_index = FindPhysicalIndexOnTarget(target_index); + + int64_t run_length_of_equals = 0; + // The loop invariant (base_index < base_length && target_index < target_length) + // is valid when the loop starts because of the check above. + for (;;) { + const auto base_run_end = + static_cast(base_run_ends()[physical_base_index]) - base_.offset(); + const auto target_run_end = + static_cast(target_run_ends()[physical_target_index]) - + target_.offset(); + // The end of the runs containing the logical indices, by definition, ends + // after the logical indices. + DCHECK_LT(base_index, base_run_end); + DCHECK_LT(target_index, target_run_end); + + // Compare the physical values that make up the runs containing base_index + // and target_index. + if (!inner_value_comparator_->Equals(physical_base_index, physical_target_index)) { + // First difference found, stop because the run of equal values cannot + // be extended further. 
+ break; + } + + const int64_t base_run = std::min(base_run_end, base_length) - base_index; + const int64_t target_run = std::min(target_run_end, target_length) - target_index; + // Due to the loop-invariant (base_index < base_length && target_index < + // target_length) and properties of the run-ends asserted above, both base_run and + // target_run are strictly greater than zero. + DCHECK_GT(base_run, 0); + DCHECK_GT(target_run, 0); + + // Skip the smallest run (or both runs if they are equal) + const int64_t increment = std::min(base_run, target_run); + physical_base_index += increment == base_run; + physical_target_index += increment == target_run; + + // Since both base_run and target_run are greater than zero, + // increment is also greater than zero... + DCHECK_GT(increment, 0); + // ...which implies that the loop will make progress and eventually terminate + // because base_index or target_index will equal base_length or target_length, + // respectively. + base_index += increment; + target_index += increment; + // The value representing the two runs are equal, so we can assume that at + // least `increment` (size of smallest run) values are equal. 
+ run_length_of_equals += increment; + + if (base_index >= base_length || target_index >= target_length) { + break; + } + } + + return run_length_of_equals; + } + + bool Equals(int64_t base_index, int64_t target_index) override { + const int64_t physical_base_index = FindPhysicalIndexOnBase(base_index); + const int64_t physical_target_index = FindPhysicalIndexOnTarget(target_index); + return inner_value_comparator_->Equals(physical_base_index, physical_target_index); + } +}; + +class ValueComparatorFactory { + private: + std::unique_ptr comparator_; -struct ValueComparatorVisitor { + public: template - Status Visit(const T&) { + Status Visit(const T&, const Array& base, const Array& target) { using ArrayType = typename TypeTraits::ArrayType; - out = [](const Array& base, int64_t base_index, const Array& target, - int64_t target_index) { - return (GetView(checked_cast(base), base_index) == - GetView(checked_cast(target), target_index)); - }; + comparator_ = std::make_unique>( + checked_cast(base), checked_cast(target)); return Status::OK(); } - Status Visit(const NullType&) { return Status::NotImplemented("null type"); } + Status Visit(const NullType&, const Array&, const Array&) { + return Status::NotImplemented("null type"); + } - Status Visit(const ExtensionType&) { return Status::NotImplemented("extension type"); } + Status Visit(const ExtensionType&, const Array&, const Array&) { + return Status::NotImplemented("extension type"); + } - Status Visit(const DictionaryType&) { + Status Visit(const DictionaryType&, const Array& base, const Array& target) { return Status::NotImplemented("dictionary type"); } - Status Visit(const RunEndEncodedType&) { - return Status::NotImplemented("run-end encoded type"); + Status Visit(const RunEndEncodedType& ree_type, const Array& base, + const Array& target) { + const auto& base_ree = checked_cast(base); + const auto& target_ree = checked_cast(target); + + ARROW_ASSIGN_OR_RAISE( + auto inner_values_comparator, + 
Create(*ree_type.value_type(), *base_ree.values(), *target_ree.values())); + + // Instantiate the specialized comparator types with operator new instead of + // make_unique() to avoid binary bloat. unique_ptr's constructor is templated + // on the type of the deleter and we're fine with destructor calls being virtually + // dispatched via ValueComparator. + ValueComparator* ree_value_comparator = nullptr; + switch (ree_type.run_end_type()->id()) { + case Type::INT16: + ree_value_comparator = new REEValueComparator( + base_ree, target_ree, std::move(inner_values_comparator)); + break; + case Type::INT32: + ree_value_comparator = new REEValueComparator( + base_ree, target_ree, std::move(inner_values_comparator)); + break; + case Type::INT64: + ree_value_comparator = new REEValueComparator( + base_ree, target_ree, std::move(inner_values_comparator)); + break; + default: + Unreachable(); + } + comparator_.reset(ree_value_comparator); + return Status::OK(); } - ValueComparator Create(const DataType& type) { - DCHECK_OK(VisitTypeInline(type, this)); - return out; + static Result> Create(const DataType& type, + const Array& base, + const Array& target) { + ValueComparatorFactory self; + RETURN_NOT_OK(VisitTypeInline(type, &self, base, target)); + return std::move(self.comparator_); } - - ValueComparator out; }; -ValueComparator GetValueComparator(const DataType& type) { - ValueComparatorVisitor type_visitor; - return type_visitor.Create(type); -} - // represents an intermediate state in the comparison of two arrays struct EditPoint { int64_t base, target; @@ -161,33 +367,9 @@ struct EditPoint { class QuadraticSpaceMyersDiff { public: QuadraticSpaceMyersDiff(const Array& base, const Array& target, MemoryPool* pool) - : base_(base), - target_(target), - pool_(pool), - value_comparator_(GetValueComparator(*base.type())), - base_begin_(0), - base_end_(base.length()), - target_begin_(0), - target_end_(target.length()), - endpoint_base_({ExtendFrom({base_begin_, 
target_begin_}).base}), - insert_({true}) { - if ((base_end_ - base_begin_ == target_end_ - target_begin_) && - endpoint_base_[0] == base_end_) { - // trivial case: base == target - finish_index_ = 0; - } - } - - bool ValuesEqual(int64_t base_index, int64_t target_index) const { - bool base_null = base_.IsNull(base_index); - bool target_null = target_.IsNull(target_index); - if (base_null || target_null) { - // If only one is null, then this is false, otherwise true - return base_null && target_null; - } - return value_comparator_(base_, base_index, target_, target_index); - } + : base_(base), target_(target), pool_(pool) {} + private: // increment the position within base (the element pointed to was deleted) // then extend maximally EditPoint DeleteOne(EditPoint p) const { @@ -209,11 +391,10 @@ class QuadraticSpaceMyersDiff { // increment the position within base and target (the elements skipped in this way were // present in both sequences) EditPoint ExtendFrom(EditPoint p) const { - for (; p.base != base_end_ && p.target != target_end_; ++p.base, ++p.target) { - if (!ValuesEqual(p.base, p.target)) { - break; - } - } + const int64_t run_length_of_equals = + _comparator->RunLengthOfEqualsFrom(p.base, base_end_, p.target, target_end_); + p.base += run_length_of_equals; + p.target += run_length_of_equals; return p; } @@ -321,7 +502,24 @@ class QuadraticSpaceMyersDiff { {field("insert", boolean()), field("run_length", int64())}); } + public: Result> Diff() { + base_begin_ = 0; + base_end_ = base_.length(); + target_begin_ = 0; + target_end_ = target_.length(); + ARROW_ASSIGN_OR_RAISE(_comparator, + ValueComparatorFactory::Create(*base_.type(), base_, target_)); + + finish_index_ = -1; + edit_count_ = 0; + endpoint_base_ = {ExtendFrom({base_begin_, target_begin_}).base}; + insert_ = {true}; + if ((base_end_ - base_begin_ == target_end_ - target_begin_) && + endpoint_base_[0] == base_end_) { + // trivial case: base == target + finish_index_ = 0; + } while (!Done()) { 
Next(); } @@ -329,14 +527,19 @@ class QuadraticSpaceMyersDiff { } private: + // Constructor-injected references const Array& base_; const Array& target_; MemoryPool* pool_; - ValueComparator value_comparator_; + + // Initialized on Diff() and immutable thereafter + int64_t base_begin_ = 0, base_end_ = -1; + int64_t target_begin_ = 0, target_end_ = -1; + std::unique_ptr _comparator; + + // Initialized on Next() and mutated throughout the diffing process int64_t finish_index_ = -1; int64_t edit_count_ = 0; - int64_t base_begin_, base_end_; - int64_t target_begin_, target_end_; // each element of endpoint_base_ is the furthest position in base reachable given an // edit_count and (# insertions) - (# deletions). Each bit of insert_ records whether // the corresponding furthest position was reached via an insertion or a deletion @@ -386,8 +589,6 @@ Result> Diff(const Array& base, const Array& target return Diff(*base_storage, *target_storage, pool); } else if (base.type()->id() == Type::DICTIONARY) { return Status::NotImplemented("diffing arrays of type ", *base.type()); - } else if (base.type()->id() == Type::RUN_END_ENCODED) { - return Status::NotImplemented("diffing arrays of type ", *base.type()); } else { return QuadraticSpaceMyersDiff(base, target, pool).Diff(); } @@ -476,30 +677,31 @@ class MakeFormatterImpl { return Status::OK(); } - // format Binary, LargeBinary and FixedSizeBinary in hexadecimal template - enable_if_binary_like Visit(const T&) { + enable_if_has_string_view Visit(const T&) { using ArrayType = typename TypeTraits::ArrayType; impl_ = [](const Array& array, int64_t index, std::ostream* os) { - *os << HexEncode(checked_cast(array).GetView(index)); + std::string_view view = checked_cast(array).GetView(index); + if constexpr (T::is_utf8) { + // format String and StringView with \"\n\r\t\\ escaped + *os << '"' << Escape(view) << '"'; + } else { + // format Binary, LargeBinary, BinaryView, and FixedSizeBinary in hexadecimal + *os << HexEncode(view); + 
} }; return Status::OK(); } - // format Strings with \"\n\r\t\\ escaped + // format Decimals with Decimal___Array::FormatValue template - enable_if_string_like Visit(const T&) { - using ArrayType = typename TypeTraits::ArrayType; + enable_if_decimal Visit(const T&) { impl_ = [](const Array& array, int64_t index, std::ostream* os) { - *os << "\"" << Escape(checked_cast(array).GetView(index)) << "\""; - }; - return Status::OK(); - } - - // format Decimals with Decimal128Array::FormatValue - Status Visit(const Decimal128Type&) { - impl_ = [](const Array& array, int64_t index, std::ostream* os) { - *os << checked_cast(array).FormatValue(index); + if constexpr (T::type_id == Type::DECIMAL128) { + *os << checked_cast(array).FormatValue(index); + } else { + *os << checked_cast(array).FormatValue(index); + } }; return Status::OK(); } diff --git a/cpp/src/arrow/array/diff_test.cc b/cpp/src/arrow/array/diff_test.cc index e322006732f9a..145978a91ad54 100644 --- a/cpp/src/arrow/array/diff_test.cc +++ b/cpp/src/arrow/array/diff_test.cc @@ -163,6 +163,107 @@ class DiffTest : public ::testing::Test { AssertRunLengthIs("[2, 0, 0]"); } + std::shared_ptr RunEndEncodedArrayFromJSON( + int64_t logical_length, const std::shared_ptr& ree_type, + std::string_view run_ends_json, std::string_view values_json, + int64_t logical_offset = 0) { + auto& ree_type_ref = checked_cast(*ree_type); + auto run_ends = ArrayFromJSON(ree_type_ref.run_end_type(), run_ends_json); + auto values = ArrayFromJSON(ree_type_ref.value_type(), values_json); + return RunEndEncodedArray::Make(logical_length, std::move(run_ends), + std::move(values), logical_offset) + .ValueOrDie(); + } + + template + void TestBasicsWithREEs() { + auto run_end_type = std::make_shared(); + auto value_type = utf8(); + auto ree_type = run_end_encoded(run_end_type, value_type); + + // empty REEs + base_ = RunEndEncodedArrayFromJSON(0, ree_type, "[]", "[]"); + target_ = RunEndEncodedArrayFromJSON(0, ree_type, "[]", "[]"); + DoDiff(); + 
AssertInsertIs("[false]"); + AssertRunLengthIs("[0]"); + + // null REE arrays of different lengths + base_ = RunEndEncodedArrayFromJSON(2, ree_type, "[2]", "[null]"); + target_ = RunEndEncodedArrayFromJSON(4, ree_type, "[4]", "[null]"); + DoDiff(); + AssertInsertIs("[false, true, true]"); + AssertRunLengthIs("[2, 0, 0]"); + + // identical REE arrays w/ offsets + base_ = + RunEndEncodedArrayFromJSON(110, ree_type, R"([20, 120])", R"(["a", "b"])", 10); + target_ = + RunEndEncodedArrayFromJSON(110, ree_type, R"([20, 120])", R"(["a", "b"])", 10); + DoDiff(); + AssertInsertIs("[false]"); + AssertRunLengthIs("[110]"); + + // equivalent REE arrays + base_ = RunEndEncodedArrayFromJSON(120, ree_type, R"([10, 20, 120])", + R"(["a", "a", "b"])"); + target_ = RunEndEncodedArrayFromJSON(120, ree_type, R"([20, 30, 120])", + R"(["a", "b", "b"])"); + DoDiff(); + AssertInsertIs("[false]"); + AssertRunLengthIs("[120]"); + + // slice so last run-end goes beyond length + base_ = base_->Slice(5, 105); + target_ = target_->Slice(5, 105); + DoDiff(); + AssertInsertIs("[false]"); + AssertRunLengthIs("[105]"); + + // insert one + base_ = RunEndEncodedArrayFromJSON(12, ree_type, R"([2, 12])", R"(["a", "b"])"); + target_ = RunEndEncodedArrayFromJSON(13, ree_type, R"([3, 13])", R"(["a", "b"])"); + DoDiff(); + AssertInsertIs("[false, true]"); + AssertRunLengthIs("[2, 10]"); + + // delete one + base_ = + RunEndEncodedArrayFromJSON(13, ree_type, R"([2, 5, 13])", R"(["a", "b", "c"])"); + target_ = + RunEndEncodedArrayFromJSON(12, ree_type, R"([2, 4, 12])", R"(["a", "b", "c"])"); + DoDiff(); + AssertInsertIs("[false, false]"); + AssertRunLengthIs("[4, 8]"); + + // null out one + base_ = + RunEndEncodedArrayFromJSON(12, ree_type, R"([2, 5, 12])", R"(["a", "b", "c"])"); + target_ = RunEndEncodedArrayFromJSON(12, ree_type, R"([2, 4, 5, 12])", + R"(["a", "b", null, "c"])"); + DoDiff(); + AssertInsertIs("[false, false, true]"); + AssertRunLengthIs("[4, 0, 7]"); + + // append some + base_ = 
RunEndEncodedArrayFromJSON(12, ree_type, R"([2, 4, 8, 12])", + R"(["a", "b", "c", "d"])"); + target_ = RunEndEncodedArrayFromJSON(15, ree_type, R"([2, 4, 8, 13, 15])", + R"(["a", "b", "c", "d", "e"])"); + DoDiff(); + AssertInsertIs("[false, true, true, true]"); + AssertRunLengthIs("[12, 0, 0, 0]"); + + // prepend some + base_ = RunEndEncodedArrayFromJSON(12, ree_type, R"([2, 4, 8, 12])", + R"(["c", "d", "e", "f"])"); + target_ = RunEndEncodedArrayFromJSON(15, ree_type, R"([1, 3, 5, 7, 11, 15])", + R"(["a", "b", "c", "d", "e", "f"])"); + DoDiff(); + AssertInsertIs("[false, true, true, true]"); + AssertRunLengthIs("[0, 0, 0, 12]"); + } + random::RandomArrayGenerator rng_; std::shared_ptr edits_; std::shared_ptr base_, target_; @@ -415,6 +516,12 @@ TEST_F(DiffTest, BasicsWithSparseUnions) { TestBasicsWithUnions(UnionMode::SPARS TEST_F(DiffTest, BasicsWithDenseUnions) { TestBasicsWithUnions(UnionMode::DENSE); } +TEST_F(DiffTest, BasicsWithREEs) { + TestBasicsWithREEs(); + TestBasicsWithREEs(); + TestBasicsWithREEs(); +} + TEST_F(DiffTest, UnifiedDiffFormatter) { // no changes base_ = ArrayFromJSON(utf8(), R"(["give", "me", "a", "break"])"); @@ -596,6 +703,19 @@ TEST_F(DiffTest, UnifiedDiffFormatter) { @@ -5, +8 @@ +79 +11 +)"); + } + + for (const auto& type : { + decimal128(10, 4), + decimal256(10, 4), + }) { + base_ = ArrayFromJSON(type, R"(["123.4567", "-78.9000"])"); + target_ = ArrayFromJSON(type, R"(["123.4567", "-123.4567"])"); + AssertDiffAndFormat(R"( +@@ -1, +1 @@ +--78.9000 ++-123.4567 )"); } } diff --git a/cpp/src/arrow/array/util.cc b/cpp/src/arrow/array/util.cc index 98e9d51b5fc75..9ea2fc2b6f0a1 100644 --- a/cpp/src/arrow/array/util.cc +++ b/cpp/src/arrow/array/util.cc @@ -43,6 +43,9 @@ #include "arrow/util/decimal.h" #include "arrow/util/endian.h" #include "arrow/util/logging.h" +#include "arrow/util/sort.h" +#include "arrow/util/span.h" +#include "arrow/visit_data_inline.h" #include "arrow/visit_type_inline.h" namespace arrow { @@ -271,6 +274,13 @@ class 
ArrayDataEndianSwapper { return Status::OK(); } + Status Visit(const BinaryViewType& type) { + // TODO(GH-37879): This requires knowledge of whether the array is being swapped to + // native endian or from it so that we know what size to trust when deciding whether + // something is an inline view. + return Status::NotImplemented("Swapping endianness of ", type); + } + Status Visit(const ListType& type) { RETURN_NOT_OK(SwapOffsets(1)); return Status::OK(); @@ -379,6 +389,10 @@ class NullArrayFactory { return MaxOf(sizeof(typename T::offset_type) * (length_ + 1)); } + Status Visit(const BinaryViewType& type) { + return MaxOf(sizeof(BinaryViewType::c_type) * length_); + } + Status Visit(const FixedSizeListType& type) { return MaxOf(GetBufferLength(type.value_type(), type.list_size() * length_)); } @@ -498,6 +512,11 @@ class NullArrayFactory { return Status::OK(); } + Status Visit(const BinaryViewType&) { + out_->buffers.resize(2, buffer_); + return Status::OK(); + } + template enable_if_var_size_list Visit(const T& type) { out_->buffers.resize(2, buffer_); @@ -600,6 +619,11 @@ class RepeatedArrayFactory { RepeatedArrayFactory(MemoryPool* pool, const Scalar& scalar, int64_t length) : pool_(pool), scalar_(scalar), length_(length) {} + template + const auto& scalar() const { + return checked_cast::ScalarType&>(scalar_); + } + Result> Create() { RETURN_NOT_OK(VisitTypeInline(*scalar_.type, this)); return out_; @@ -621,7 +645,7 @@ class RepeatedArrayFactory { template enable_if_t::value || is_temporal_type::value, Status> Visit( const T&) { - auto value = checked_cast::ScalarType&>(scalar_).value; + auto value = scalar().value; return FinishFixedWidth(&value, sizeof(value)); } @@ -632,8 +656,7 @@ class RepeatedArrayFactory { template enable_if_decimal Visit(const T&) { - using ScalarType = typename TypeTraits::ScalarType; - auto value = checked_cast(scalar_).value.ToBytes(); + auto value = scalar().value.ToBytes(); return FinishFixedWidth(value.data(), value.size()); } @@ 
-644,29 +667,36 @@ class RepeatedArrayFactory { template enable_if_base_binary Visit(const T&) { - std::shared_ptr value = - checked_cast::ScalarType&>(scalar_).value; + const std::shared_ptr& value = scalar().value; std::shared_ptr values_buffer, offsets_buffer; RETURN_NOT_OK(CreateBufferOf(value->data(), value->size(), &values_buffer)); auto size = static_cast(value->size()); RETURN_NOT_OK(CreateOffsetsBuffer(size, &offsets_buffer)); - out_ = std::make_shared::ArrayType>(length_, offsets_buffer, - values_buffer); + out_ = std::make_shared::ArrayType>( + length_, std::move(offsets_buffer), std::move(values_buffer)); + return Status::OK(); + } + + template + enable_if_binary_view_like Visit(const T& type) { + std::string_view value{*scalar().value}; + auto s = util::ToBinaryView(value, 0, 0); + RETURN_NOT_OK(FinishFixedWidth(&s, sizeof(s))); + if (!s.is_inline()) { + out_->data()->buffers.push_back(scalar().value); + } return Status::OK(); } template enable_if_var_size_list Visit(const T& type) { - using ScalarType = typename TypeTraits::ScalarType; using ArrayType = typename TypeTraits::ArrayType; - auto value = checked_cast(scalar_).value; - - ArrayVector values(length_, value); + ArrayVector values(length_, scalar().value); ARROW_ASSIGN_OR_RAISE(auto value_array, Concatenate(values, pool_)); std::shared_ptr offsets_buffer; - auto size = static_cast(value->length()); + auto size = static_cast(scalar().value->length()); RETURN_NOT_OK(CreateOffsetsBuffer(size, &offsets_buffer)); out_ = diff --git a/cpp/src/arrow/array/validate.cc b/cpp/src/arrow/array/validate.cc index 19ff8e28b536c..3dde41b1450e8 100644 --- a/cpp/src/arrow/array/validate.cc +++ b/cpp/src/arrow/array/validate.cc @@ -31,41 +31,43 @@ #include "arrow/util/int_util_overflow.h" #include "arrow/util/logging.h" #include "arrow/util/ree_util.h" +#include "arrow/util/sort.h" +#include "arrow/util/string.h" +#include "arrow/util/unreachable.h" #include "arrow/util/utf8.h" #include "arrow/visit_data_inline.h" 
#include "arrow/visit_type_inline.h" -namespace arrow { -namespace internal { +namespace arrow::internal { namespace { struct UTF8DataValidator { const ArrayData& data; - Status Visit(const DataType&) { - // Default, should be unreachable - return Status::NotImplemented(""); - } + template + Status Visit(const T&) { + if constexpr (std::is_same_v || std::is_same_v || + std::is_same_v) { + util::InitializeUTF8(); - template - enable_if_string Visit(const StringType&) { - util::InitializeUTF8(); - - int64_t i = 0; - return VisitArraySpanInline( - data, - [&](std::string_view v) { - if (ARROW_PREDICT_FALSE(!util::ValidateUTF8(v))) { - return Status::Invalid("Invalid UTF8 sequence at string index ", i); - } - ++i; - return Status::OK(); - }, - [&]() { - ++i; - return Status::OK(); - }); + int64_t i = 0; + return VisitArraySpanInline( + data, + [&](std::string_view v) { + if (ARROW_PREDICT_FALSE(!util::ValidateUTF8(v))) { + return Status::Invalid("Invalid UTF8 sequence at string index ", i); + } + ++i; + return Status::OK(); + }, + [&]() { + ++i; + return Status::OK(); + }); + } else { + Unreachable("utf-8 validation of non string type"); + } } }; @@ -169,6 +171,14 @@ struct ValidateArrayImpl { return Status::OK(); } + Status Visit(const StringViewType& type) { + RETURN_NOT_OK(ValidateBinaryView(type)); + if (full_validation) { + RETURN_NOT_OK(ValidateUTF8(data)); + } + return Status::OK(); + } + Status Visit(const Date64Type& type) { RETURN_NOT_OK(ValidateFixedWidthBuffers()); @@ -248,6 +258,8 @@ struct ValidateArrayImpl { Status Visit(const LargeBinaryType& type) { return ValidateBinaryLike(type); } + Status Visit(const BinaryViewType& type) { return ValidateBinaryView(type); } + Status Visit(const ListType& type) { return ValidateListLike(type); } Status Visit(const LargeListType& type) { return ValidateListLike(type); } @@ -453,11 +465,16 @@ struct ValidateArrayImpl { return Status::Invalid("Array length is negative"); } - if (data.buffers.size() != 
layout.buffers.size()) { + if (layout.variadic_spec) { + if (data.buffers.size() < layout.buffers.size()) { + return Status::Invalid("Expected at least ", layout.buffers.size(), + " buffers in array of type ", type.ToString(), ", got ", + data.buffers.size()); + } + } else if (data.buffers.size() != layout.buffers.size()) { return Status::Invalid("Expected ", layout.buffers.size(), - " buffers in array " - "of type ", - type.ToString(), ", got ", data.buffers.size()); + " buffers in array of type ", type.ToString(), ", got ", + data.buffers.size()); } // This check is required to avoid addition overflow below @@ -469,7 +486,9 @@ struct ValidateArrayImpl { for (int i = 0; i < static_cast(data.buffers.size()); ++i) { const auto& buffer = data.buffers[i]; - const auto& spec = layout.buffers[i]; + const auto& spec = i < static_cast(layout.buffers.size()) + ? layout.buffers[i] + : *layout.variadic_spec; if (buffer == nullptr) { continue; @@ -595,6 +614,85 @@ struct ValidateArrayImpl { return Status::OK(); } + Status ValidateBinaryView(const BinaryViewType& type) { + int64_t views_byte_size = data.buffers[1]->size(); + int64_t required_view_count = data.length + data.offset; + if (static_cast(views_byte_size / BinaryViewType::kSize) < + required_view_count) { + return Status::Invalid("View buffer size (bytes): ", views_byte_size, + " isn't large enough for length: ", data.length, + " and offset: ", data.offset); + } + + if (!full_validation) return Status::OK(); + + auto CheckPrefix = [&](size_t i, + std::array prefix, + const uint8_t* data) { + if (std::memcmp(data, prefix.data(), BinaryViewType::kPrefixSize) == 0) { + return Status::OK(); + } + return Status::Invalid("View at slot ", i, " has inlined prefix 0x", + HexEncode(prefix.data(), BinaryViewType::kPrefixSize), + " but the out-of-line data begins with 0x", + HexEncode(data, BinaryViewType::kPrefixSize)); + }; + + util::span views(data.GetValues(1), + static_cast(data.length)); + util::span 
data_buffers(data.buffers.data() + 2, data.buffers.size() - 2); + + for (size_t i = 0; i < static_cast(data.length); ++i) { + if (data.IsNull(i)) continue; + + if (views[i].size() < 0) { + return Status::Invalid("View at slot ", i, " has negative size ", + views[i].size()); + } + + if (views[i].is_inline()) { + auto padding_bytes = util::span(views[i].inlined.data).subspan(views[i].size()); + for (auto padding_byte : padding_bytes) { + if (padding_byte != 0) { + return Status::Invalid("View at slot ", i, " was inline with size ", + views[i].size(), + " but its padding bytes were not all zero: ", + HexEncode(padding_bytes.data(), padding_bytes.size())); + } + } + continue; + } + + auto [size, prefix, buffer_index, offset] = views[i].ref; + + if (buffer_index < 0) { + return Status::Invalid("View at slot ", i, " has negative buffer index ", + buffer_index); + } + + if (offset < 0) { + return Status::Invalid("View at slot ", i, " has negative offset ", offset); + } + + if (static_cast(buffer_index) >= data_buffers.size()) { + return Status::IndexError("View at slot ", i, " references buffer ", buffer_index, + " but there are only ", data_buffers.size(), + " data buffers"); + } + const auto& buffer = data_buffers[buffer_index]; + + if (int64_t end = offset + static_cast(size); end > buffer->size()) { + return Status::IndexError( + "View at slot ", i, " references range ", offset, "-", end, " of buffer ", + buffer_index, " but that buffer is only ", buffer->size(), " bytes long"); + } + + RETURN_NOT_OK(CheckPrefix(i, prefix, buffer->data() + offset)); + } + + return Status::OK(); + } + template Status ValidateListLike(const ListType& type) { const ArrayData& values = *data.child_data[0]; @@ -798,7 +896,8 @@ Status ValidateArrayFull(const Array& array) { return ValidateArrayFull(*array.d ARROW_EXPORT Status ValidateUTF8(const ArrayData& data) { - DCHECK(data.type->id() == Type::STRING || data.type->id() == Type::LARGE_STRING); + DCHECK(data.type->id() == Type::STRING || 
data.type->id() == Type::STRING_VIEW || + data.type->id() == Type::LARGE_STRING); UTF8DataValidator validator{data}; return VisitTypeInline(*data.type, &validator); } @@ -806,5 +905,4 @@ Status ValidateUTF8(const ArrayData& data) { ARROW_EXPORT Status ValidateUTF8(const Array& array) { return ValidateUTF8(*array.data()); } -} // namespace internal -} // namespace arrow +} // namespace arrow::internal diff --git a/cpp/src/arrow/buffer_builder.h b/cpp/src/arrow/buffer_builder.h index e7eea64043ba8..a84c98b6b2491 100644 --- a/cpp/src/arrow/buffer_builder.h +++ b/cpp/src/arrow/buffer_builder.h @@ -143,7 +143,10 @@ class ARROW_EXPORT BufferBuilder { memcpy(data_ + size_, data, static_cast(length)); size_ += length; } - void UnsafeAppend(std::string_view v) { UnsafeAppend(v.data(), v.size()); } + + void UnsafeAppend(std::string_view v) { + UnsafeAppend(v.data(), static_cast(v.size())); + } void UnsafeAppend(const int64_t num_copies, uint8_t value) { memset(data_ + size_, value, static_cast(num_copies)); @@ -268,7 +271,7 @@ class TypedBufferBuilder< template void UnsafeAppend(Iter values_begin, Iter values_end) { - int64_t num_elements = static_cast(std::distance(values_begin, values_end)); + auto num_elements = static_cast(std::distance(values_begin, values_end)); auto data = mutable_data() + length(); bytes_builder_.UnsafeAdvance(num_elements * sizeof(T)); std::copy(values_begin, values_end, data); diff --git a/cpp/src/arrow/builder.cc b/cpp/src/arrow/builder.cc index caddbf9db5578..c7e6207bfefa4 100644 --- a/cpp/src/arrow/builder.cc +++ b/cpp/src/arrow/builder.cc @@ -148,6 +148,8 @@ struct DictionaryBuilderCase { Status Visit(const StringType&) { return CreateFor(); } Status Visit(const LargeBinaryType&) { return CreateFor(); } Status Visit(const LargeStringType&) { return CreateFor(); } + Status Visit(const BinaryViewType&) { return CreateFor(); } + Status Visit(const StringViewType&) { return CreateFor(); } Status Visit(const FixedSizeBinaryType&) { return 
CreateFor(); } Status Visit(const Decimal128Type&) { return CreateFor(); } Status Visit(const Decimal256Type&) { return CreateFor(); } @@ -190,7 +192,7 @@ struct DictionaryBuilderCase { struct MakeBuilderImpl { template - enable_if_not_nested Visit(const T&) { + enable_if_not_nested Visit(const T& t) { out.reset(new typename TypeTraits::BuilderType(type, pool)); return Status::OK(); } diff --git a/cpp/src/arrow/chunked_array.cc b/cpp/src/arrow/chunked_array.cc index 12937406e7800..c36b736d5d5df 100644 --- a/cpp/src/arrow/chunked_array.cc +++ b/cpp/src/arrow/chunked_array.cc @@ -86,7 +86,7 @@ Result> ChunkedArray::MakeEmpty( return std::make_shared(std::move(new_chunks)); } -bool ChunkedArray::Equals(const ChunkedArray& other) const { +bool ChunkedArray::Equals(const ChunkedArray& other, const EqualOptions& opts) const { if (length_ != other.length()) { return false; } @@ -102,9 +102,9 @@ bool ChunkedArray::Equals(const ChunkedArray& other) const { // the underlying data independently of the chunk size. 
return internal::ApplyBinaryChunked( *this, other, - [](const Array& left_piece, const Array& right_piece, - int64_t ARROW_ARG_UNUSED(position)) { - if (!left_piece.Equals(right_piece)) { + [&](const Array& left_piece, const Array& right_piece, + int64_t ARROW_ARG_UNUSED(position)) { + if (!left_piece.Equals(right_piece, opts)) { return Status::Invalid("Unequal piece"); } return Status::OK(); @@ -129,14 +129,15 @@ bool mayHaveNaN(const arrow::DataType& type) { } // namespace -bool ChunkedArray::Equals(const std::shared_ptr& other) const { +bool ChunkedArray::Equals(const std::shared_ptr& other, + const EqualOptions& opts) const { if (!other) { return false; } if (this == other.get() && !mayHaveNaN(*type_)) { return true; } - return Equals(*other.get()); + return Equals(*other.get(), opts); } bool ChunkedArray::ApproxEquals(const ChunkedArray& other, diff --git a/cpp/src/arrow/chunked_array.h b/cpp/src/arrow/chunked_array.h index 6ec7d11ac839d..5d300861d85c2 100644 --- a/cpp/src/arrow/chunked_array.h +++ b/cpp/src/arrow/chunked_array.h @@ -152,9 +152,11 @@ class ARROW_EXPORT ChunkedArray { /// /// Two chunked arrays can be equal only if they have equal datatypes. /// However, they may be equal even if they have different chunkings. - bool Equals(const ChunkedArray& other) const; + bool Equals(const ChunkedArray& other, + const EqualOptions& opts = EqualOptions::Defaults()) const; /// \brief Determine if two chunked arrays are equal. 
- bool Equals(const std::shared_ptr& other) const; + bool Equals(const std::shared_ptr& other, + const EqualOptions& opts = EqualOptions::Defaults()) const; /// \brief Determine if two chunked arrays approximately equal bool ApproxEquals(const ChunkedArray& other, const EqualOptions& = EqualOptions::Defaults()) const; diff --git a/cpp/src/arrow/compare.cc b/cpp/src/arrow/compare.cc index df41cd22c9e06..50cfdd05a14bb 100644 --- a/cpp/src/arrow/compare.cc +++ b/cpp/src/arrow/compare.cc @@ -38,6 +38,7 @@ #include "arrow/tensor.h" #include "arrow/type.h" #include "arrow/type_traits.h" +#include "arrow/util/binary_view_util.h" #include "arrow/util/bit_run_reader.h" #include "arrow/util/bit_util.h" #include "arrow/util/bitmap_ops.h" @@ -261,6 +262,25 @@ class RangeDataEqualsImpl { // Also matches StringType Status Visit(const BinaryType& type) { return CompareBinary(type); } + // Also matches StringViewType + Status Visit(const BinaryViewType& type) { + auto* left_values = left_.GetValues(1) + left_start_idx_; + auto* right_values = right_.GetValues(1) + right_start_idx_; + + auto* left_buffers = left_.buffers.data() + 2; + auto* right_buffers = right_.buffers.data() + 2; + VisitValidRuns([&](int64_t i, int64_t length) { + for (auto end_i = i + length; i < end_i; ++i) { + if (!util::EqualBinaryView(left_values[i], right_values[i], left_buffers, + right_buffers)) { + return false; + } + } + return true; + }); + return Status::OK(); + } + // Also matches LargeStringType Status Visit(const LargeBinaryType& type) { return CompareBinary(type); } @@ -632,6 +652,11 @@ class TypeEqualsVisitor { return Status::OK(); } + Status Visit(const BinaryViewType&) { + result_ = true; + return Status::OK(); + } + template enable_if_interval Visit(const T& left) { const auto& right = checked_cast(right_); @@ -802,8 +827,7 @@ class ScalarEqualsVisitor { Status Visit(const DoubleScalar& left) { return CompareFloating(left); } template - typename std::enable_if::value, Status>::type - 
Visit(const T& left) { + enable_if_t::value, Status> Visit(const T& left) { const auto& right = checked_cast(right_); result_ = internal::SharedPtrEquals(left.value, right.value); return Status::OK(); diff --git a/cpp/src/arrow/compute/CMakeLists.txt b/cpp/src/arrow/compute/CMakeLists.txt index 001424dd42072..1134e0a98ae45 100644 --- a/cpp/src/arrow/compute/CMakeLists.txt +++ b/cpp/src/arrow/compute/CMakeLists.txt @@ -89,9 +89,7 @@ add_arrow_test(internals_test kernel_test.cc light_array_test.cc registry_test.cc - key_hash_test.cc - EXTRA_LINK_LIBS - ${ARROW_GTEST_GMOCK}) + key_hash_test.cc) add_arrow_compute_test(expression_test SOURCES expression_test.cc) diff --git a/cpp/src/arrow/compute/kernels/CMakeLists.txt b/cpp/src/arrow/compute/kernels/CMakeLists.txt index 78743050625a4..4350cd57ff026 100644 --- a/cpp/src/arrow/compute/kernels/CMakeLists.txt +++ b/cpp/src/arrow/compute/kernels/CMakeLists.txt @@ -23,8 +23,7 @@ if(ARROW_TESTING) add_library(arrow_compute_kernels_testing OBJECT test_util.cc) # Even though this is still just an object library we still need to "link" our # dependencies so that include paths are configured correctly - target_link_libraries(arrow_compute_kernels_testing ${ARROW_GTEST_GTEST} - ${ARROW_GTEST_GMOCK}) + target_link_libraries(arrow_compute_kernels_testing PRIVATE ${ARROW_GTEST_GMOCK}) endif() add_arrow_test(scalar_cast_test diff --git a/cpp/src/arrow/compute/kernels/scalar_round.cc b/cpp/src/arrow/compute/kernels/scalar_round.cc index 5dc628f8d95c5..36e59c8c1deac 100644 --- a/cpp/src/arrow/compute/kernels/scalar_round.cc +++ b/cpp/src/arrow/compute/kernels/scalar_round.cc @@ -1259,7 +1259,7 @@ std::shared_ptr MakeUnaryRoundFunction(std::string name, RoundKernelGenerator kernel_generator; for (const auto& tys : {NumericTypes(), {decimal128(1, 0), decimal256(1, 0)}}) { for (const auto& ty : tys) { - ArrayKernelExec exec; + ArrayKernelExec exec = nullptr; KernelInit init; DCHECK_OK(VisitTypeInline(*ty, &kernel_generator, &exec, &init)); 
DCHECK_OK(func->AddKernel( @@ -1280,7 +1280,7 @@ std::shared_ptr MakeBinaryRoundFunction(const std::string& name, RoundKernelGenerator kernel_generator; for (const auto& tys : {NumericTypes(), {decimal128(1, 0), decimal256(1, 0)}}) { for (const auto& ty : tys) { - ArrayKernelExec exec; + ArrayKernelExec exec = nullptr; KernelInit init; DCHECK_OK(VisitTypeInline(*ty, &kernel_generator, &exec, &init)); DCHECK_OK(func->AddKernel( diff --git a/cpp/src/arrow/compute/kernels/vector_hash.cc b/cpp/src/arrow/compute/kernels/vector_hash.cc index d9143b760f32b..5426dc405429c 100644 --- a/cpp/src/arrow/compute/kernels/vector_hash.cc +++ b/cpp/src/arrow/compute/kernels/vector_hash.cc @@ -31,6 +31,7 @@ #include "arrow/compute/kernels/common_internal.h" #include "arrow/result.h" #include "arrow/util/hashing.h" +#include "arrow/util/unreachable.h" namespace arrow { @@ -262,7 +263,7 @@ class HashKernel : public KernelState { // Base class for all "regular" hash kernel implementations // (NullType has a separate implementation) -template class RegularHashKernel : public HashKernel { public: @@ -503,39 +504,13 @@ class DictionaryHashKernel : public HashKernel { }; // ---------------------------------------------------------------------- - -template -struct HashKernelTraits {}; - -template -struct HashKernelTraits> { - using HashKernel = NullHashKernel; -}; - -template -struct HashKernelTraits> { - using HashKernel = RegularHashKernel; -}; - -template -struct HashKernelTraits> { - using HashKernel = RegularHashKernel; -}; - -template -Result> HashInitImpl(KernelContext* ctx, - const KernelInitArgs& args) { - using HashKernelType = typename HashKernelTraits::HashKernel; - auto result = std::make_unique(args.inputs[0].GetSharedPtr(), - args.options, ctx->memory_pool()); - RETURN_NOT_OK(result->Reset()); - return std::move(result); -} - -template +template Result> HashInit(KernelContext* ctx, const KernelInitArgs& args) { - return HashInitImpl(ctx, args); + auto result = 
std::make_unique(args.inputs[0].GetSharedPtr(), args.options, + ctx->memory_pool()); + RETURN_NOT_OK(result->Reset()); + return std::move(result); } template @@ -544,22 +519,22 @@ KernelInit GetHashInit(Type::type type_id) { // representation switch (type_id) { case Type::NA: - return HashInit; + return HashInit>; case Type::BOOL: - return HashInit; + return HashInit>; case Type::INT8: case Type::UINT8: - return HashInit; + return HashInit>; case Type::INT16: case Type::UINT16: - return HashInit; + return HashInit>; case Type::INT32: case Type::UINT32: case Type::FLOAT: case Type::DATE32: case Type::TIME32: case Type::INTERVAL_MONTHS: - return HashInit; + return HashInit>; case Type::INT64: case Type::UINT64: case Type::DOUBLE: @@ -568,22 +543,24 @@ KernelInit GetHashInit(Type::type type_id) { case Type::TIMESTAMP: case Type::DURATION: case Type::INTERVAL_DAY_TIME: - return HashInit; + return HashInit>; case Type::BINARY: case Type::STRING: - return HashInit; + return HashInit>; case Type::LARGE_BINARY: case Type::LARGE_STRING: - return HashInit; + return HashInit>; + case Type::BINARY_VIEW: + case Type::STRING_VIEW: + return HashInit>; case Type::FIXED_SIZE_BINARY: case Type::DECIMAL128: case Type::DECIMAL256: - return HashInit; + return HashInit>; case Type::INTERVAL_MONTH_DAY_NANO: - return HashInit; + return HashInit>; default: - DCHECK(false); - return nullptr; + Unreachable("non hashable type"); } } @@ -593,31 +570,11 @@ template Result> DictionaryHashInit(KernelContext* ctx, const KernelInitArgs& args) { const auto& dict_type = checked_cast(*args.inputs[0].type); - Result> indices_hasher; - switch (dict_type.index_type()->id()) { - case Type::INT8: - case Type::UINT8: - indices_hasher = HashInitImpl(ctx, args); - break; - case Type::INT16: - case Type::UINT16: - indices_hasher = HashInitImpl(ctx, args); - break; - case Type::INT32: - case Type::UINT32: - indices_hasher = HashInitImpl(ctx, args); - break; - case Type::INT64: - case Type::UINT64: - 
indices_hasher = HashInitImpl(ctx, args); - break; - default: - DCHECK(false) << "Unsupported dictionary index type"; - break; - } - RETURN_NOT_OK(indices_hasher); - return std::make_unique(std::move(indices_hasher.ValueOrDie()), - dict_type.value_type()); + ARROW_ASSIGN_OR_RAISE(auto indices_hasher, + GetHashInit(dict_type.index_type()->id())(ctx, args)); + return std::make_unique( + checked_pointer_cast(std::move(indices_hasher)), + dict_type.value_type()); } Status HashExec(KernelContext* ctx, const ExecSpan& batch, ExecResult* out) { diff --git a/cpp/src/arrow/dataset/CMakeLists.txt b/cpp/src/arrow/dataset/CMakeLists.txt index eb8fb54803aa9..1afef3e3b0463 100644 --- a/cpp/src/arrow/dataset/CMakeLists.txt +++ b/cpp/src/arrow/dataset/CMakeLists.txt @@ -113,8 +113,7 @@ if(ARROW_TESTING) add_library(arrow_dataset_testing OBJECT test_util_internal.cc) # Even though this is still just an object library we still need to "link" our # dependencies so that include paths are configured correctly - target_link_libraries(arrow_dataset_testing ${ARROW_DATASET_TEST_LINK_LIBS}) - target_link_libraries(arrow_dataset_testing ${ARROW_GTEST_GTEST}) + target_link_libraries(arrow_dataset_testing PRIVATE ${ARROW_DATASET_TEST_LINK_LIBS}) list(APPEND ARROW_DATASET_TEST_LINK_LIBS arrow_dataset_testing) endif() diff --git a/cpp/src/arrow/dataset/file_parquet.cc b/cpp/src/arrow/dataset/file_parquet.cc index d486f194f38a3..65ad70181f28a 100644 --- a/cpp/src/arrow/dataset/file_parquet.cc +++ b/cpp/src/arrow/dataset/file_parquet.cc @@ -504,11 +504,6 @@ Future> ParquetFileFormat::GetReader default_fragment_scan_options)); auto properties = MakeReaderProperties(*this, parquet_scan_options.get(), source.path(), source.filesystem(), options->pool); - ARROW_ASSIGN_OR_RAISE(auto input, source.Open()); - // TODO(ARROW-12259): workaround since we have Future<(move-only type)> - auto reader_fut = parquet::ParquetFileReader::OpenAsync( - std::move(input), std::move(properties), metadata); - auto path 
= source.path(); auto self = checked_pointer_cast(shared_from_this()); return source.OpenAsync().Then( diff --git a/cpp/src/arrow/engine/substrait/expression_internal.cc b/cpp/src/arrow/engine/substrait/expression_internal.cc index 0df8425609ff1..d395261597696 100644 --- a/cpp/src/arrow/engine/substrait/expression_internal.cc +++ b/cpp/src/arrow/engine/substrait/expression_internal.cc @@ -727,6 +727,8 @@ struct ScalarToProtoImpl { s); } + Status Visit(const BinaryViewScalar& s) { return NotImplemented(s); } + Status Visit(const FixedSizeBinaryScalar& s) { return FromBuffer( [](Lit* lit, std::string&& s) { lit->set_fixed_binary(std::move(s)); }, s); diff --git a/cpp/src/arrow/engine/substrait/type_internal.cc b/cpp/src/arrow/engine/substrait/type_internal.cc index 1f9141f36ba6b..d3fb058137e6a 100644 --- a/cpp/src/arrow/engine/substrait/type_internal.cc +++ b/cpp/src/arrow/engine/substrait/type_internal.cc @@ -263,6 +263,8 @@ struct DataTypeToProtoImpl { return SetWith(&substrait::Type::set_allocated_binary); } + Status Visit(const BinaryViewType& t) { return NotImplemented(t); } + Status Visit(const FixedSizeBinaryType& t) { SetWithThen(&substrait::Type::set_allocated_fixed_binary)->set_length(t.byte_width()); return Status::OK(); diff --git a/cpp/src/arrow/filesystem/CMakeLists.txt b/cpp/src/arrow/filesystem/CMakeLists.txt index b997ca0a387a6..a42a8d0f8c1b6 100644 --- a/cpp/src/arrow/filesystem/CMakeLists.txt +++ b/cpp/src/arrow/filesystem/CMakeLists.txt @@ -90,7 +90,7 @@ if(ARROW_S3) if(ARROW_BUILD_TESTS) add_executable(arrow-s3fs-narrative-test s3fs_narrative_test.cc) target_link_libraries(arrow-s3fs-narrative-test ${ARROW_TEST_LINK_LIBS} - ${GFLAGS_LIBRARIES} ${ARROW_GTEST_GTEST}) + ${GFLAGS_LIBRARIES}) add_dependencies(arrow-tests arrow-s3fs-narrative-test) endif() diff --git a/cpp/src/arrow/filesystem/azurefs.cc b/cpp/src/arrow/filesystem/azurefs.cc index 179be069b2acf..6359183d90bb4 100644 --- a/cpp/src/arrow/filesystem/azurefs.cc +++ 
b/cpp/src/arrow/filesystem/azurefs.cc @@ -16,14 +16,17 @@ // under the License. #include "arrow/filesystem/azurefs.h" +#include "arrow/filesystem/azurefs_internal.h" #include +#include #include "arrow/buffer.h" #include "arrow/filesystem/path_util.h" #include "arrow/filesystem/util_internal.h" #include "arrow/result.h" #include "arrow/util/checked_cast.h" +#include "arrow/util/formatting.h" #include "arrow/util/future.h" #include "arrow/util/key_value_metadata.h" #include "arrow/util/logging.h" @@ -58,6 +61,7 @@ Status AzureOptions::ConfigureAccountKeyCredentials(const std::string& account_n credentials_kind = AzureCredentialsKind::StorageCredentials; return Status::OK(); } + namespace { // An AzureFileSystem represents a single Azure storage account. AzurePath describes a @@ -78,18 +82,17 @@ struct AzurePath { "Expected an Azure object path of the form 'container/path...', got a URI: '", s, "'"); } - const auto src = internal::RemoveTrailingSlash(s); - auto first_sep = src.find_first_of(internal::kSep); + auto first_sep = s.find_first_of(internal::kSep); if (first_sep == 0) { return Status::Invalid("Path cannot start with a separator ('", s, "')"); } if (first_sep == std::string::npos) { - return AzurePath{std::string(src), std::string(src), "", {}}; + return AzurePath{std::string(s), std::string(s), "", {}}; } AzurePath path; - path.full_path = std::string(src); - path.container = std::string(src.substr(0, first_sep)); - path.path_to_file = std::string(src.substr(first_sep + 1)); + path.full_path = std::string(s); + path.container = std::string(s.substr(0, first_sep)); + path.path_to_file = std::string(s.substr(first_sep + 1)); path.path_to_file_parts = internal::SplitAbstractPath(path.path_to_file); RETURN_NOT_OK(Validate(path)); return path; @@ -145,18 +148,148 @@ Status ValidateFilePath(const AzurePath& path) { return Status::OK(); } -Status ErrorToStatus(const std::string& prefix, - const Azure::Storage::StorageException& exception) { - return 
Status::IOError(prefix, " Azure Error: ", exception.what()); +template +std::string FormatValue(typename TypeTraits::CType value) { + struct StringAppender { + std::string string; + Status operator()(std::string_view view) { + string.append(view.data(), view.size()); + return Status::OK(); + } + } appender; + arrow::internal::StringFormatter formatter; + ARROW_UNUSED(formatter(value, appender)); + return appender.string; } -template -std::shared_ptr GetObjectMetadata(const ObjectResult& result) { - auto md = std::make_shared(); - for (auto prop : result) { - md->Append(prop.first, prop.second); +std::shared_ptr PropertiesToMetadata( + const Azure::Storage::Blobs::Models::BlobProperties& properties) { + auto metadata = std::make_shared(); + // Not supported yet: + // * properties.ObjectReplicationSourceProperties + // * properties.Metadata + // + // They may have the same key defined in the following + // metadata->Append() list. If we have duplicated key in metadata, + // the first value may be only used by users because + // KeyValueMetadata::Get() returns the first found value. Note that + // users can use all values by using KeyValueMetadata::keys() and + // KeyValueMetadata::values(). 
+ if (properties.ImmutabilityPolicy.HasValue()) { + metadata->Append("Immutability-Policy-Expires-On", + properties.ImmutabilityPolicy.Value().ExpiresOn.ToString()); + metadata->Append("Immutability-Policy-Mode", + properties.ImmutabilityPolicy.Value().PolicyMode.ToString()); + } + metadata->Append("Content-Type", properties.HttpHeaders.ContentType); + metadata->Append("Content-Encoding", properties.HttpHeaders.ContentEncoding); + metadata->Append("Content-Language", properties.HttpHeaders.ContentLanguage); + const auto& content_hash = properties.HttpHeaders.ContentHash.Value; + metadata->Append("Content-Hash", HexEncode(content_hash.data(), content_hash.size())); + metadata->Append("Content-Disposition", properties.HttpHeaders.ContentDisposition); + metadata->Append("Cache-Control", properties.HttpHeaders.CacheControl); + metadata->Append("Last-Modified", properties.LastModified.ToString()); + metadata->Append("Created-On", properties.CreatedOn.ToString()); + if (properties.ObjectReplicationDestinationPolicyId.HasValue()) { + metadata->Append("Object-Replication-Destination-Policy-Id", + properties.ObjectReplicationDestinationPolicyId.Value()); + } + metadata->Append("Blob-Type", properties.BlobType.ToString()); + if (properties.CopyCompletedOn.HasValue()) { + metadata->Append("Copy-Completed-On", properties.CopyCompletedOn.Value().ToString()); + } + if (properties.CopyStatusDescription.HasValue()) { + metadata->Append("Copy-Status-Description", properties.CopyStatusDescription.Value()); + } + if (properties.CopyId.HasValue()) { + metadata->Append("Copy-Id", properties.CopyId.Value()); + } + if (properties.CopyProgress.HasValue()) { + metadata->Append("Copy-Progress", properties.CopyProgress.Value()); + } + if (properties.CopySource.HasValue()) { + metadata->Append("Copy-Source", properties.CopySource.Value()); + } + if (properties.CopyStatus.HasValue()) { + metadata->Append("Copy-Status", properties.CopyStatus.Value().ToString()); + } + if 
(properties.IsIncrementalCopy.HasValue()) { + metadata->Append("Is-Incremental-Copy", + FormatValue(properties.IsIncrementalCopy.Value())); + } + if (properties.IncrementalCopyDestinationSnapshot.HasValue()) { + metadata->Append("Incremental-Copy-Destination-Snapshot", + properties.IncrementalCopyDestinationSnapshot.Value()); + } + if (properties.LeaseDuration.HasValue()) { + metadata->Append("Lease-Duration", properties.LeaseDuration.Value().ToString()); + } + if (properties.LeaseState.HasValue()) { + metadata->Append("Lease-State", properties.LeaseState.Value().ToString()); + } + if (properties.LeaseStatus.HasValue()) { + metadata->Append("Lease-Status", properties.LeaseStatus.Value().ToString()); + } + metadata->Append("Content-Length", FormatValue(properties.BlobSize)); + if (properties.ETag.HasValue()) { + metadata->Append("ETag", properties.ETag.ToString()); + } + if (properties.SequenceNumber.HasValue()) { + metadata->Append("Sequence-Number", + FormatValue(properties.SequenceNumber.Value())); + } + if (properties.CommittedBlockCount.HasValue()) { + metadata->Append("Committed-Block-Count", + FormatValue(properties.CommittedBlockCount.Value())); + } + metadata->Append("IsServerEncrypted", + FormatValue(properties.IsServerEncrypted)); + if (properties.EncryptionKeySha256.HasValue()) { + const auto& sha256 = properties.EncryptionKeySha256.Value(); + metadata->Append("Encryption-Key-Sha-256", HexEncode(sha256.data(), sha256.size())); + } + if (properties.EncryptionScope.HasValue()) { + metadata->Append("Encryption-Scope", properties.EncryptionScope.Value()); + } + if (properties.AccessTier.HasValue()) { + metadata->Append("Access-Tier", properties.AccessTier.Value().ToString()); + } + if (properties.IsAccessTierInferred.HasValue()) { + metadata->Append("Is-Access-Tier-Inferred", + FormatValue(properties.IsAccessTierInferred.Value())); + } + if (properties.ArchiveStatus.HasValue()) { + metadata->Append("Archive-Status", 
properties.ArchiveStatus.Value().ToString()); } - return md; + if (properties.AccessTierChangedOn.HasValue()) { + metadata->Append("Access-Tier-Changed-On", + properties.AccessTierChangedOn.Value().ToString()); + } + if (properties.VersionId.HasValue()) { + metadata->Append("Version-Id", properties.VersionId.Value()); + } + if (properties.IsCurrentVersion.HasValue()) { + metadata->Append("Is-Current-Version", + FormatValue(properties.IsCurrentVersion.Value())); + } + if (properties.TagCount.HasValue()) { + metadata->Append("Tag-Count", FormatValue(properties.TagCount.Value())); + } + if (properties.ExpiresOn.HasValue()) { + metadata->Append("Expires-On", properties.ExpiresOn.Value().ToString()); + } + if (properties.IsSealed.HasValue()) { + metadata->Append("Is-Sealed", FormatValue(properties.IsSealed.Value())); + } + if (properties.RehydratePriority.HasValue()) { + metadata->Append("Rehydrate-Priority", + properties.RehydratePriority.Value().ToString()); + } + if (properties.LastAccessedOn.HasValue()) { + metadata->Append("Last-Accessed-On", properties.LastAccessedOn.Value().ToString()); + } + metadata->Append("Has-Legal-Hold", FormatValue(properties.HasLegalHold)); + return metadata; } class ObjectInputFile final : public io::RandomAccessFile { @@ -176,15 +309,17 @@ class ObjectInputFile final : public io::RandomAccessFile { try { auto properties = blob_client_->GetProperties(); content_length_ = properties.Value.BlobSize; - metadata_ = GetObjectMetadata(properties.Value.Metadata); + metadata_ = PropertiesToMetadata(properties.Value); return Status::OK(); } catch (const Azure::Storage::StorageException& exception) { if (exception.StatusCode == Azure::Core::Http::HttpStatusCode::NotFound) { - // Could be either container or blob not found. 
return PathNotFound(path_); } - return ErrorToStatus( - "When fetching properties for '" + blob_client_->GetUrl() + "': ", exception); + return internal::ExceptionToStatus( + "GetProperties failed for '" + blob_client_->GetUrl() + + "' with an unexpected Azure error. Can not initialise an ObjectInputFile " + "without knowing the file size.", + exception); } } @@ -261,10 +396,12 @@ class ObjectInputFile final : public io::RandomAccessFile { ->DownloadTo(reinterpret_cast(out), nbytes, download_options) .Value.ContentRange.Length.Value(); } catch (const Azure::Storage::StorageException& exception) { - return ErrorToStatus("When reading from '" + blob_client_->GetUrl() + - "' at position " + std::to_string(position) + " for " + - std::to_string(nbytes) + " bytes: ", - exception); + return internal::ExceptionToStatus("DownloadTo from '" + blob_client_->GetUrl() + + "' at position " + std::to_string(position) + + " for " + std::to_string(nbytes) + + " bytes failed with an Azure error. ReadAt " + "failed to read the required byte range.", + exception); } } @@ -308,7 +445,6 @@ class ObjectInputFile final : public io::RandomAccessFile { int64_t content_length_ = kNoSize; std::shared_ptr metadata_; }; - } // namespace // ----------------------------------------------------------------------- @@ -317,27 +453,136 @@ class ObjectInputFile final : public io::RandomAccessFile { class AzureFileSystem::Impl { public: io::IOContext io_context_; - std::shared_ptr service_client_; + std::unique_ptr + datalake_service_client_; + std::unique_ptr blob_service_client_; AzureOptions options_; + internal::HierarchicalNamespaceDetector hierarchical_namespace_; explicit Impl(AzureOptions options, io::IOContext io_context) : io_context_(io_context), options_(std::move(options)) {} Status Init() { - service_client_ = std::make_shared( + blob_service_client_ = std::make_unique( options_.account_blob_url, options_.storage_credentials_provider); + datalake_service_client_ = + std::make_unique( + 
options_.account_dfs_url, options_.storage_credentials_provider); + RETURN_NOT_OK(hierarchical_namespace_.Init(datalake_service_client_.get())); return Status::OK(); } const AzureOptions& options() const { return options_; } + public: + Result GetFileInfo(const AzurePath& path) { + FileInfo info; + info.set_path(path.full_path); + + if (path.container.empty()) { + DCHECK(path.path_to_file.empty()); // The path is invalid if the container is empty + // but not path_to_file. + // path must refer to the root of the Azure storage account. This is a directory, + // and there isn't any extra metadata to fetch. + info.set_type(FileType::Directory); + return info; + } + if (path.path_to_file.empty()) { + // path refers to a container. This is a directory if it exists. + auto container_client = + blob_service_client_->GetBlobContainerClient(path.container); + try { + auto properties = container_client.GetProperties(); + info.set_type(FileType::Directory); + info.set_mtime( + std::chrono::system_clock::time_point(properties.Value.LastModified)); + return info; + } catch (const Azure::Storage::StorageException& exception) { + if (exception.StatusCode == Azure::Core::Http::HttpStatusCode::NotFound) { + info.set_type(FileType::NotFound); + return info; + } + return internal::ExceptionToStatus( + "GetProperties for '" + container_client.GetUrl() + + "' failed with an unexpected Azure error. GetFileInfo is unable to " + "determine whether the container exists.", + exception); + } + } + auto file_client = datalake_service_client_->GetFileSystemClient(path.container) + .GetFileClient(path.path_to_file); + try { + auto properties = file_client.GetProperties(); + if (properties.Value.IsDirectory) { + info.set_type(FileType::Directory); + } else if (internal::HasTrailingSlash(path.path_to_file)) { + // For a path with a trailing slash a hierarchical namespace may return a blob + // with that trailing slash removed. 
For consistency with flat namespace and + // other filesystems we chose to return NotFound. + info.set_type(FileType::NotFound); + return info; + } else { + info.set_type(FileType::File); + info.set_size(properties.Value.FileSize); + } + info.set_mtime( + std::chrono::system_clock::time_point(properties.Value.LastModified)); + return info; + } catch (const Azure::Storage::StorageException& exception) { + if (exception.StatusCode == Azure::Core::Http::HttpStatusCode::NotFound) { + ARROW_ASSIGN_OR_RAISE(auto hierarchical_namespace_enabled, + hierarchical_namespace_.Enabled(path.container)); + if (hierarchical_namespace_enabled) { + // If the hierarchical namespace is enabled, then the storage account will have + // explicit directories. Neither a file nor a directory was found. + info.set_type(FileType::NotFound); + return info; + } + // On flat namespace accounts there are no real directories. Directories are only + // implied by using `/` in the blob name. + Azure::Storage::Blobs::ListBlobsOptions list_blob_options; + + // If listing the prefix `path.path_to_file` with trailing slash returns at least + // one result then `path` refers to an implied directory. + auto prefix = internal::EnsureTrailingSlash(path.path_to_file); + list_blob_options.Prefix = prefix; + // We only need to know if there is at least one result, so minimise page size + // for efficiency. + list_blob_options.PageSizeHint = 1; + + try { + auto paged_list_result = + blob_service_client_->GetBlobContainerClient(path.container) + .ListBlobs(list_blob_options); + if (paged_list_result.Blobs.size() > 0) { + info.set_type(FileType::Directory); + } else { + info.set_type(FileType::NotFound); + } + return info; + } catch (const Azure::Storage::StorageException& exception) { + return internal::ExceptionToStatus( + "ListBlobs for '" + prefix + + "' failed with an unexpected Azure error. 
GetFileInfo is unable to " + "determine whether the path should be considered an implied directory.", + exception); + } + } + return internal::ExceptionToStatus( + "GetProperties for '" + file_client.GetUrl() + + "' failed with an unexpected " + "Azure error. GetFileInfo is unable to determine whether the path exists.", + exception); + } + } + Result> OpenInputFile(const std::string& s, AzureFileSystem* fs) { ARROW_RETURN_NOT_OK(internal::AssertNoTrailingSlash(s)); ARROW_ASSIGN_OR_RAISE(auto path, AzurePath::FromString(s)); RETURN_NOT_OK(ValidateFilePath(path)); auto blob_client = std::make_shared( - service_client_->GetBlobContainerClient(path.container) + blob_service_client_->GetBlobContainerClient(path.container) .GetBlobClient(path.path_to_file)); auto ptr = @@ -358,7 +603,7 @@ class AzureFileSystem::Impl { ARROW_ASSIGN_OR_RAISE(auto path, AzurePath::FromString(info.path())); RETURN_NOT_OK(ValidateFilePath(path)); auto blob_client = std::make_shared( - service_client_->GetBlobContainerClient(path.container) + blob_service_client_->GetBlobContainerClient(path.container) .GetBlobClient(path.path_to_file)); auto ptr = std::make_shared(blob_client, fs->io_context(), @@ -382,7 +627,8 @@ bool AzureFileSystem::Equals(const FileSystem& other) const { } Result AzureFileSystem::GetFileInfo(const std::string& path) { - return Status::NotImplemented("The Azure FileSystem is not fully implemented"); + ARROW_ASSIGN_OR_RAISE(auto p, AzurePath::FromString(path)); + return impl_->GetFileInfo(p); } Result AzureFileSystem::GetFileInfo(const FileSelector& select) { diff --git a/cpp/src/arrow/filesystem/azurefs_internal.cc b/cpp/src/arrow/filesystem/azurefs_internal.cc new file mode 100644 index 0000000000000..3e545d670cb04 --- /dev/null +++ b/cpp/src/arrow/filesystem/azurefs_internal.cc @@ -0,0 +1,88 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. 
See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#include "arrow/filesystem/azurefs_internal.h" + +#include + +#include "arrow/result.h" + +namespace arrow::fs::internal { + +Status ExceptionToStatus(const std::string& prefix, + const Azure::Storage::StorageException& exception) { + return Status::IOError(prefix, " Azure Error: ", exception.what()); +} + +Status HierarchicalNamespaceDetector::Init( + Azure::Storage::Files::DataLake::DataLakeServiceClient* datalake_service_client) { + datalake_service_client_ = datalake_service_client; + return Status::OK(); +} + +Result HierarchicalNamespaceDetector::Enabled(const std::string& container_name) { + // Hierarchical namespace can't easily be changed after the storage account is created + // and its common across all containers in the storage account. Do nothing until we've + // checked for a cached result. + if (enabled_.has_value()) { + return enabled_.value(); + } + + // This approach is inspired by hadoop-azure + // https://github.com/apache/hadoop/blob/7c6af6a5f626d18d68b656d085cc23e4c1f7a1ef/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java#L356. + // Unfortunately `blob_service_client->GetAccountInfo()` requires significantly + // elevated permissions. 
+ // https://learn.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties?tabs=azure-ad#authorization + auto filesystem_client = datalake_service_client_->GetFileSystemClient(container_name); + auto directory_client = filesystem_client.GetDirectoryClient("/"); + try { + directory_client.GetAccessControlList(); + enabled_ = true; + } catch (const Azure::Storage::StorageException& exception) { + // GetAccessControlList will fail on storage accounts without hierarchical + // namespace enabled. + + if (exception.StatusCode == Azure::Core::Http::HttpStatusCode::BadRequest || + exception.StatusCode == Azure::Core::Http::HttpStatusCode::Conflict) { + // Flat namespace storage accounts with soft delete enabled return + // Conflict - This endpoint does not support BlobStorageEvents or SoftDelete + // otherwise it returns: BadRequest - This operation is only supported on a + // hierarchical namespace account. + enabled_ = false; + } else if (exception.StatusCode == Azure::Core::Http::HttpStatusCode::NotFound) { + // Azurite returns NotFound. + try { + filesystem_client.GetProperties(); + enabled_ = false; + } catch (const Azure::Storage::StorageException& exception) { + return ExceptionToStatus("Failed to confirm '" + filesystem_client.GetUrl() + + "' is an accessible container. 
Therefore the " + "hierarchical namespace check was invalid.", + exception); + } + } else { + return ExceptionToStatus( + "GetAccessControlList for '" + directory_client.GetUrl() + + "' failed with an unexpected Azure error, while checking " + "whether the storage account has hierarchical namespace enabled.", + exception); + } + } + return enabled_.value(); +} + +} // namespace arrow::fs::internal diff --git a/cpp/src/arrow/filesystem/azurefs_internal.h b/cpp/src/arrow/filesystem/azurefs_internal.h new file mode 100644 index 0000000000000..c3da96239a18f --- /dev/null +++ b/cpp/src/arrow/filesystem/azurefs_internal.h @@ -0,0 +1,42 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +#pragma once + +#include + +#include + +#include "arrow/result.h" + +namespace arrow::fs::internal { + +Status ExceptionToStatus(const std::string& prefix, + const Azure::Storage::StorageException& exception); + +class HierarchicalNamespaceDetector { + public: + Status Init( + Azure::Storage::Files::DataLake::DataLakeServiceClient* datalake_service_client); + Result Enabled(const std::string& container_name); + + private: + Azure::Storage::Files::DataLake::DataLakeServiceClient* datalake_service_client_; + std::optional enabled_; +}; + +} // namespace arrow::fs::internal diff --git a/cpp/src/arrow/filesystem/azurefs_test.cc b/cpp/src/arrow/filesystem/azurefs_test.cc index 5d454bdc33f47..c08a4b50b77a8 100644 --- a/cpp/src/arrow/filesystem/azurefs_test.cc +++ b/cpp/src/arrow/filesystem/azurefs_test.cc @@ -34,6 +34,7 @@ #include #include "arrow/filesystem/azurefs.h" +#include "arrow/filesystem/azurefs_internal.h" #include #include @@ -46,11 +47,16 @@ #include #include #include +#include +#include "arrow/filesystem/test_util.h" +#include "arrow/result.h" #include "arrow/testing/gtest_util.h" #include "arrow/testing/util.h" #include "arrow/util/io_util.h" #include "arrow/util/key_value_metadata.h" +#include "arrow/util/string.h" +#include "arrow/util/value_parsing.h" namespace arrow { using internal::TemporaryDir; @@ -137,34 +143,37 @@ TEST(AzureFileSystem, OptionsCompare) { EXPECT_TRUE(options.Equals(options)); } -class TestAzureFileSystem : public ::testing::Test { +class AzureFileSystemTest : public ::testing::Test { public: std::shared_ptr fs_; - std::shared_ptr service_client_; + std::unique_ptr blob_service_client_; + std::unique_ptr + datalake_service_client_; + AzureOptions options_; std::mt19937_64 generator_; std::string container_name_; + bool suite_skipped_ = false; - TestAzureFileSystem() : generator_(std::random_device()()) {} + AzureFileSystemTest() : generator_(std::random_device()()) {} - AzureOptions MakeOptions() { - const std::string& account_name 
= GetAzuriteEnv()->account_name(); - const std::string& account_key = GetAzuriteEnv()->account_key(); - AzureOptions options; - options.backend = AzureBackend::Azurite; - ARROW_EXPECT_OK(options.ConfigureAccountKeyCredentials(account_name, account_key)); - return options; - } + virtual Result MakeOptions() = 0; void SetUp() override { - ASSERT_THAT(GetAzuriteEnv(), NotNull()); - ASSERT_OK(GetAzuriteEnv()->status()); - - container_name_ = RandomChars(32); auto options = MakeOptions(); - service_client_ = std::make_shared( - options.account_blob_url, options.storage_credentials_provider); - ASSERT_OK_AND_ASSIGN(fs_, AzureFileSystem::Make(options)); - auto container_client = service_client_->GetBlobContainerClient(container_name_); + if (options.ok()) { + options_ = *options; + } else { + suite_skipped_ = true; + GTEST_SKIP() << options.status().message(); + } + container_name_ = RandomChars(32); + blob_service_client_ = std::make_unique( + options_.account_blob_url, options_.storage_credentials_provider); + datalake_service_client_ = + std::make_unique( + options_.account_dfs_url, options_.storage_credentials_provider); + ASSERT_OK_AND_ASSIGN(fs_, AzureFileSystem::Make(options_)); + auto container_client = blob_service_client_->GetBlobContainerClient(container_name_); container_client.CreateIfNotExists(); auto blob_client = container_client.GetBlockBlobClient(PreexistingObjectName()); @@ -173,10 +182,13 @@ class TestAzureFileSystem : public ::testing::Test { } void TearDown() override { - auto containers = service_client_->ListBlobContainers(); - for (auto container : containers.BlobContainers) { - auto container_client = service_client_->GetBlobContainerClient(container.Name); - container_client.DeleteIfExists(); + if (!suite_skipped_) { + auto containers = blob_service_client_->ListBlobContainers(); + for (auto container : containers.BlobContainers) { + auto container_client = + blob_service_client_->GetBlobContainerClient(container.Name); + 
container_client.DeleteIfExists(); + } } } @@ -216,15 +228,175 @@ class TestAzureFileSystem : public ::testing::Test { void UploadLines(const std::vector& lines, const char* path_to_file, int total_size) { // TODO(GH-38333): Switch to using Azure filesystem to write once its implemented. - auto blob_client = service_client_->GetBlobContainerClient(PreexistingContainerName()) - .GetBlockBlobClient(path_to_file); + auto blob_client = + blob_service_client_->GetBlobContainerClient(PreexistingContainerName()) + .GetBlockBlobClient(path_to_file); std::string all_lines = std::accumulate(lines.begin(), lines.end(), std::string("")); blob_client.UploadFrom(reinterpret_cast(all_lines.data()), total_size); } + + void RunGetFileInfoObjectWithNestedStructureTest(); + void RunGetFileInfoObjectTest(); +}; + +class AzuriteFileSystemTest : public AzureFileSystemTest { + Result MakeOptions() { + EXPECT_THAT(GetAzuriteEnv(), NotNull()); + ARROW_EXPECT_OK(GetAzuriteEnv()->status()); + AzureOptions options; + options.backend = AzureBackend::Azurite; + ARROW_EXPECT_OK(options.ConfigureAccountKeyCredentials( + GetAzuriteEnv()->account_name(), GetAzuriteEnv()->account_key())); + return options; + } }; -TEST_F(TestAzureFileSystem, OpenInputStreamString) { +class AzureFlatNamespaceFileSystemTest : public AzureFileSystemTest { + Result MakeOptions() override { + AzureOptions options; + const auto account_key = std::getenv("AZURE_FLAT_NAMESPACE_ACCOUNT_KEY"); + const auto account_name = std::getenv("AZURE_FLAT_NAMESPACE_ACCOUNT_NAME"); + if (account_key && account_name) { + RETURN_NOT_OK(options.ConfigureAccountKeyCredentials(account_name, account_key)); + return options; + } + return Status::Cancelled( + "Connection details not provided for a real flat namespace " + "account."); + } +}; + +class AzureHierarchicalNamespaceFileSystemTest : public AzureFileSystemTest { + Result MakeOptions() override { + AzureOptions options; + const auto account_key = 
std::getenv("AZURE_HIERARCHICAL_NAMESPACE_ACCOUNT_KEY"); + const auto account_name = std::getenv("AZURE_HIERARCHICAL_NAMESPACE_ACCOUNT_NAME"); + if (account_key && account_name) { + RETURN_NOT_OK(options.ConfigureAccountKeyCredentials(account_name, account_key)); + return options; + } + return Status::Cancelled( + "Connection details not provided for a real hierarchical namespace " + "account."); + } +}; + +TEST_F(AzureFlatNamespaceFileSystemTest, DetectHierarchicalNamespace) { + auto hierarchical_namespace = internal::HierarchicalNamespaceDetector(); + ASSERT_OK(hierarchical_namespace.Init(datalake_service_client_.get())); + ASSERT_OK_AND_EQ(false, hierarchical_namespace.Enabled(PreexistingContainerName())); +} + +TEST_F(AzureHierarchicalNamespaceFileSystemTest, DetectHierarchicalNamespace) { + auto hierarchical_namespace = internal::HierarchicalNamespaceDetector(); + ASSERT_OK(hierarchical_namespace.Init(datalake_service_client_.get())); + ASSERT_OK_AND_EQ(true, hierarchical_namespace.Enabled(PreexistingContainerName())); +} + +TEST_F(AzuriteFileSystemTest, DetectHierarchicalNamespace) { + auto hierarchical_namespace = internal::HierarchicalNamespaceDetector(); + ASSERT_OK(hierarchical_namespace.Init(datalake_service_client_.get())); + ASSERT_OK_AND_EQ(false, hierarchical_namespace.Enabled(PreexistingContainerName())); +} + +TEST_F(AzuriteFileSystemTest, DetectHierarchicalNamespaceFailsWithMissingContainer) { + auto hierarchical_namespace = internal::HierarchicalNamespaceDetector(); + ASSERT_OK(hierarchical_namespace.Init(datalake_service_client_.get())); + ASSERT_NOT_OK(hierarchical_namespace.Enabled("non-existent-container")); +} + +TEST_F(AzuriteFileSystemTest, GetFileInfoAccount) { + AssertFileInfo(fs_.get(), "", FileType::Directory); + + // URI + ASSERT_RAISES(Invalid, fs_->GetFileInfo("abfs://")); +} + +TEST_F(AzuriteFileSystemTest, GetFileInfoContainer) { + AssertFileInfo(fs_.get(), PreexistingContainerName(), FileType::Directory); + + 
AssertFileInfo(fs_.get(), "non-existent-container", FileType::NotFound); + + // URI + ASSERT_RAISES(Invalid, fs_->GetFileInfo("abfs://" + PreexistingContainerName())); +} + +void AzureFileSystemTest::RunGetFileInfoObjectWithNestedStructureTest() { + // Adds detailed tests to handle cases of different edge cases + // with directory naming conventions (e.g. with and without slashes). + constexpr auto kObjectName = "test-object-dir/some_other_dir/another_dir/foo"; + // TODO(GH-38333): Switch to using Azure filesystem to write once its implemented. + blob_service_client_->GetBlobContainerClient(PreexistingContainerName()) + .GetBlockBlobClient(kObjectName) + .UploadFrom(reinterpret_cast(kLoremIpsum), strlen(kLoremIpsum)); + + // 0 is immediately after "/" lexicographically, ensure that this doesn't + // cause unexpected issues. + // TODO(GH-38333): Switch to using Azure filesystem to write once its implemented. + blob_service_client_->GetBlobContainerClient(PreexistingContainerName()) + .GetBlockBlobClient("test-object-dir/some_other_dir0") + .UploadFrom(reinterpret_cast(kLoremIpsum), strlen(kLoremIpsum)); + + blob_service_client_->GetBlobContainerClient(PreexistingContainerName()) + .GetBlockBlobClient(std::string(kObjectName) + "0") + .UploadFrom(reinterpret_cast(kLoremIpsum), strlen(kLoremIpsum)); + + AssertFileInfo(fs_.get(), PreexistingContainerPath() + kObjectName, FileType::File); + AssertFileInfo(fs_.get(), PreexistingContainerPath() + kObjectName + "/", + FileType::NotFound); + AssertFileInfo(fs_.get(), PreexistingContainerPath() + "test-object-dir", + FileType::Directory); + AssertFileInfo(fs_.get(), PreexistingContainerPath() + "test-object-dir/", + FileType::Directory); + AssertFileInfo(fs_.get(), PreexistingContainerPath() + "test-object-dir/some_other_dir", + FileType::Directory); + AssertFileInfo(fs_.get(), + PreexistingContainerPath() + "test-object-dir/some_other_dir/", + FileType::Directory); + + AssertFileInfo(fs_.get(), PreexistingContainerPath() + 
"test-object-di", + FileType::NotFound); + AssertFileInfo(fs_.get(), PreexistingContainerPath() + "test-object-dir/some_other_di", + FileType::NotFound); +} + +TEST_F(AzuriteFileSystemTest, GetFileInfoObjectWithNestedStructure) { + RunGetFileInfoObjectWithNestedStructureTest(); +} + +TEST_F(AzureHierarchicalNamespaceFileSystemTest, GetFileInfoObjectWithNestedStructure) { + RunGetFileInfoObjectWithNestedStructureTest(); + datalake_service_client_->GetFileSystemClient(PreexistingContainerName()) + .GetDirectoryClient("test-empty-object-dir") + .Create(); + + AssertFileInfo(fs_.get(), PreexistingContainerPath() + "test-empty-object-dir", + FileType::Directory); +} + +void AzureFileSystemTest::RunGetFileInfoObjectTest() { + auto object_properties = + blob_service_client_->GetBlobContainerClient(PreexistingContainerName()) + .GetBlobClient(PreexistingObjectName()) + .GetProperties() + .Value; + + AssertFileInfo(fs_.get(), PreexistingObjectPath(), FileType::File, + std::chrono::system_clock::time_point(object_properties.LastModified), + static_cast(object_properties.BlobSize)); + + // URI + ASSERT_RAISES(Invalid, fs_->GetFileInfo("abfs://" + PreexistingObjectName())); +} + +TEST_F(AzuriteFileSystemTest, GetFileInfoObject) { RunGetFileInfoObjectTest(); } + +TEST_F(AzureHierarchicalNamespaceFileSystemTest, GetFileInfoObject) { + RunGetFileInfoObjectTest(); +} + +TEST_F(AzuriteFileSystemTest, OpenInputStreamString) { std::shared_ptr stream; ASSERT_OK_AND_ASSIGN(stream, fs_->OpenInputStream(PreexistingObjectPath())); @@ -232,7 +404,7 @@ TEST_F(TestAzureFileSystem, OpenInputStreamString) { EXPECT_EQ(buffer->ToString(), kLoremIpsum); } -TEST_F(TestAzureFileSystem, OpenInputStreamStringBuffers) { +TEST_F(AzuriteFileSystemTest, OpenInputStreamStringBuffers) { std::shared_ptr stream; ASSERT_OK_AND_ASSIGN(stream, fs_->OpenInputStream(PreexistingObjectPath())); @@ -246,10 +418,8 @@ TEST_F(TestAzureFileSystem, OpenInputStreamStringBuffers) { EXPECT_EQ(contents, kLoremIpsum); } 
-TEST_F(TestAzureFileSystem, OpenInputStreamInfo) { - // TODO(GH-38335): When implemented use ASSERT_OK_AND_ASSIGN(info, - // fs->GetFileInfo(PreexistingObjectPath())); - arrow::fs::FileInfo info(PreexistingObjectPath(), FileType::File); +TEST_F(AzuriteFileSystemTest, OpenInputStreamInfo) { + ASSERT_OK_AND_ASSIGN(auto info, fs_->GetFileInfo(PreexistingObjectPath())); std::shared_ptr stream; ASSERT_OK_AND_ASSIGN(stream, fs_->OpenInputStream(info)); @@ -258,10 +428,10 @@ TEST_F(TestAzureFileSystem, OpenInputStreamInfo) { EXPECT_EQ(buffer->ToString(), kLoremIpsum); } -TEST_F(TestAzureFileSystem, OpenInputStreamEmpty) { +TEST_F(AzuriteFileSystemTest, OpenInputStreamEmpty) { const auto path_to_file = "empty-object.txt"; const auto path = PreexistingContainerPath() + path_to_file; - service_client_->GetBlobContainerClient(PreexistingContainerName()) + blob_service_client_->GetBlobContainerClient(PreexistingContainerName()) .GetBlockBlobClient(path_to_file) .UploadFrom(nullptr, 0); @@ -272,48 +442,92 @@ TEST_F(TestAzureFileSystem, OpenInputStreamEmpty) { EXPECT_EQ(size, 0); } -TEST_F(TestAzureFileSystem, OpenInputStreamNotFound) { +TEST_F(AzuriteFileSystemTest, OpenInputStreamNotFound) { ASSERT_RAISES(IOError, fs_->OpenInputStream(NotFoundObjectPath())); } -TEST_F(TestAzureFileSystem, OpenInputStreamInfoInvalid) { - // TODO(GH-38335): When implemented use ASSERT_OK_AND_ASSIGN(info, - // fs->GetFileInfo(PreexistingBucketPath())); - arrow::fs::FileInfo info(PreexistingContainerPath(), FileType::Directory); +TEST_F(AzuriteFileSystemTest, OpenInputStreamInfoInvalid) { + ASSERT_OK_AND_ASSIGN(auto info, fs_->GetFileInfo(PreexistingContainerPath())); ASSERT_RAISES(IOError, fs_->OpenInputStream(info)); - // TODO(GH-38335): When implemented use ASSERT_OK_AND_ASSIGN(info, - // fs->GetFileInfo(NotFoundObjectPath())); - arrow::fs::FileInfo info2(PreexistingContainerPath(), FileType::NotFound); + ASSERT_OK_AND_ASSIGN(auto info2, fs_->GetFileInfo(NotFoundObjectPath())); 
ASSERT_RAISES(IOError, fs_->OpenInputStream(info2)); } -TEST_F(TestAzureFileSystem, OpenInputStreamUri) { +TEST_F(AzuriteFileSystemTest, OpenInputStreamUri) { ASSERT_RAISES(Invalid, fs_->OpenInputStream("abfss://" + PreexistingObjectPath())); } -TEST_F(TestAzureFileSystem, OpenInputStreamTrailingSlash) { +TEST_F(AzuriteFileSystemTest, OpenInputStreamTrailingSlash) { ASSERT_RAISES(IOError, fs_->OpenInputStream(PreexistingObjectPath() + '/')); } -TEST_F(TestAzureFileSystem, OpenInputStreamReadMetadata) { - const std::string object_name = "OpenInputStreamMetadataTest/simple.txt"; - - service_client_->GetBlobContainerClient(PreexistingContainerName()) - .GetBlobClient(PreexistingObjectName()) - .SetMetadata(Azure::Storage::Metadata{{"key0", "value0"}}); +namespace { +std::shared_ptr NormalizerKeyValueMetadata( + std::shared_ptr metadata) { + auto normalized = std::make_shared(); + for (int64_t i = 0; i < metadata->size(); ++i) { + auto key = metadata->key(i); + auto value = metadata->value(i); + if (key == "Content-Hash") { + std::vector output; + output.reserve(value.size() / 2); + if (ParseHexValues(value, output.data()).ok()) { + // Valid value + value = std::string(value.size(), 'F'); + } + } else if (key == "Last-Modified" || key == "Created-On" || + key == "Access-Tier-Changed-On") { + auto parser = TimestampParser::MakeISO8601(); + int64_t output; + if ((*parser)(value.data(), value.size(), TimeUnit::NANO, &output)) { + // Valid value + value = "2023-10-31T08:15:20Z"; + } + } else if (key == "ETag") { + if (arrow::internal::StartsWith(value, "\"") && + arrow::internal::EndsWith(value, "\"")) { + // Valid value + value = "\"ETagValue\""; + } + } + normalized->Append(key, value); + } + return normalized; +} +}; // namespace +TEST_F(AzuriteFileSystemTest, OpenInputStreamReadMetadata) { std::shared_ptr stream; ASSERT_OK_AND_ASSIGN(stream, fs_->OpenInputStream(PreexistingObjectPath())); std::shared_ptr actual; ASSERT_OK_AND_ASSIGN(actual, stream->ReadMetadata()); - 
// TODO(GH-38330): This is asserting that the user defined metadata is returned but this - // is probably not the correct behaviour. - ASSERT_OK_AND_EQ("value0", actual->Get("key0")); + ASSERT_EQ( + "\n" + "-- metadata --\n" + "Content-Type: application/octet-stream\n" + "Content-Encoding: \n" + "Content-Language: \n" + "Content-Hash: FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF\n" + "Content-Disposition: \n" + "Cache-Control: \n" + "Last-Modified: 2023-10-31T08:15:20Z\n" + "Created-On: 2023-10-31T08:15:20Z\n" + "Blob-Type: BlockBlob\n" + "Lease-State: available\n" + "Lease-Status: unlocked\n" + "Content-Length: 447\n" + "ETag: \"ETagValue\"\n" + "IsServerEncrypted: true\n" + "Access-Tier: Hot\n" + "Is-Access-Tier-Inferred: true\n" + "Access-Tier-Changed-On: 2023-10-31T08:15:20Z\n" + "Has-Legal-Hold: false", + NormalizerKeyValueMetadata(actual)->ToString()); } -TEST_F(TestAzureFileSystem, OpenInputStreamClosed) { +TEST_F(AzuriteFileSystemTest, OpenInputStreamClosed) { ASSERT_OK_AND_ASSIGN(auto stream, fs_->OpenInputStream(PreexistingObjectPath())); ASSERT_OK(stream->Close()); std::array buffer{}; @@ -322,7 +536,7 @@ TEST_F(TestAzureFileSystem, OpenInputStreamClosed) { ASSERT_RAISES(Invalid, stream->Tell()); } -TEST_F(TestAzureFileSystem, OpenInputFileMixedReadVsReadAt) { +TEST_F(AzuriteFileSystemTest, OpenInputFileMixedReadVsReadAt) { // Create a file large enough to make the random access tests non-trivial. auto constexpr kLineWidth = 100; auto constexpr kLineCount = 4096; @@ -368,7 +582,7 @@ TEST_F(TestAzureFileSystem, OpenInputFileMixedReadVsReadAt) { } } -TEST_F(TestAzureFileSystem, OpenInputFileRandomSeek) { +TEST_F(AzuriteFileSystemTest, OpenInputFileRandomSeek) { // Create a file large enough to make the random access tests non-trivial. 
auto constexpr kLineWidth = 100; auto constexpr kLineCount = 4096; @@ -396,14 +610,15 @@ TEST_F(TestAzureFileSystem, OpenInputFileRandomSeek) { } } -TEST_F(TestAzureFileSystem, OpenInputFileIoContext) { +TEST_F(AzuriteFileSystemTest, OpenInputFileIoContext) { // Create a test file. const auto path_to_file = "OpenInputFileIoContext/object-name"; const auto path = PreexistingContainerPath() + path_to_file; const std::string contents = "The quick brown fox jumps over the lazy dog"; - auto blob_client = service_client_->GetBlobContainerClient(PreexistingContainerName()) - .GetBlockBlobClient(path_to_file); + auto blob_client = + blob_service_client_->GetBlobContainerClient(PreexistingContainerName()) + .GetBlockBlobClient(path_to_file); blob_client.UploadFrom(reinterpret_cast(contents.data()), contents.length()); @@ -412,10 +627,8 @@ TEST_F(TestAzureFileSystem, OpenInputFileIoContext) { EXPECT_EQ(fs_->io_context().external_id(), file->io_context().external_id()); } -TEST_F(TestAzureFileSystem, OpenInputFileInfo) { - // TODO(GH-38335): When implemented use ASSERT_OK_AND_ASSIGN(info, - // fs->GetFileInfo(PreexistingObjectPath())); - arrow::fs::FileInfo info(PreexistingObjectPath(), FileType::File); +TEST_F(AzuriteFileSystemTest, OpenInputFileInfo) { + ASSERT_OK_AND_ASSIGN(auto info, fs_->GetFileInfo(PreexistingObjectPath())); std::shared_ptr file; ASSERT_OK_AND_ASSIGN(file, fs_->OpenInputFile(info)); @@ -429,23 +642,19 @@ TEST_F(TestAzureFileSystem, OpenInputFileInfo) { EXPECT_EQ(std::string(buffer.data(), size), expected); } -TEST_F(TestAzureFileSystem, OpenInputFileNotFound) { +TEST_F(AzuriteFileSystemTest, OpenInputFileNotFound) { ASSERT_RAISES(IOError, fs_->OpenInputFile(NotFoundObjectPath())); } -TEST_F(TestAzureFileSystem, OpenInputFileInfoInvalid) { - // TODO(GH-38335): When implemented use ASSERT_OK_AND_ASSIGN(info, - // fs->GetFileInfo(PreexistingContainerPath())); - arrow::fs::FileInfo info(PreexistingContainerPath(), FileType::File); 
+TEST_F(AzuriteFileSystemTest, OpenInputFileInfoInvalid) { + ASSERT_OK_AND_ASSIGN(auto info, fs_->GetFileInfo(PreexistingContainerPath())); ASSERT_RAISES(IOError, fs_->OpenInputFile(info)); - // TODO(GH-38335): When implemented use ASSERT_OK_AND_ASSIGN(info, - // fs->GetFileInfo(NotFoundObjectPath())); - arrow::fs::FileInfo info2(NotFoundObjectPath(), FileType::NotFound); + ASSERT_OK_AND_ASSIGN(auto info2, fs_->GetFileInfo(NotFoundObjectPath())); ASSERT_RAISES(IOError, fs_->OpenInputFile(info2)); } -TEST_F(TestAzureFileSystem, OpenInputFileClosed) { +TEST_F(AzuriteFileSystemTest, OpenInputFileClosed) { ASSERT_OK_AND_ASSIGN(auto stream, fs_->OpenInputFile(PreexistingObjectPath())); ASSERT_OK(stream->Close()); std::array buffer{}; diff --git a/cpp/src/arrow/filesystem/path_util.cc b/cpp/src/arrow/filesystem/path_util.cc index 90af3c66ff8d4..46ea436a9f31a 100644 --- a/cpp/src/arrow/filesystem/path_util.cc +++ b/cpp/src/arrow/filesystem/path_util.cc @@ -191,18 +191,15 @@ std::string_view RemoveLeadingSlash(std::string_view key) { } Status AssertNoTrailingSlash(std::string_view key) { - if (key.back() == '/') { + if (HasTrailingSlash(key)) { return NotAFile(key); } return Status::OK(); } -bool HasLeadingSlash(std::string_view key) { - if (key.front() != '/') { - return false; - } - return true; -} +bool HasTrailingSlash(std::string_view key) { return key.back() == '/'; } + +bool HasLeadingSlash(std::string_view key) { return key.front() == '/'; } Result MakeAbstractPathRelative(const std::string& base, const std::string& path) { diff --git a/cpp/src/arrow/filesystem/path_util.h b/cpp/src/arrow/filesystem/path_util.h index 13a74b7fa12c8..2c8c123e779f4 100644 --- a/cpp/src/arrow/filesystem/path_util.h +++ b/cpp/src/arrow/filesystem/path_util.h @@ -94,6 +94,9 @@ std::string_view RemoveTrailingSlash(std::string_view s, bool preserve_root = fa ARROW_EXPORT Status AssertNoTrailingSlash(std::string_view s); +ARROW_EXPORT +bool HasTrailingSlash(std::string_view s); + 
ARROW_EXPORT bool HasLeadingSlash(std::string_view s); diff --git a/cpp/src/arrow/flight/CMakeLists.txt b/cpp/src/arrow/flight/CMakeLists.txt index c37d2c56700e0..91e0fbf9136d3 100644 --- a/cpp/src/arrow/flight/CMakeLists.txt +++ b/cpp/src/arrow/flight/CMakeLists.txt @@ -67,7 +67,6 @@ list(APPEND Boost::headers Boost::filesystem Boost::system - ${ARROW_GTEST_GTEST} ${ARROW_GTEST_GMOCK}) list(APPEND ARROW_FLIGHT_TEST_LINK_LIBS gRPC::grpc++) diff --git a/cpp/src/arrow/flight/integration_tests/CMakeLists.txt b/cpp/src/arrow/flight/integration_tests/CMakeLists.txt index 98a7a2a7af386..7ac314531ff4d 100644 --- a/cpp/src/arrow/flight/integration_tests/CMakeLists.txt +++ b/cpp/src/arrow/flight/integration_tests/CMakeLists.txt @@ -22,11 +22,8 @@ if(ARROW_FLIGHT_TEST_LINKAGE STREQUAL "static" AND ARROW_BUILD_STATIC) else() set(ARROW_FLIGHT_INTEGRATION_TEST_LINK_LIBS arrow_flight_sql_shared) endif() -list(APPEND - ARROW_FLIGHT_INTEGRATION_TEST_LINK_LIBS - ${ARROW_FLIGHT_TEST_LINK_LIBS} - ${GFLAGS_LIBRARIES} - ${ARROW_GTEST_GTEST}) +list(APPEND ARROW_FLIGHT_INTEGRATION_TEST_LINK_LIBS ${ARROW_FLIGHT_TEST_LINK_LIBS} + ${GFLAGS_LIBRARIES}) add_executable(flight-test-integration-server test_integration_server.cc test_integration.cc) diff --git a/cpp/src/arrow/flight/perf_server.cc b/cpp/src/arrow/flight/perf_server.cc index 40f6cbcbf0d82..87676da11213d 100644 --- a/cpp/src/arrow/flight/perf_server.cc +++ b/cpp/src/arrow/flight/perf_server.cc @@ -189,7 +189,7 @@ class FlightPerfServer : public FlightServerBase { (void)token.SerializeToString(&tmp_ticket.ticket); // All endpoints same location for now - endpoints.push_back(FlightEndpoint{tmp_ticket, {location_}, std::nullopt}); + endpoints.push_back(FlightEndpoint{tmp_ticket, {location_}, std::nullopt, ""}); } uint64_t total_records = diff --git a/cpp/src/arrow/gpu/CMakeLists.txt b/cpp/src/arrow/gpu/CMakeLists.txt index a5b176793e495..7238a0e0b7c9b 100644 --- a/cpp/src/arrow/gpu/CMakeLists.txt +++ b/cpp/src/arrow/gpu/CMakeLists.txt 
@@ -100,9 +100,10 @@ if(ARROW_BUILD_TESTS) endif() if(ARROW_BUILD_BENCHMARKS) - add_arrow_benchmark(cuda_benchmark PREFIX "arrow-gpu") - target_link_libraries(arrow-gpu-cuda-benchmark - PUBLIC ${ARROW_CUDA_LIBRARY} ${ARROW_GTEST_GTEST} - ${ARROW_BENCHMARK_LINK_LIBS}) + add_arrow_benchmark(cuda_benchmark + PREFIX + "arrow-gpu" + EXTRA_LINK_LIBS + ${ARROW_CUDA_LIBRARY}) add_dependencies(arrow_cuda-benchmarks arrow-gpu-cuda-benchmark) endif() diff --git a/cpp/src/arrow/integration/json_internal.cc b/cpp/src/arrow/integration/json_internal.cc index ed7be4b502985..59749c36a958e 100644 --- a/cpp/src/arrow/integration/json_internal.cc +++ b/cpp/src/arrow/integration/json_internal.cc @@ -48,6 +48,7 @@ #include "arrow/util/key_value_metadata.h" #include "arrow/util/logging.h" #include "arrow/util/range.h" +#include "arrow/util/span.h" #include "arrow/util/string.h" #include "arrow/util/value_parsing.h" #include "arrow/visit_array_inline.h" @@ -106,6 +107,13 @@ std::string GetTimeUnitName(TimeUnit::type unit) { return "UNKNOWN"; } +Result GetStringView(const rj::Value& str) { + if (!str.IsString()) { + return Status::Invalid("field was not a string"); + } + return std::string_view{str.GetString(), str.GetStringLength()}; +} + class SchemaWriter { public: explicit SchemaWriter(const Schema& schema, const DictionaryFieldMapper& mapper, @@ -226,8 +234,9 @@ class SchemaWriter { template enable_if_t::value || is_primitive_ctype::value || - is_base_binary_type::value || is_var_length_list_type::value || - is_struct_type::value || is_run_end_encoded_type::value> + is_base_binary_type::value || is_binary_view_like_type::value || + is_var_length_list_type::value || is_struct_type::value || + is_run_end_encoded_type::value> WriteTypeMetadata(const T& type) {} void WriteTypeMetadata(const MapType& type) { @@ -382,6 +391,8 @@ class SchemaWriter { Status Visit(const TimeType& type) { return WritePrimitive("time", type); } Status Visit(const StringType& type) { return 
WriteVarBytes("utf8", type); } Status Visit(const BinaryType& type) { return WriteVarBytes("binary", type); } + Status Visit(const StringViewType& type) { return WritePrimitive("utf8view", type); } + Status Visit(const BinaryViewType& type) { return WritePrimitive("binaryview", type); } Status Visit(const LargeStringType& type) { return WriteVarBytes("largeutf8", type); } Status Visit(const LargeBinaryType& type) { return WriteVarBytes("largebinary", type); } Status Visit(const FixedSizeBinaryType& type) { @@ -528,22 +539,19 @@ class ArrayWriter { } } - // Binary, encode to hexadecimal. - template - enable_if_binary_like WriteDataValues( - const ArrayType& arr) { - for (int64_t i = 0; i < arr.length(); ++i) { - writer_->String(HexEncode(arr.GetView(i))); - } - } - - // UTF8 string, write as is - template - enable_if_string_like WriteDataValues( - const ArrayType& arr) { + template + std::enable_if_t::value || + is_fixed_size_binary_type::value> + WriteDataValues(const ArrayType& arr) { for (int64_t i = 0; i < arr.length(); ++i) { - auto view = arr.GetView(i); - writer_->String(view.data(), static_cast(view.size())); + if constexpr (Type::is_utf8) { + // UTF8 string, write as is + auto view = arr.GetView(i); + writer_->String(view.data(), static_cast(view.size())); + } else { + // Binary, encode to hexadecimal. 
+ writer_->String(HexEncode(arr.GetView(i))); + } } } @@ -642,6 +650,50 @@ class ArrayWriter { writer_->EndArray(); } + template + void WriteBinaryViewField(const ArrayType& array) { + writer_->Key("VIEWS"); + writer_->StartArray(); + for (int64_t i = 0; i < array.length(); ++i) { + auto s = array.raw_values()[i]; + writer_->StartObject(); + writer_->Key("SIZE"); + writer_->Int64(s.size()); + if (s.is_inline()) { + writer_->Key("INLINED"); + if constexpr (ArrayType::TypeClass::is_utf8) { + writer_->String(reinterpret_cast(s.inline_data()), s.size()); + } else { + writer_->String(HexEncode(s.inline_data(), s.size())); + } + } else { + // Prefix is always 4 bytes so it may not be utf-8 even if the whole + // string view is + writer_->Key("PREFIX_HEX"); + writer_->String(HexEncode(s.inline_data(), BinaryViewType::kPrefixSize)); + writer_->Key("BUFFER_INDEX"); + writer_->Int64(s.ref.buffer_index); + writer_->Key("OFFSET"); + writer_->Int64(s.ref.offset); + } + writer_->EndObject(); + } + writer_->EndArray(); + } + + void WriteVariadicBuffersField(const BinaryViewArray& arr) { + writer_->Key("VARIADIC_DATA_BUFFERS"); + writer_->StartArray(); + const auto& buffers = arr.data()->buffers; + for (size_t i = 2; i < buffers.size(); ++i) { + // Encode the data buffers into hexadecimal strings. + // Even for arrays which contain utf-8, portions of the buffer not + // referenced by any view may be invalid. 
+ writer_->String(buffers[i]->ToHexString()); + } + writer_->EndArray(); + } + void WriteValidityField(const Array& arr) { writer_->Key("VALIDITY"); writer_->StartArray(); @@ -682,8 +734,10 @@ class ArrayWriter { } template - enable_if_t::value, Status> Visit( - const ArrayType& array) { + enable_if_t::value && + !is_binary_view_like_type::value, + Status> + Visit(const ArrayType& array) { WriteValidityField(array); WriteDataField(array); SetNoChildren(); @@ -700,6 +754,17 @@ class ArrayWriter { return Status::OK(); } + template + enable_if_binary_view_like Visit( + const ArrayType& array) { + WriteValidityField(array); + WriteBinaryViewField(array); + WriteVariadicBuffersField(array); + + SetNoChildren(); + return Status::OK(); + } + Status Visit(const DictionaryArray& array) { return VisitArrayValues(*array.indices()); } @@ -1033,6 +1098,10 @@ Result> GetType(const RjObject& json_type, return utf8(); } else if (type_name == "binary") { return binary(); + } else if (type_name == "utf8view") { + return utf8_view(); + } else if (type_name == "binaryview") { + return binary_view(); } else if (type_name == "largeutf8") { return large_utf8(); } else if (type_name == "largebinary") { @@ -1246,10 +1315,12 @@ class ArrayReader { return Status::OK(); } - Result GetDataArray(const RjObject& obj) { - ARROW_ASSIGN_OR_RAISE(const auto json_data_arr, GetMemberArray(obj, kData)); + Result GetDataArray(const RjObject& obj, + const std::string& key = kData) { + ARROW_ASSIGN_OR_RAISE(const auto json_data_arr, GetMemberArray(obj, key)); if (static_cast(json_data_arr.Size()) != length_) { - return Status::Invalid("JSON DATA array size differs from advertised array length"); + return Status::Invalid("JSON ", key, " array size ", json_data_arr.Size(), + " differs from advertised array length ", length_); } return json_data_arr; } @@ -1293,10 +1364,7 @@ class ArrayReader { RETURN_NOT_OK(builder.AppendNull()); continue; } - - DCHECK(json_val.IsString()); - std::string_view val{ - 
json_val.GetString()}; // XXX can we use json_val.GetStringLength()? + ARROW_ASSIGN_OR_RAISE(auto val, GetStringView(json_val)); int64_t offset_start = ParseOffset(json_offsets[i]); int64_t offset_end = ParseOffset(json_offsets[i + 1]); @@ -1332,6 +1400,97 @@ class ArrayReader { return FinishBuilder(&builder); } + template + enable_if_binary_view_like Visit(const ViewType& type) { + ARROW_ASSIGN_OR_RAISE(const auto json_views, GetDataArray(obj_, "VIEWS")); + ARROW_ASSIGN_OR_RAISE(const auto json_variadic_bufs, + GetMemberArray(obj_, "VARIADIC_DATA_BUFFERS")); + + using internal::Zip; + using util::span; + + BufferVector buffers; + buffers.resize(json_variadic_bufs.Size() + 2); + for (auto [json_buf, buf] : Zip(json_variadic_bufs, span{buffers}.subspan(2))) { + ARROW_ASSIGN_OR_RAISE(auto hex_string, GetStringView(json_buf)); + ARROW_ASSIGN_OR_RAISE( + buf, AllocateBuffer(static_cast(hex_string.size()) / 2, pool_)); + RETURN_NOT_OK(ParseHexValues(hex_string, buf->mutable_data())); + } + + TypedBufferBuilder validity_builder{pool_}; + RETURN_NOT_OK(validity_builder.Resize(length_)); + for (bool is_valid : is_valid_) { + validity_builder.UnsafeAppend(is_valid); + } + ARROW_ASSIGN_OR_RAISE(buffers[0], validity_builder.Finish()); + + ARROW_ASSIGN_OR_RAISE( + buffers[1], AllocateBuffer(length_ * sizeof(BinaryViewType::c_type), pool_)); + + span views{buffers[1]->mutable_data_as(), + static_cast(length_)}; + + int64_t null_count = 0; + for (auto [json_view, out_view, is_valid] : Zip(json_views, views, is_valid_)) { + if (!is_valid) { + out_view = {}; + ++null_count; + continue; + } + + DCHECK(json_view.IsObject()); + const auto& json_view_obj = json_view.GetObject(); + + auto json_size = json_view_obj.FindMember("SIZE"); + RETURN_NOT_INT("SIZE", json_size, json_view_obj); + DCHECK_GE(json_size->value.GetInt64(), 0); + auto size = static_cast(json_size->value.GetInt64()); + + if (size <= BinaryViewType::kInlineSize) { + auto json_inlined = 
json_view_obj.FindMember("INLINED"); + RETURN_NOT_STRING("INLINED", json_inlined, json_view_obj); + out_view.inlined = {size, {}}; + + if constexpr (ViewType::is_utf8) { + DCHECK_LE(json_inlined->value.GetStringLength(), BinaryViewType::kInlineSize); + memcpy(&out_view.inlined.data, json_inlined->value.GetString(), size); + } else { + DCHECK_LE(json_inlined->value.GetStringLength(), + BinaryViewType::kInlineSize * 2); + ARROW_ASSIGN_OR_RAISE(auto inlined, GetStringView(json_inlined->value)); + RETURN_NOT_OK(ParseHexValues(inlined, out_view.inlined.data.data())); + } + continue; + } + + auto json_prefix = json_view_obj.FindMember("PREFIX_HEX"); + auto json_buffer_index = json_view_obj.FindMember("BUFFER_INDEX"); + auto json_offset = json_view_obj.FindMember("OFFSET"); + RETURN_NOT_STRING("PREFIX_HEX", json_prefix, json_view_obj); + RETURN_NOT_INT("BUFFER_INDEX", json_buffer_index, json_view_obj); + RETURN_NOT_INT("OFFSET", json_offset, json_view_obj); + + out_view.ref = { + size, + {}, + static_cast(json_buffer_index->value.GetInt64()), + static_cast(json_offset->value.GetInt64()), + }; + + DCHECK_EQ(json_prefix->value.GetStringLength(), BinaryViewType::kPrefixSize * 2); + ARROW_ASSIGN_OR_RAISE(auto prefix, GetStringView(json_prefix->value)); + RETURN_NOT_OK(ParseHexValues(prefix, out_view.ref.prefix.data())); + + DCHECK_LE(static_cast(out_view.ref.buffer_index), buffers.size() - 2); + DCHECK_LE(static_cast(out_view.ref.offset) + out_view.size(), + buffers[out_view.ref.buffer_index + 2]->size()); + } + + data_ = ArrayData::Make(type_, length_, std::move(buffers), null_count); + return Status::OK(); + } + Status Visit(const DayTimeIntervalType& type) { DayTimeIntervalBuilder builder(pool_); diff --git a/cpp/src/arrow/ipc/feather.cc b/cpp/src/arrow/ipc/feather.cc index b6d3a3d7d8cbb..1ef076fac40e2 100644 --- a/cpp/src/arrow/ipc/feather.cc +++ b/cpp/src/arrow/ipc/feather.cc @@ -536,8 +536,8 @@ struct ArrayWriterV1 { is_nested_type::value || is_null_type::value || 
is_decimal_type::value || std::is_same::value || is_duration_type::value || is_interval_type::value || is_fixed_size_binary_type::value || - std::is_same::value || std::is_same::value || - std::is_same::value, + is_binary_view_like_type::value || std::is_same::value || + std::is_same::value || std::is_same::value, Status>::type Visit(const T& type) { return Status::NotImplemented(type.ToString()); diff --git a/cpp/src/arrow/ipc/feather_test.cc b/cpp/src/arrow/ipc/feather_test.cc index e1d4282cb2635..0b6ae4f620647 100644 --- a/cpp/src/arrow/ipc/feather_test.cc +++ b/cpp/src/arrow/ipc/feather_test.cc @@ -264,7 +264,8 @@ TEST_P(TestFeather, TimeTypes) { TEST_P(TestFeather, VLenPrimitiveRoundTrip) { std::shared_ptr batch; - ASSERT_OK(ipc::test::MakeStringTypesRecordBatch(&batch)); + ASSERT_OK(ipc::test::MakeStringTypesRecordBatch(&batch, /*with_nulls=*/true, + /*with_view_types=*/false)); CheckRoundtrip(batch); } @@ -306,7 +307,8 @@ TEST_P(TestFeather, SliceFloatRoundTrip) { TEST_P(TestFeather, SliceStringsRoundTrip) { std::shared_ptr batch; - ASSERT_OK(ipc::test::MakeStringTypesRecordBatch(&batch, /*with_nulls=*/true)); + ASSERT_OK(ipc::test::MakeStringTypesRecordBatch(&batch, /*with_nulls=*/true, + /*with_view_types=*/false)); CheckSlices(batch); } diff --git a/cpp/src/arrow/ipc/json_simple.cc b/cpp/src/arrow/ipc/json_simple.cc index eea0c9730283e..4d2d803f3f65e 100644 --- a/cpp/src/arrow/ipc/json_simple.cc +++ b/cpp/src/arrow/ipc/json_simple.cc @@ -847,6 +847,8 @@ Status GetDictConverter(const std::shared_ptr& type, PARAM_CONVERTER_CASE(Type::BINARY, StringConverter, BinaryType) PARAM_CONVERTER_CASE(Type::LARGE_STRING, StringConverter, LargeStringType) PARAM_CONVERTER_CASE(Type::LARGE_BINARY, StringConverter, LargeBinaryType) + PARAM_CONVERTER_CASE(Type::STRING_VIEW, StringConverter, StringViewType) + PARAM_CONVERTER_CASE(Type::BINARY_VIEW, StringConverter, BinaryViewType) SIMPLE_CONVERTER_CASE(Type::FIXED_SIZE_BINARY, FixedSizeBinaryConverter, FixedSizeBinaryType) 
SIMPLE_CONVERTER_CASE(Type::DECIMAL128, Decimal128Converter, Decimal128Type) @@ -905,6 +907,8 @@ Status GetConverter(const std::shared_ptr& type, SIMPLE_CONVERTER_CASE(Type::BINARY, StringConverter) SIMPLE_CONVERTER_CASE(Type::LARGE_STRING, StringConverter) SIMPLE_CONVERTER_CASE(Type::LARGE_BINARY, StringConverter) + SIMPLE_CONVERTER_CASE(Type::STRING_VIEW, StringConverter) + SIMPLE_CONVERTER_CASE(Type::BINARY_VIEW, StringConverter) SIMPLE_CONVERTER_CASE(Type::FIXED_SIZE_BINARY, FixedSizeBinaryConverter<>) SIMPLE_CONVERTER_CASE(Type::DECIMAL128, Decimal128Converter<>) SIMPLE_CONVERTER_CASE(Type::DECIMAL256, Decimal256Converter<>) diff --git a/cpp/src/arrow/ipc/json_simple_test.cc b/cpp/src/arrow/ipc/json_simple_test.cc index 6eee5955242aa..b67c26999945b 100644 --- a/cpp/src/arrow/ipc/json_simple_test.cc +++ b/cpp/src/arrow/ipc/json_simple_test.cc @@ -271,7 +271,13 @@ INSTANTIATE_TYPED_TEST_SUITE_P(TestHalfFloat, TestIntegers, HalfFloatType); template class TestStrings : public ::testing::Test { public: - std::shared_ptr type() { return TypeTraits::type_singleton(); } + std::shared_ptr type() const { + if constexpr (is_binary_view_like_type::value) { + return T::is_utf8 ? 
utf8_view() : binary_view(); + } else { + return TypeTraits::type_singleton(); + } + } }; TYPED_TEST_SUITE_P(TestStrings); @@ -327,6 +333,8 @@ INSTANTIATE_TYPED_TEST_SUITE_P(TestString, TestStrings, StringType); INSTANTIATE_TYPED_TEST_SUITE_P(TestBinary, TestStrings, BinaryType); INSTANTIATE_TYPED_TEST_SUITE_P(TestLargeString, TestStrings, LargeStringType); INSTANTIATE_TYPED_TEST_SUITE_P(TestLargeBinary, TestStrings, LargeBinaryType); +INSTANTIATE_TYPED_TEST_SUITE_P(TestStringView, TestStrings, StringViewType); +INSTANTIATE_TYPED_TEST_SUITE_P(TestBinaryView, TestStrings, BinaryViewType); TEST(TestNull, Basics) { std::shared_ptr type = null(); diff --git a/cpp/src/arrow/ipc/metadata_internal.cc b/cpp/src/arrow/ipc/metadata_internal.cc index 1394516ecd5ce..ab1a58dd1df8b 100644 --- a/cpp/src/arrow/ipc/metadata_internal.cc +++ b/cpp/src/arrow/ipc/metadata_internal.cc @@ -258,6 +258,9 @@ Status ConcreteTypeFromFlatbuffer(flatbuf::Type type, const void* type_data, case flatbuf::Type::LargeBinary: *out = large_binary(); return Status::OK(); + case flatbuf::Type::BinaryView: + *out = binary_view(); + return Status::OK(); case flatbuf::Type::FixedSizeBinary: { auto fw_binary = static_cast(type_data); return FixedSizeBinaryType::Make(fw_binary->byteWidth()).Value(out); @@ -268,6 +271,9 @@ Status ConcreteTypeFromFlatbuffer(flatbuf::Type type, const void* type_data, case flatbuf::Type::LargeUtf8: *out = large_utf8(); return Status::OK(); + case flatbuf::Type::Utf8View: + *out = utf8_view(); + return Status::OK(); case flatbuf::Type::Bool: *out = boolean(); return Status::OK(); @@ -534,6 +540,18 @@ class FieldToFlatbufferVisitor { return Status::OK(); } + Status Visit(const BinaryViewType& type) { + fb_type_ = flatbuf::Type::BinaryView; + type_offset_ = flatbuf::CreateBinaryView(fbb_).Union(); + return Status::OK(); + } + + Status Visit(const StringViewType& type) { + fb_type_ = flatbuf::Type::Utf8View; + type_offset_ = flatbuf::CreateUtf8View(fbb_).Union(); + return 
Status::OK(); + } + Status Visit(const LargeBinaryType& type) { fb_type_ = flatbuf::Type::LargeBinary; type_offset_ = flatbuf::CreateLargeBinary(fbb_).Union(); @@ -967,6 +985,7 @@ static Status GetBodyCompression(FBB& fbb, const IpcWriteOptions& options, static Status MakeRecordBatch(FBB& fbb, int64_t length, int64_t body_length, const std::vector& nodes, const std::vector& buffers, + const std::vector& variadic_buffer_counts, const IpcWriteOptions& options, RecordBatchOffset* offset) { FieldNodeVector fb_nodes; RETURN_NOT_OK(WriteFieldNodes(fbb, nodes, &fb_nodes)); @@ -977,7 +996,13 @@ static Status MakeRecordBatch(FBB& fbb, int64_t length, int64_t body_length, BodyCompressionOffset fb_compression; RETURN_NOT_OK(GetBodyCompression(fbb, options, &fb_compression)); - *offset = flatbuf::CreateRecordBatch(fbb, length, fb_nodes, fb_buffers, fb_compression); + flatbuffers::Offset> fb_variadic_buffer_counts{}; + if (!variadic_buffer_counts.empty()) { + fb_variadic_buffer_counts = fbb.CreateVector(variadic_buffer_counts); + } + + *offset = flatbuf::CreateRecordBatch(fbb, length, fb_nodes, fb_buffers, fb_compression, + fb_variadic_buffer_counts); return Status::OK(); } @@ -1224,11 +1249,12 @@ Status WriteRecordBatchMessage( int64_t length, int64_t body_length, const std::shared_ptr& custom_metadata, const std::vector& nodes, const std::vector& buffers, - const IpcWriteOptions& options, std::shared_ptr* out) { + const std::vector& variadic_buffer_counts, const IpcWriteOptions& options, + std::shared_ptr* out) { FBB fbb; RecordBatchOffset record_batch; - RETURN_NOT_OK( - MakeRecordBatch(fbb, length, body_length, nodes, buffers, options, &record_batch)); + RETURN_NOT_OK(MakeRecordBatch(fbb, length, body_length, nodes, buffers, + variadic_buffer_counts, options, &record_batch)); return WriteFBMessage(fbb, flatbuf::MessageHeader::RecordBatch, record_batch.Union(), body_length, options.metadata_version, custom_metadata, options.memory_pool) @@ -1285,11 +1311,12 @@ Status 
WriteDictionaryMessage( int64_t id, bool is_delta, int64_t length, int64_t body_length, const std::shared_ptr& custom_metadata, const std::vector& nodes, const std::vector& buffers, - const IpcWriteOptions& options, std::shared_ptr* out) { + const std::vector& variadic_buffer_counts, const IpcWriteOptions& options, + std::shared_ptr* out) { FBB fbb; RecordBatchOffset record_batch; - RETURN_NOT_OK( - MakeRecordBatch(fbb, length, body_length, nodes, buffers, options, &record_batch)); + RETURN_NOT_OK(MakeRecordBatch(fbb, length, body_length, nodes, buffers, + variadic_buffer_counts, options, &record_batch)); auto dictionary_batch = flatbuf::CreateDictionaryBatch(fbb, id, record_batch, is_delta).Union(); return WriteFBMessage(fbb, flatbuf::MessageHeader::DictionaryBatch, dictionary_batch, diff --git a/cpp/src/arrow/ipc/metadata_internal.h b/cpp/src/arrow/ipc/metadata_internal.h index abbed5b2dace0..631a336f75a9a 100644 --- a/cpp/src/arrow/ipc/metadata_internal.h +++ b/cpp/src/arrow/ipc/metadata_internal.h @@ -201,7 +201,8 @@ Status WriteRecordBatchMessage( const int64_t length, const int64_t body_length, const std::shared_ptr& custom_metadata, const std::vector& nodes, const std::vector& buffers, - const IpcWriteOptions& options, std::shared_ptr* out); + const std::vector& variadic_counts, const IpcWriteOptions& options, + std::shared_ptr* out); ARROW_EXPORT Result> WriteTensorMessage(const Tensor& tensor, @@ -225,7 +226,8 @@ Status WriteDictionaryMessage( const int64_t body_length, const std::shared_ptr& custom_metadata, const std::vector& nodes, const std::vector& buffers, - const IpcWriteOptions& options, std::shared_ptr* out); + const std::vector& variadic_counts, const IpcWriteOptions& options, + std::shared_ptr* out); static inline Result> WriteFlatbufferBuilder( flatbuffers::FlatBufferBuilder& fbb, // NOLINT non-const reference diff --git a/cpp/src/arrow/ipc/read_write_test.cc b/cpp/src/arrow/ipc/read_write_test.cc index 3ae007c20efe7..313346b5deced 100644 --- 
a/cpp/src/arrow/ipc/read_write_test.cc +++ b/cpp/src/arrow/ipc/read_write_test.cc @@ -159,7 +159,7 @@ TEST_P(TestMessage, SerializeCustomMetadata) { ASSERT_OK(internal::WriteRecordBatchMessage( /*length=*/0, /*body_length=*/0, metadata, /*nodes=*/{}, - /*buffers=*/{}, options_, &serialized)); + /*buffers=*/{}, /*variadic_counts=*/{}, options_, &serialized)); ASSERT_OK_AND_ASSIGN(std::unique_ptr message, Message::Open(serialized, /*body=*/nullptr)); @@ -240,23 +240,33 @@ class TestSchemaMetadata : public ::testing::Test { } }; -const std::shared_ptr INT32 = std::make_shared(); - TEST_F(TestSchemaMetadata, PrimitiveFields) { - auto f0 = field("f0", std::make_shared()); - auto f1 = field("f1", std::make_shared(), false); - auto f2 = field("f2", std::make_shared()); - auto f3 = field("f3", std::make_shared()); - auto f4 = field("f4", std::make_shared()); - auto f5 = field("f5", std::make_shared()); - auto f6 = field("f6", std::make_shared()); - auto f7 = field("f7", std::make_shared()); - auto f8 = field("f8", std::make_shared()); - auto f9 = field("f9", std::make_shared(), false); - auto f10 = field("f10", std::make_shared()); - - Schema schema({f0, f1, f2, f3, f4, f5, f6, f7, f8, f9, f10}); - CheckSchemaRoundtrip(schema); + CheckSchemaRoundtrip(Schema({ + field("f0", int8()), + field("f1", int16(), false), + field("f2", int32()), + field("f3", int64()), + field("f4", uint8()), + field("f5", uint16()), + field("f6", uint32()), + field("f7", uint64()), + field("f8", float32()), + field("f9", float64(), false), + field("f10", boolean()), + })); +} + +TEST_F(TestSchemaMetadata, BinaryFields) { + CheckSchemaRoundtrip(Schema({ + field("f0", utf8()), + field("f1", binary()), + field("f2", large_utf8()), + field("f3", large_binary()), + field("f4", utf8_view()), + field("f5", binary_view()), + field("f6", fixed_size_binary(3)), + field("f7", fixed_size_binary(33)), + })); } TEST_F(TestSchemaMetadata, PrimitiveFieldsWithKeyValueMetadata) { @@ -269,15 +279,14 @@ 
TEST_F(TestSchemaMetadata, PrimitiveFieldsWithKeyValueMetadata) { } TEST_F(TestSchemaMetadata, NestedFields) { - auto type = list(int32()); - auto f0 = field("f0", type); - - std::shared_ptr type2( - new StructType({field("k1", INT32), field("k2", INT32), field("k3", INT32)})); - auto f1 = field("f1", type2); - - Schema schema({f0, f1}); - CheckSchemaRoundtrip(schema); + CheckSchemaRoundtrip(Schema({ + field("f0", list(int32())), + field("f1", struct_({ + field("k1", int32()), + field("k2", int32()), + field("k3", int32()), + })), + })); } // Verify that nullable=false is well-preserved for child fields of map type. @@ -305,19 +314,15 @@ TEST_F(TestSchemaMetadata, NestedFieldsWithKeyValueMetadata) { TEST_F(TestSchemaMetadata, DictionaryFields) { { - auto dict_type = dictionary(int8(), int32(), true /* ordered */); - auto f0 = field("f0", dict_type); - auto f1 = field("f1", list(dict_type)); - - Schema schema({f0, f1}); - CheckSchemaRoundtrip(schema); + auto dict_type = dictionary(int8(), int32(), /*ordered=*/true); + CheckSchemaRoundtrip(Schema({ + field("f0", dict_type), + field("f1", list(dict_type)), + })); } { auto dict_type = dictionary(int8(), list(int32())); - auto f0 = field("f0", dict_type); - - Schema schema({f0}); - CheckSchemaRoundtrip(schema); + CheckSchemaRoundtrip(Schema({field("f0", dict_type)})); } } @@ -325,9 +330,7 @@ TEST_F(TestSchemaMetadata, NestedDictionaryFields) { { auto inner_dict_type = dictionary(int8(), int32(), /*ordered=*/true); auto dict_type = dictionary(int16(), list(inner_dict_type)); - - Schema schema({field("f0", dict_type)}); - CheckSchemaRoundtrip(schema); + CheckSchemaRoundtrip(Schema({field("f0", dict_type)})); } { auto dict_type1 = dictionary(int8(), utf8(), /*ordered=*/true); @@ -2161,6 +2164,43 @@ TEST(TestRecordBatchStreamReader, MalformedInput) { ASSERT_RAISES(Invalid, RecordBatchStreamReader::Open(&garbage_reader)); } +namespace { +class EndlessCollectListener : public CollectListener { + public: + 
EndlessCollectListener() : CollectListener(), decoder_(nullptr) {} + + void SetDecoder(StreamDecoder* decoder) { decoder_ = decoder; } + + arrow::Status OnEOS() override { return decoder_->Reset(); } + + private: + StreamDecoder* decoder_; +}; +}; // namespace + +TEST(TestStreamDecoder, Reset) { + auto listener = std::make_shared(); + StreamDecoder decoder(listener); + listener->SetDecoder(&decoder); + + std::shared_ptr batch; + ASSERT_OK(MakeIntRecordBatch(&batch)); + StreamWriterHelper writer_helper; + ASSERT_OK(writer_helper.Init(batch->schema(), IpcWriteOptions::Defaults())); + ASSERT_OK(writer_helper.WriteBatch(batch)); + ASSERT_OK(writer_helper.Finish()); + + ASSERT_OK_AND_ASSIGN(auto all_buffer, ConcatenateBuffers({writer_helper.buffer_, + writer_helper.buffer_})); + // Consume by Buffer + ASSERT_OK(decoder.Consume(all_buffer)); + ASSERT_EQ(2, listener->num_record_batches()); + + // Consume by raw data + ASSERT_OK(decoder.Consume(all_buffer->data(), all_buffer->size())); + ASSERT_EQ(4, listener->num_record_batches()); +} + TEST(TestStreamDecoder, NextRequiredSize) { auto listener = std::make_shared(); StreamDecoder decoder(listener); @@ -2910,21 +2950,21 @@ void GetReadRecordBatchReadRanges( // 1) read magic and footer length IO // 2) read footer IO // 3) read record batch metadata IO - ASSERT_EQ(read_ranges.size(), 3 + expected_body_read_lengths.size()); + EXPECT_EQ(read_ranges.size(), 3 + expected_body_read_lengths.size()); const int32_t magic_size = static_cast(strlen(ipc::internal::kArrowMagicBytes)); // read magic and footer length IO auto file_end_size = magic_size + sizeof(int32_t); auto footer_length_offset = buffer->size() - file_end_size; auto footer_length = bit_util::FromLittleEndian( util::SafeLoadAs(buffer->data() + footer_length_offset)); - ASSERT_EQ(read_ranges[0].length, file_end_size); + EXPECT_EQ(read_ranges[0].length, file_end_size); // read footer IO - ASSERT_EQ(read_ranges[1].length, footer_length); + EXPECT_EQ(read_ranges[1].length, 
footer_length); // read record batch metadata. The exact size is tricky to determine but it doesn't // matter for this test and it should be smaller than the footer. - ASSERT_LT(read_ranges[2].length, footer_length); + EXPECT_LE(read_ranges[2].length, footer_length); for (uint32_t i = 0; i < expected_body_read_lengths.size(); i++) { - ASSERT_EQ(read_ranges[3 + i].length, expected_body_read_lengths[i]); + EXPECT_EQ(read_ranges[3 + i].length, expected_body_read_lengths[i]); } } diff --git a/cpp/src/arrow/ipc/reader.cc b/cpp/src/arrow/ipc/reader.cc index 6e801e1f8adb7..5dd01f2015dd7 100644 --- a/cpp/src/arrow/ipc/reader.cc +++ b/cpp/src/arrow/ipc/reader.cc @@ -248,6 +248,15 @@ class ArrayLoader { } } + Result GetVariadicCount(int i) { + auto* variadic_counts = metadata_->variadicBufferCounts(); + CHECK_FLATBUFFERS_NOT_NULL(variadic_counts, "RecordBatch.variadicBufferCounts"); + if (i >= static_cast(variadic_counts->size())) { + return Status::IOError("variadic_count_index out of range."); + } + return static_cast(variadic_counts->Get(i)); + } + Status GetFieldMetadata(int field_index, ArrayData* out) { auto nodes = metadata_->nodes(); CHECK_FLATBUFFERS_NOT_NULL(nodes, "Table.nodes"); @@ -296,7 +305,6 @@ class ArrayLoader { return Status::OK(); } - template Status LoadBinary(Type::type type_id) { DCHECK_NE(out_, nullptr); out_->buffers.resize(3); @@ -355,7 +363,22 @@ class ArrayLoader { template enable_if_base_binary Visit(const T& type) { - return LoadBinary(type.id()); + return LoadBinary(type.id()); + } + + Status Visit(const BinaryViewType& type) { + out_->buffers.resize(2); + + RETURN_NOT_OK(LoadCommon(type.id())); + RETURN_NOT_OK(GetBuffer(buffer_index_++, &out_->buffers[1])); + + ARROW_ASSIGN_OR_RAISE(auto character_buffer_count, + GetVariadicCount(variadic_count_index_++)); + out_->buffers.resize(character_buffer_count + 2); + for (size_t i = 0; i < character_buffer_count; ++i) { + RETURN_NOT_OK(GetBuffer(buffer_index_++, &out_->buffers[i + 2])); + } + return 
Status::OK(); } Status Visit(const FixedSizeBinaryType& type) { @@ -450,6 +473,7 @@ class ArrayLoader { int buffer_index_ = 0; int field_index_ = 0; bool skip_io_ = false; + int variadic_count_index_ = 0; BatchDataReadRequest read_request_; const Field* field_ = nullptr; @@ -580,10 +604,9 @@ Result> LoadRecordBatchSubset( // swap endian in a set of ArrayData if necessary (swap_endian == true) if (context.swap_endian) { - for (int i = 0; i < static_cast(filtered_columns.size()); ++i) { - ARROW_ASSIGN_OR_RAISE(filtered_columns[i], - arrow::internal::SwapEndianArrayData( - filtered_columns[i], context.options.memory_pool)); + for (auto& filtered_column : filtered_columns) { + ARROW_ASSIGN_OR_RAISE(filtered_column, + arrow::internal::SwapEndianArrayData(filtered_column)); } } return RecordBatch::Make(std::move(filtered_schema), metadata->length(), @@ -909,14 +932,18 @@ class StreamDecoderInternal : public MessageDecoderListener { return listener_->OnEOS(); } + std::shared_ptr listener() const { return listener_; } + Listener* raw_listener() const { return listener_.get(); } + IpcReadOptions options() const { return options_; } + + State state() const { return state_; } + std::shared_ptr schema() const { return filtered_schema_; } ReadStats stats() const { return stats_; } - State state() const { return state_; } - int num_required_initial_dictionaries() const { return num_required_initial_dictionaries_; } @@ -2016,6 +2043,8 @@ class StreamDecoder::StreamDecoderImpl : public StreamDecoderInternal { int64_t next_required_size() const { return message_decoder_.next_required_size(); } + const MessageDecoder* message_decoder() const { return &message_decoder_; } + private: MessageDecoder message_decoder_; }; @@ -2027,10 +2056,75 @@ StreamDecoder::StreamDecoder(std::shared_ptr listener, IpcReadOptions StreamDecoder::~StreamDecoder() {} Status StreamDecoder::Consume(const uint8_t* data, int64_t size) { - return impl_->Consume(data, size); + while (size > 0) { + const auto 
next_required_size = impl_->next_required_size(); + if (next_required_size == 0) { + break; + } + if (size < next_required_size) { + break; + } + ARROW_RETURN_NOT_OK(impl_->Consume(data, next_required_size)); + data += next_required_size; + size -= next_required_size; + } + if (size > 0) { + return impl_->Consume(data, size); + } else { + return arrow::Status::OK(); + } } + Status StreamDecoder::Consume(std::shared_ptr buffer) { - return impl_->Consume(std::move(buffer)); + if (buffer->size() == 0) { + return arrow::Status::OK(); + } + if (impl_->next_required_size() == 0 || buffer->size() <= impl_->next_required_size()) { + return impl_->Consume(std::move(buffer)); + } else { + int64_t offset = 0; + while (true) { + const auto next_required_size = impl_->next_required_size(); + if (next_required_size == 0) { + break; + } + if (buffer->size() - offset <= next_required_size) { + break; + } + if (buffer->is_cpu()) { + switch (impl_->message_decoder()->state()) { + case MessageDecoder::State::INITIAL: + case MessageDecoder::State::METADATA_LENGTH: + // We don't need to pass a sliced buffer because + // MessageDecoder doesn't keep reference of the given + // buffer on these states. 
+ ARROW_RETURN_NOT_OK( + impl_->Consume(buffer->data() + offset, next_required_size)); + break; + default: + ARROW_RETURN_NOT_OK( + impl_->Consume(SliceBuffer(buffer, offset, next_required_size))); + break; + } + } else { + ARROW_RETURN_NOT_OK( + impl_->Consume(SliceBuffer(buffer, offset, next_required_size))); + } + offset += next_required_size; + } + if (buffer->size() - offset == 0) { + return arrow::Status::OK(); + } else if (offset == 0) { + return impl_->Consume(std::move(buffer)); + } else { + return impl_->Consume(SliceBuffer(std::move(buffer), offset)); + } + } +} + +Status StreamDecoder::Reset() { + impl_ = std::make_unique(impl_->listener(), impl_->options()); + return Status::OK(); } std::shared_ptr StreamDecoder::schema() const { return impl_->schema(); } diff --git a/cpp/src/arrow/ipc/reader.h b/cpp/src/arrow/ipc/reader.h index 0d7ae22264052..de4606094049c 100644 --- a/cpp/src/arrow/ipc/reader.h +++ b/cpp/src/arrow/ipc/reader.h @@ -425,6 +425,14 @@ class ARROW_EXPORT StreamDecoder { /// \return Status Status Consume(std::shared_ptr buffer); + /// \brief Reset the internal status. + /// + /// You can reuse this decoder for new stream after calling + /// this. 
+ /// + /// \return Status + Status Reset(); + /// \return the shared schema of the record batches in the stream std::shared_ptr schema() const; diff --git a/cpp/src/arrow/ipc/test_common.cc b/cpp/src/arrow/ipc/test_common.cc index 53721c0b20fbc..6faaf96b332d4 100644 --- a/cpp/src/arrow/ipc/test_common.cc +++ b/cpp/src/arrow/ipc/test_common.cc @@ -351,39 +351,32 @@ static Status MakeBinaryArrayWithUniqueValues(int64_t length, bool include_nulls return builder.Finish(out); } -Status MakeStringTypesRecordBatch(std::shared_ptr* out, bool with_nulls) { +Status MakeStringTypesRecordBatch(std::shared_ptr* out, bool with_nulls, + bool with_view_types) { const int64_t length = 500; - auto f0 = field("strings", utf8()); - auto f1 = field("binaries", binary()); - auto f2 = field("large_strings", large_utf8()); - auto f3 = field("large_binaries", large_binary()); - auto schema = ::arrow::schema({f0, f1, f2, f3}); - - std::shared_ptr a0, a1, a2, a3; - MemoryPool* pool = default_memory_pool(); - // Quirk with RETURN_NOT_OK macro and templated functions - { - auto s = - MakeBinaryArrayWithUniqueValues(length, with_nulls, pool, &a0); - RETURN_NOT_OK(s); + ArrayVector arrays; + FieldVector fields; + + auto AppendColumn = [&](auto& MakeArray) { + arrays.emplace_back(); + RETURN_NOT_OK(MakeArray(length, with_nulls, default_memory_pool(), &arrays.back())); + + const auto& type = arrays.back()->type(); + fields.push_back(field(type->ToString(), type)); + return Status::OK(); + }; + + RETURN_NOT_OK(AppendColumn(MakeBinaryArrayWithUniqueValues)); + RETURN_NOT_OK(AppendColumn(MakeBinaryArrayWithUniqueValues)); + RETURN_NOT_OK(AppendColumn(MakeBinaryArrayWithUniqueValues)); + RETURN_NOT_OK(AppendColumn(MakeBinaryArrayWithUniqueValues)); + if (with_view_types) { + RETURN_NOT_OK(AppendColumn(MakeBinaryArrayWithUniqueValues)); + RETURN_NOT_OK(AppendColumn(MakeBinaryArrayWithUniqueValues)); } - { - auto s = - MakeBinaryArrayWithUniqueValues(length, with_nulls, pool, &a1); - RETURN_NOT_OK(s); 
- } - { - auto s = MakeBinaryArrayWithUniqueValues(length, with_nulls, pool, - &a2); - RETURN_NOT_OK(s); - } - { - auto s = MakeBinaryArrayWithUniqueValues(length, with_nulls, pool, - &a3); - RETURN_NOT_OK(s); - } - *out = RecordBatch::Make(schema, length, {a0, a1, a2, a3}); + + *out = RecordBatch::Make(schema(std::move(fields)), length, std::move(arrays)); return Status::OK(); } diff --git a/cpp/src/arrow/ipc/test_common.h b/cpp/src/arrow/ipc/test_common.h index 5e0c65556c630..fc0c8ddbea319 100644 --- a/cpp/src/arrow/ipc/test_common.h +++ b/cpp/src/arrow/ipc/test_common.h @@ -96,7 +96,7 @@ Status MakeRandomStringArray(int64_t length, bool include_nulls, MemoryPool* poo ARROW_TESTING_EXPORT Status MakeStringTypesRecordBatch(std::shared_ptr* out, - bool with_nulls = true); + bool with_nulls = true, bool with_view_types = true); ARROW_TESTING_EXPORT Status MakeStringTypesRecordBatchWithNulls(std::shared_ptr* out); diff --git a/cpp/src/arrow/ipc/writer.cc b/cpp/src/arrow/ipc/writer.cc index e4b49ed56464e..9668f459d0d31 100644 --- a/cpp/src/arrow/ipc/writer.cc +++ b/cpp/src/arrow/ipc/writer.cc @@ -52,10 +52,12 @@ #include "arrow/util/checked_cast.h" #include "arrow/util/compression.h" #include "arrow/util/endian.h" +#include "arrow/util/int_util_overflow.h" #include "arrow/util/key_value_metadata.h" #include "arrow/util/logging.h" #include "arrow/util/parallel.h" #include "arrow/visit_array_inline.h" +#include "arrow/visit_data_inline.h" #include "arrow/visit_type_inline.h" namespace arrow { @@ -174,7 +176,8 @@ class RecordBatchSerializer { // Override this for writing dictionary metadata virtual Status SerializeMetadata(int64_t num_rows) { return WriteRecordBatchMessage(num_rows, out_->body_length, custom_metadata_, - field_nodes_, buffer_meta_, options_, &out_->metadata); + field_nodes_, buffer_meta_, variadic_counts_, options_, + &out_->metadata); } bool ShouldCompress(int64_t uncompressed_size, int64_t compressed_size) const { @@ -296,6 +299,8 @@ class 
RecordBatchSerializer { offset += size + padding; } + variadic_counts_ = out_->variadic_buffer_counts; + out_->body_length = offset - buffer_start_offset_; DCHECK(bit_util::IsMultipleOf8(out_->body_length)); @@ -403,6 +408,18 @@ class RecordBatchSerializer { return Status::OK(); } + Status Visit(const BinaryViewArray& array) { + auto views = SliceBuffer(array.values(), array.offset() * BinaryViewType::kSize, + array.length() * BinaryViewType::kSize); + out_->body_buffers.emplace_back(std::move(views)); + + out_->variadic_buffer_counts.emplace_back(array.data()->buffers.size() - 2); + for (size_t i = 2; i < array.data()->buffers.size(); ++i) { + out_->body_buffers.emplace_back(array.data()->buffers[i]); + } + return Status::OK(); + } + template enable_if_var_size_list Visit(const T& array) { using offset_type = typename T::offset_type; @@ -590,6 +607,7 @@ class RecordBatchSerializer { std::vector field_nodes_; std::vector buffer_meta_; + std::vector variadic_counts_; const IpcWriteOptions& options_; int64_t max_recursion_depth_; @@ -606,8 +624,8 @@ class DictionarySerializer : public RecordBatchSerializer { Status SerializeMetadata(int64_t num_rows) override { return WriteDictionaryMessage(dictionary_id_, is_delta_, num_rows, out_->body_length, - custom_metadata_, field_nodes_, buffer_meta_, options_, - &out_->metadata); + custom_metadata_, field_nodes_, buffer_meta_, + variadic_counts_, options_, &out_->metadata); } Status Assemble(const std::shared_ptr& dictionary) { diff --git a/cpp/src/arrow/ipc/writer.h b/cpp/src/arrow/ipc/writer.h index 9e18a213ba3f2..4e0ee3dfc8b44 100644 --- a/cpp/src/arrow/ipc/writer.h +++ b/cpp/src/arrow/ipc/writer.h @@ -57,6 +57,7 @@ struct IpcPayload { MessageType type = MessageType::NONE; std::shared_ptr metadata; std::vector> body_buffers; + std::vector variadic_buffer_counts; int64_t body_length = 0; // serialized body length (padded, maybe compressed) int64_t raw_body_length = 0; // initial uncompressed body length }; diff --git 
a/cpp/src/arrow/json/converter.cc b/cpp/src/arrow/json/converter.cc index 04ebe4714ceec..c393b77acf334 100644 --- a/cpp/src/arrow/json/converter.cc +++ b/cpp/src/arrow/json/converter.cc @@ -304,6 +304,8 @@ Status MakeConverter(const std::shared_ptr& out_type, MemoryPool* pool CONVERTER_CASE(Type::STRING, BinaryConverter); CONVERTER_CASE(Type::LARGE_BINARY, BinaryConverter); CONVERTER_CASE(Type::LARGE_STRING, BinaryConverter); + CONVERTER_CASE(Type::BINARY_VIEW, BinaryConverter); + CONVERTER_CASE(Type::STRING_VIEW, BinaryConverter); CONVERTER_CASE(Type::DECIMAL128, DecimalConverter); CONVERTER_CASE(Type::DECIMAL256, DecimalConverter); default: diff --git a/cpp/src/arrow/json/converter_test.cc b/cpp/src/arrow/json/converter_test.cc index 378a4491c0e62..cfc44c99976d5 100644 --- a/cpp/src/arrow/json/converter_test.cc +++ b/cpp/src/arrow/json/converter_test.cc @@ -131,8 +131,8 @@ TEST(ConverterTest, Floats) { } } -TEST(ConverterTest, StringAndLargeString) { - for (auto string_type : {utf8(), large_utf8()}) { +TEST(ConverterTest, StringAndLargeStringAndStringView) { + for (auto string_type : {utf8(), large_utf8(), utf8_view()}) { ParseOptions options; options.explicit_schema = schema({field("", string_type)}); diff --git a/cpp/src/arrow/json/parser.cc b/cpp/src/arrow/json/parser.cc index e2941a29ab9bd..185dcde355f0a 100644 --- a/cpp/src/arrow/json/parser.cc +++ b/cpp/src/arrow/json/parser.cc @@ -104,6 +104,7 @@ Status Kind::ForType(const DataType& type, Kind::type* kind) { Status Visit(const DateType&) { return SetKind(Kind::kNumber); } Status Visit(const BinaryType&) { return SetKind(Kind::kString); } Status Visit(const LargeBinaryType&) { return SetKind(Kind::kString); } + Status Visit(const BinaryViewType&) { return SetKind(Kind::kString); } Status Visit(const TimestampType&) { return SetKind(Kind::kString); } Status Visit(const DecimalType&) { return SetKind(Kind::kNumberOrString); } Status Visit(const DictionaryType& dict_type) { diff --git 
a/cpp/src/arrow/json/test_common.h b/cpp/src/arrow/json/test_common.h index 0f7b3466fdbc9..f7ab6fd10275f 100644 --- a/cpp/src/arrow/json/test_common.h +++ b/cpp/src/arrow/json/test_common.h @@ -110,8 +110,7 @@ struct GenerateImpl { return OK(writer.Double(val)); } - template - enable_if_base_binary Visit(const T&) { + Status GenerateAscii(const DataType&) { auto size = std::poisson_distribution<>{4}(e); std::uniform_int_distribution gen_char(32, 126); // FIXME generate UTF8 std::string s(size, '\0'); @@ -119,6 +118,13 @@ struct GenerateImpl { return OK(writer.String(s.c_str())); } + template + enable_if_base_binary Visit(const T& t) { + return GenerateAscii(t); + } + + Status Visit(const BinaryViewType& t) { return GenerateAscii(t); } + template enable_if_list_like Visit(const T& t) { auto size = std::poisson_distribution<>{4}(e); diff --git a/cpp/src/arrow/pretty_print.cc b/cpp/src/arrow/pretty_print.cc index a5410df7e9ae2..b392e027a6b89 100644 --- a/cpp/src/arrow/pretty_print.cc +++ b/cpp/src/arrow/pretty_print.cc @@ -229,18 +229,13 @@ class ArrayPrinter : public PrettyPrinter { } template - enable_if_string_like WriteDataValues(const ArrayType& array) { + enable_if_has_string_view WriteDataValues(const ArrayType& array) { return WriteValues(array, [&](int64_t i) { - (*sink_) << "\"" << array.GetView(i) << "\""; - return Status::OK(); - }); - } - - template - enable_if_t::value && !is_decimal_type::value, Status> - WriteDataValues(const ArrayType& array) { - return WriteValues(array, [&](int64_t i) { - (*sink_) << HexEncode(array.GetView(i)); + if constexpr (T::is_utf8) { + (*sink_) << "\"" << array.GetView(i) << "\""; + } else { + (*sink_) << HexEncode(array.GetView(i)); + } return Status::OK(); }); } @@ -302,6 +297,7 @@ class ArrayPrinter : public PrettyPrinter { std::is_base_of::value || std::is_base_of::value || std::is_base_of::value || + std::is_base_of::value || std::is_base_of::value || std::is_base_of::value || std::is_base_of::value || diff --git 
a/cpp/src/arrow/scalar.cc b/cpp/src/arrow/scalar.cc index b2ad1ad519bb2..167e272705268 100644 --- a/cpp/src/arrow/scalar.cc +++ b/cpp/src/arrow/scalar.cc @@ -263,6 +263,12 @@ struct ScalarValidateImpl { Status Visit(const StringScalar& s) { return ValidateStringScalar(s); } + Status Visit(const BinaryViewScalar& s) { return ValidateBinaryScalar(s); } + + Status Visit(const StringViewScalar& s) { return ValidateStringScalar(s); } + + Status Visit(const LargeBinaryScalar& s) { return ValidateBinaryScalar(s); } + Status Visit(const LargeStringScalar& s) { return ValidateStringScalar(s); } template @@ -548,17 +554,8 @@ Status Scalar::ValidateFull() const { return ScalarValidateImpl(/*full_validation=*/true).Validate(*this); } -BinaryScalar::BinaryScalar(std::string s) - : BinaryScalar(Buffer::FromString(std::move(s))) {} - -StringScalar::StringScalar(std::string s) - : StringScalar(Buffer::FromString(std::move(s))) {} - -LargeBinaryScalar::LargeBinaryScalar(std::string s) - : LargeBinaryScalar(Buffer::FromString(std::move(s))) {} - -LargeStringScalar::LargeStringScalar(std::string s) - : LargeStringScalar(Buffer::FromString(std::move(s))) {} +BaseBinaryScalar::BaseBinaryScalar(std::string s, std::shared_ptr type) + : BaseBinaryScalar(Buffer::FromString(std::move(s)), std::move(type)) {} FixedSizeBinaryScalar::FixedSizeBinaryScalar(std::shared_ptr value, std::shared_ptr type, diff --git a/cpp/src/arrow/scalar.h b/cpp/src/arrow/scalar.h index 1d1ce4aa72948..5175b0128524c 100644 --- a/cpp/src/arrow/scalar.h +++ b/cpp/src/arrow/scalar.h @@ -263,24 +263,21 @@ struct ARROW_EXPORT BaseBinaryScalar return value ? 
std::string_view(*value) : std::string_view(); } - protected: BaseBinaryScalar(std::shared_ptr value, std::shared_ptr type) : internal::PrimitiveScalarBase{std::move(type), true}, value(std::move(value)) {} friend ArraySpan; + BaseBinaryScalar(std::string s, std::shared_ptr type); }; struct ARROW_EXPORT BinaryScalar : public BaseBinaryScalar { using BaseBinaryScalar::BaseBinaryScalar; using TypeClass = BinaryType; - BinaryScalar(std::shared_ptr value, std::shared_ptr type) - : BaseBinaryScalar(std::move(value), std::move(type)) {} - explicit BinaryScalar(std::shared_ptr value) : BinaryScalar(std::move(value), binary()) {} - explicit BinaryScalar(std::string s); + explicit BinaryScalar(std::string s) : BaseBinaryScalar(std::move(s), binary()) {} BinaryScalar() : BinaryScalar(binary()) {} }; @@ -292,11 +289,39 @@ struct ARROW_EXPORT StringScalar : public BinaryScalar { explicit StringScalar(std::shared_ptr value) : StringScalar(std::move(value), utf8()) {} - explicit StringScalar(std::string s); + explicit StringScalar(std::string s) : BinaryScalar(std::move(s), utf8()) {} StringScalar() : StringScalar(utf8()) {} }; +struct ARROW_EXPORT BinaryViewScalar : public BaseBinaryScalar { + using BaseBinaryScalar::BaseBinaryScalar; + using TypeClass = BinaryViewType; + + explicit BinaryViewScalar(std::shared_ptr value) + : BinaryViewScalar(std::move(value), binary_view()) {} + + explicit BinaryViewScalar(std::string s) + : BaseBinaryScalar(std::move(s), binary_view()) {} + + BinaryViewScalar() : BinaryViewScalar(binary_view()) {} + + std::string_view view() const override { return std::string_view(*this->value); } +}; + +struct ARROW_EXPORT StringViewScalar : public BinaryViewScalar { + using BinaryViewScalar::BinaryViewScalar; + using TypeClass = StringViewType; + + explicit StringViewScalar(std::shared_ptr value) + : StringViewScalar(std::move(value), utf8_view()) {} + + explicit StringViewScalar(std::string s) + : BinaryViewScalar(std::move(s), utf8_view()) {} + + 
StringViewScalar() : StringViewScalar(utf8_view()) {} +}; + struct ARROW_EXPORT LargeBinaryScalar : public BaseBinaryScalar { using BaseBinaryScalar::BaseBinaryScalar; using TypeClass = LargeBinaryType; @@ -307,7 +332,8 @@ struct ARROW_EXPORT LargeBinaryScalar : public BaseBinaryScalar { explicit LargeBinaryScalar(std::shared_ptr value) : LargeBinaryScalar(std::move(value), large_binary()) {} - explicit LargeBinaryScalar(std::string s); + explicit LargeBinaryScalar(std::string s) + : BaseBinaryScalar(std::move(s), large_binary()) {} LargeBinaryScalar() : LargeBinaryScalar(large_binary()) {} }; @@ -319,7 +345,8 @@ struct ARROW_EXPORT LargeStringScalar : public LargeBinaryScalar { explicit LargeStringScalar(std::shared_ptr value) : LargeStringScalar(std::move(value), large_utf8()) {} - explicit LargeStringScalar(std::string s); + explicit LargeStringScalar(std::string s) + : LargeBinaryScalar(std::move(s), large_utf8()) {} LargeStringScalar() : LargeStringScalar(large_utf8()) {} }; diff --git a/cpp/src/arrow/testing/CMakeLists.txt b/cpp/src/arrow/testing/CMakeLists.txt index d5332405964ba..59825f0bf227a 100644 --- a/cpp/src/arrow/testing/CMakeLists.txt +++ b/cpp/src/arrow/testing/CMakeLists.txt @@ -19,4 +19,5 @@ arrow_install_all_headers("arrow/testing") if(ARROW_BUILD_TESTS) add_arrow_test(random_test) + add_arrow_test(gtest_util_test) endif() diff --git a/cpp/src/arrow/testing/gtest_util.cc b/cpp/src/arrow/testing/gtest_util.cc index a6dc1d59c67a9..5ef1820d5b581 100644 --- a/cpp/src/arrow/testing/gtest_util.cc +++ b/cpp/src/arrow/testing/gtest_util.cc @@ -145,42 +145,46 @@ void AssertScalarsApproxEqual(const Scalar& expected, const Scalar& actual, bool } void AssertBatchesEqual(const RecordBatch& expected, const RecordBatch& actual, - bool check_metadata) { + bool check_metadata, const EqualOptions& options) { AssertTsSame(expected, actual, [&](const RecordBatch& expected, const RecordBatch& actual) { - return expected.Equals(actual, check_metadata); + return 
expected.Equals(actual, check_metadata, options); }); } -void AssertBatchesApproxEqual(const RecordBatch& expected, const RecordBatch& actual) { +void AssertBatchesApproxEqual(const RecordBatch& expected, const RecordBatch& actual, + const EqualOptions& options) { AssertTsSame(expected, actual, [&](const RecordBatch& expected, const RecordBatch& actual) { - return expected.ApproxEquals(actual); + return expected.ApproxEquals(actual, options); }); } -void AssertChunkedEqual(const ChunkedArray& expected, const ChunkedArray& actual) { +void AssertChunkedEqual(const ChunkedArray& expected, const ChunkedArray& actual, + const EqualOptions& options) { ASSERT_EQ(expected.num_chunks(), actual.num_chunks()) << "# chunks unequal"; - if (!actual.Equals(expected)) { + if (!actual.Equals(expected, options)) { std::stringstream diff; for (int i = 0; i < actual.num_chunks(); ++i) { auto c1 = actual.chunk(i); auto c2 = expected.chunk(i); diff << "# chunk " << i << std::endl; - ARROW_IGNORE_EXPR(c1->Equals(c2, EqualOptions().diff_sink(&diff))); + ARROW_IGNORE_EXPR(c1->Equals(c2, options.diff_sink(&diff))); } FAIL() << diff.str(); } } -void AssertChunkedEqual(const ChunkedArray& actual, const ArrayVector& expected) { - AssertChunkedEqual(ChunkedArray(expected, actual.type()), actual); +void AssertChunkedEqual(const ChunkedArray& actual, const ArrayVector& expected, + const EqualOptions& options) { + AssertChunkedEqual(ChunkedArray(expected, actual.type()), actual, options); } -void AssertChunkedEquivalent(const ChunkedArray& expected, const ChunkedArray& actual) { +void AssertChunkedEquivalent(const ChunkedArray& expected, const ChunkedArray& actual, + const EqualOptions& options) { // XXX: AssertChunkedEqual in gtest_util.h does not permit the chunk layouts // to be different - if (!actual.Equals(expected)) { + if (!actual.Equals(expected, options)) { std::stringstream pp_expected; std::stringstream pp_actual; ::arrow::PrettyPrintOptions options(/*indent=*/2); @@ -321,21 +325,23 @@ 
ASSERT_EQUAL_IMPL(Field, Field, "fields") ASSERT_EQUAL_IMPL(Schema, Schema, "schemas") #undef ASSERT_EQUAL_IMPL -void AssertDatumsEqual(const Datum& expected, const Datum& actual, bool verbose) { +void AssertDatumsEqual(const Datum& expected, const Datum& actual, bool verbose, + const EqualOptions& options) { ASSERT_EQ(expected.kind(), actual.kind()) << "expected:" << expected.ToString() << " got:" << actual.ToString(); switch (expected.kind()) { case Datum::SCALAR: - AssertScalarsEqual(*expected.scalar(), *actual.scalar(), verbose); + AssertScalarsEqual(*expected.scalar(), *actual.scalar(), verbose, options); break; case Datum::ARRAY: { auto expected_array = expected.make_array(); auto actual_array = actual.make_array(); - AssertArraysEqual(*expected_array, *actual_array, verbose); + AssertArraysEqual(*expected_array, *actual_array, verbose, options); } break; case Datum::CHUNKED_ARRAY: - AssertChunkedEquivalent(*expected.chunked_array(), *actual.chunked_array()); + AssertChunkedEquivalent(*expected.chunked_array(), *actual.chunked_array(), + options); break; default: // TODO: Implement better print @@ -479,7 +485,7 @@ Result> PrintArrayDiff(const ChunkedArray& expected, } void AssertTablesEqual(const Table& expected, const Table& actual, bool same_chunk_layout, - bool combine_chunks) { + bool combine_chunks, const EqualOptions& options) { ASSERT_EQ(expected.num_columns(), actual.num_columns()); if (combine_chunks) { @@ -487,13 +493,13 @@ void AssertTablesEqual(const Table& expected, const Table& actual, bool same_chu ASSERT_OK_AND_ASSIGN(auto new_expected, expected.CombineChunks(pool)); ASSERT_OK_AND_ASSIGN(auto new_actual, actual.CombineChunks(pool)); - AssertTablesEqual(*new_expected, *new_actual, false, false); + AssertTablesEqual(*new_expected, *new_actual, false, false, options); return; } if (same_chunk_layout) { for (int i = 0; i < actual.num_columns(); ++i) { - AssertChunkedEqual(*expected.column(i), *actual.column(i)); + 
AssertChunkedEqual(*expected.column(i), *actual.column(i), options); } } else { std::stringstream ss; @@ -533,17 +539,18 @@ void CompareBatchWith(const RecordBatch& left, const RecordBatch& right, } void CompareBatch(const RecordBatch& left, const RecordBatch& right, - bool compare_metadata) { + bool compare_metadata, const EqualOptions& options) { return CompareBatchWith( left, right, compare_metadata, - [](const Array& left, const Array& right) { return left.Equals(right); }); + [&](const Array& left, const Array& right) { return left.Equals(right, options); }); } void ApproxCompareBatch(const RecordBatch& left, const RecordBatch& right, - bool compare_metadata) { - return CompareBatchWith( - left, right, compare_metadata, - [](const Array& left, const Array& right) { return left.ApproxEquals(right); }); + bool compare_metadata, const EqualOptions& options) { + return CompareBatchWith(left, right, compare_metadata, + [&](const Array& left, const Array& right) { + return left.ApproxEquals(right, options); + }); } std::shared_ptr TweakValidityBit(const std::shared_ptr& array, diff --git a/cpp/src/arrow/testing/gtest_util.h b/cpp/src/arrow/testing/gtest_util.h index bb462af86a5f2..916067d85b753 100644 --- a/cpp/src/arrow/testing/gtest_util.h +++ b/cpp/src/arrow/testing/gtest_util.h @@ -176,10 +176,17 @@ using DecimalArrowTypes = ::testing::Types; using BaseBinaryArrowTypes = ::testing::Types; +using BaseBinaryOrBinaryViewLikeArrowTypes = + ::testing::Types; + using BinaryArrowTypes = ::testing::Types; using StringArrowTypes = ::testing::Types; +using StringOrStringViewArrowTypes = + ::testing::Types; + using ListArrowTypes = ::testing::Types; using UnionArrowTypes = ::testing::Types; @@ -214,18 +221,22 @@ ARROW_TESTING_EXPORT void AssertScalarsEqual( ARROW_TESTING_EXPORT void AssertScalarsApproxEqual( const Scalar& expected, const Scalar& actual, bool verbose = false, const EqualOptions& options = TestingEqualOptions()); -ARROW_TESTING_EXPORT void 
AssertBatchesEqual(const RecordBatch& expected, - const RecordBatch& actual, - bool check_metadata = false); -ARROW_TESTING_EXPORT void AssertBatchesApproxEqual(const RecordBatch& expected, - const RecordBatch& actual); -ARROW_TESTING_EXPORT void AssertChunkedEqual(const ChunkedArray& expected, - const ChunkedArray& actual); -ARROW_TESTING_EXPORT void AssertChunkedEqual(const ChunkedArray& actual, - const ArrayVector& expected); +ARROW_TESTING_EXPORT void AssertBatchesEqual( + const RecordBatch& expected, const RecordBatch& actual, bool check_metadata = false, + const EqualOptions& options = TestingEqualOptions()); +ARROW_TESTING_EXPORT void AssertBatchesApproxEqual( + const RecordBatch& expected, const RecordBatch& actual, + const EqualOptions& options = TestingEqualOptions()); +ARROW_TESTING_EXPORT void AssertChunkedEqual( + const ChunkedArray& expected, const ChunkedArray& actual, + const EqualOptions& options = TestingEqualOptions()); +ARROW_TESTING_EXPORT void AssertChunkedEqual( + const ChunkedArray& actual, const ArrayVector& expected, + const EqualOptions& options = TestingEqualOptions()); // Like ChunkedEqual, but permits different chunk layout -ARROW_TESTING_EXPORT void AssertChunkedEquivalent(const ChunkedArray& expected, - const ChunkedArray& actual); +ARROW_TESTING_EXPORT void AssertChunkedEquivalent( + const ChunkedArray& expected, const ChunkedArray& actual, + const EqualOptions& options = TestingEqualOptions()); ARROW_TESTING_EXPORT void AssertChunkedApproxEquivalent( const ChunkedArray& expected, const ChunkedArray& actual, const EqualOptions& options = TestingEqualOptions()); @@ -270,12 +281,13 @@ ARROW_TESTING_EXPORT void AssertSchemaNotEqual(const std::shared_ptr& lh ARROW_TESTING_EXPORT Result> PrintArrayDiff( const ChunkedArray& expected, const ChunkedArray& actual); -ARROW_TESTING_EXPORT void AssertTablesEqual(const Table& expected, const Table& actual, - bool same_chunk_layout = true, - bool flatten = false); +ARROW_TESTING_EXPORT void 
AssertTablesEqual( + const Table& expected, const Table& actual, bool same_chunk_layout = true, + bool flatten = false, const EqualOptions& options = TestingEqualOptions()); -ARROW_TESTING_EXPORT void AssertDatumsEqual(const Datum& expected, const Datum& actual, - bool verbose = false); +ARROW_TESTING_EXPORT void AssertDatumsEqual( + const Datum& expected, const Datum& actual, bool verbose = false, + const EqualOptions& options = TestingEqualOptions()); ARROW_TESTING_EXPORT void AssertDatumsApproxEqual( const Datum& expected, const Datum& actual, bool verbose = false, const EqualOptions& options = TestingEqualOptions()); @@ -289,12 +301,13 @@ void AssertNumericDataEqual(const C_TYPE* raw_data, } } -ARROW_TESTING_EXPORT void CompareBatch(const RecordBatch& left, const RecordBatch& right, - bool compare_metadata = true); +ARROW_TESTING_EXPORT void CompareBatch( + const RecordBatch& left, const RecordBatch& right, bool compare_metadata = true, + const EqualOptions& options = TestingEqualOptions()); -ARROW_TESTING_EXPORT void ApproxCompareBatch(const RecordBatch& left, - const RecordBatch& right, - bool compare_metadata = true); +ARROW_TESTING_EXPORT void ApproxCompareBatch( + const RecordBatch& left, const RecordBatch& right, bool compare_metadata = true, + const EqualOptions& options = TestingEqualOptions()); // Check if the padding of the buffers of the array is zero. // Also cause valgrind warnings if the padding bytes are uninitialized. diff --git a/cpp/src/arrow/testing/gtest_util_test.cc b/cpp/src/arrow/testing/gtest_util_test.cc new file mode 100644 index 0000000000000..14c17a972aa06 --- /dev/null +++ b/cpp/src/arrow/testing/gtest_util_test.cc @@ -0,0 +1,137 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#include + +#include "arrow/array.h" +#include "arrow/array/builder_decimal.h" +#include "arrow/datum.h" +#include "arrow/record_batch.h" +#include "arrow/testing/gtest_util.h" +#include "arrow/testing/random.h" +#include "arrow/type.h" +#include "arrow/type_traits.h" +#include "arrow/util/checked_cast.h" + +namespace arrow { + +// Test basic cases for contains NaN. +class TestAssertContainsNaN : public ::testing::Test {}; + +TEST_F(TestAssertContainsNaN, BatchesEqual) { + auto schema = ::arrow::schema({ + {field("a", float32())}, + {field("b", float64())}, + }); + + auto expected = RecordBatchFromJSON(schema, + R"([{"a": 3, "b": 5}, + {"a": 1, "b": 3}, + {"a": 3, "b": 4}, + {"a": NaN, "b": 6}, + {"a": 2, "b": 5}, + {"a": 1, "b": NaN}, + {"a": 1, "b": 3} + ])"); + auto actual = RecordBatchFromJSON(schema, + R"([{"a": 3, "b": 5}, + {"a": 1, "b": 3}, + {"a": 3, "b": 4}, + {"a": NaN, "b": 6}, + {"a": 2, "b": 5}, + {"a": 1, "b": NaN}, + {"a": 1, "b": 3} + ])"); + ASSERT_BATCHES_EQUAL(*expected, *actual); + AssertBatchesApproxEqual(*expected, *actual); +} + +TEST_F(TestAssertContainsNaN, TableEqual) { + auto schema = ::arrow::schema({ + {field("a", float32())}, + {field("b", float64())}, + }); + + auto expected = TableFromJSON(schema, {R"([{"a": null, "b": 5}, + {"a": NaN, "b": 3}, + {"a": 3, "b": null} + ])", + R"([{"a": null, "b": null}, + {"a": 2, "b": NaN}, + {"a": 1, "b": 5}, + 
{"a": 3, "b": 5} + ])"}); + auto actual = TableFromJSON(schema, {R"([{"a": null, "b": 5}, + {"a": NaN, "b": 3}, + {"a": 3, "b": null} + ])", + R"([{"a": null, "b": null}, + {"a": 2, "b": NaN}, + {"a": 1, "b": 5}, + {"a": 3, "b": 5} + ])"}); + ASSERT_TABLES_EQUAL(*expected, *actual); +} + +TEST_F(TestAssertContainsNaN, ArrayEqual) { + auto expected = ArrayFromJSON(float64(), "[0, 1, 2, NaN]"); + auto actual = ArrayFromJSON(float64(), "[0, 1, 2, NaN]"); + AssertArraysEqual(*expected, *actual); +} + +TEST_F(TestAssertContainsNaN, ChunkedEqual) { + auto expected = ChunkedArrayFromJSON(float64(), { + "[null, 1]", + "[3, NaN, 2]", + "[NaN]", + }); + + auto actual = ChunkedArrayFromJSON(float64(), { + "[null, 1]", + "[3, NaN, 2]", + "[NaN]", + }); + AssertChunkedEqual(*expected, *actual); +} + +TEST_F(TestAssertContainsNaN, DatumEqual) { + // scalar + auto expected_scalar = ScalarFromJSON(float64(), "NaN"); + auto actual_scalar = ScalarFromJSON(float64(), "NaN"); + AssertDatumsEqual(expected_scalar, actual_scalar); + + // array + auto expected_array = ArrayFromJSON(float64(), "[3, NaN, 2, 1, 5]"); + auto actual_array = ArrayFromJSON(float64(), "[3, NaN, 2, 1, 5]"); + AssertDatumsEqual(expected_array, actual_array); + + // chunked array + auto expected_chunked = ChunkedArrayFromJSON(float64(), { + "[null, 1]", + "[3, NaN, 2]", + "[NaN]", + }); + + auto actual_chunked = ChunkedArrayFromJSON(float64(), { + "[null, 1]", + "[3, NaN, 2]", + "[NaN]", + }); + AssertDatumsEqual(expected_chunked, actual_chunked); +} + +} // namespace arrow diff --git a/cpp/src/arrow/testing/random.cc b/cpp/src/arrow/testing/random.cc index b74c41f75e452..1386075397e20 100644 --- a/cpp/src/arrow/testing/random.cc +++ b/cpp/src/arrow/testing/random.cc @@ -363,13 +363,11 @@ std::shared_ptr RandomArrayGenerator::Decimal256(std::shared_ptr -static std::shared_ptr GenerateBinaryArray(RandomArrayGenerator* gen, int64_t size, - int32_t min_length, int32_t max_length, - double null_probability, - int64_t 
alignment, - MemoryPool* memory_pool) { - using offset_type = typename TypeClass::offset_type; +template +static std::shared_ptr GenerateBinaryArray( + RandomArrayGenerator* gen, int64_t size, int32_t min_length, int32_t max_length, + double null_probability, std::optional max_data_buffer_length, + int64_t alignment, MemoryPool* memory_pool) { using BuilderType = typename TypeTraits::BuilderType; using OffsetArrowType = typename CTypeTraits::ArrowType; using OffsetArrayType = typename TypeTraits::ArrayType; @@ -387,7 +385,12 @@ static std::shared_ptr GenerateBinaryArray(RandomArrayGenerator* gen, int /*null_probability=*/0); std::vector str_buffer(max_length); - BuilderType builder(memory_pool, alignment); + BuilderType builder{memory_pool, alignment}; + if constexpr (std::is_base_of_v) { + if (max_data_buffer_length) { + builder.SetBlockSize(*max_data_buffer_length); + } + } for (int64_t i = 0; i < size; ++i) { if (lengths->IsValid(i)) { @@ -409,7 +412,8 @@ std::shared_ptr RandomArrayGenerator::String(int64_t size, int32_t min_le int64_t alignment, MemoryPool* memory_pool) { return GenerateBinaryArray(this, size, min_length, max_length, - null_probability, alignment, memory_pool); + null_probability, /*max_data_buffer_length=*/{}, + alignment, memory_pool); } std::shared_ptr RandomArrayGenerator::LargeString(int64_t size, int32_t min_length, @@ -417,8 +421,9 @@ std::shared_ptr RandomArrayGenerator::LargeString(int64_t size, int32_t m double null_probability, int64_t alignment, MemoryPool* memory_pool) { - return GenerateBinaryArray(this, size, min_length, max_length, - null_probability, alignment, memory_pool); + return GenerateBinaryArray( + this, size, min_length, max_length, null_probability, /*max_data_buffer_length=*/{}, + alignment, memory_pool); } std::shared_ptr RandomArrayGenerator::BinaryWithRepeats( @@ -430,6 +435,15 @@ std::shared_ptr RandomArrayGenerator::BinaryWithRepeats( return *strings->View(binary()); } +std::shared_ptr 
RandomArrayGenerator::StringView( + int64_t size, int32_t min_length, int32_t max_length, double null_probability, + std::optional max_data_buffer_length, int64_t alignment, + MemoryPool* memory_pool) { + return GenerateBinaryArray( + this, size, min_length, max_length, null_probability, max_data_buffer_length, + alignment, memory_pool); +} + std::shared_ptr RandomArrayGenerator::StringWithRepeats( int64_t size, int64_t unique, int32_t min_length, int32_t max_length, double null_probability, int64_t alignment, MemoryPool* memory_pool) { @@ -843,6 +857,24 @@ std::shared_ptr RandomArrayGenerator::ArrayOf(const Field& field, int64_t ->View(field.type()); } + case Type::type::STRING_VIEW: + case Type::type::BINARY_VIEW: { + const auto min_length = + GetMetadata(field.metadata().get(), "min_length", 0); + const auto max_length = + GetMetadata(field.metadata().get(), "max_length", 20); + std::optional max_data_buffer_length = + GetMetadata(field.metadata().get(), "max_data_buffer_length", 0); + if (*max_data_buffer_length == 0) { + *max_data_buffer_length = {}; + } + + return StringView(length, min_length, max_length, null_probability, + max_data_buffer_length, alignment) + ->View(field.type()) + .ValueOrDie(); + } + case Type::type::DECIMAL128: return Decimal128(field.type(), length, null_probability, alignment, memory_pool); diff --git a/cpp/src/arrow/testing/random.h b/cpp/src/arrow/testing/random.h index de9ea6d05648d..cbdac3baa0109 100644 --- a/cpp/src/arrow/testing/random.h +++ b/cpp/src/arrow/testing/random.h @@ -367,6 +367,26 @@ class ARROW_TESTING_EXPORT RandomArrayGenerator { int64_t alignment = kDefaultBufferAlignment, MemoryPool* memory_pool = default_memory_pool()); + /// \brief Generate a random StringViewArray + /// + /// \param[in] size the size of the array to generate + /// \param[in] min_length the lower bound of the string length + /// determined by the uniform distribution + /// \param[in] max_length the upper bound of the string length + /// 
determined by the uniform distribution + /// \param[in] null_probability the probability of a value being null + /// \param[in] max_data_buffer_length the data buffer size at which + /// a new chunk will be generated + /// \param[in] alignment alignment for memory allocations (in bytes) + /// \param[in] memory_pool memory pool to allocate memory from + /// + /// \return a generated Array + std::shared_ptr StringView(int64_t size, int32_t min_length, int32_t max_length, + double null_probability = 0, + std::optional max_data_buffer_length = {}, + int64_t alignment = kDefaultBufferAlignment, + MemoryPool* memory_pool = default_memory_pool()); + /// \brief Generate a random LargeStringArray /// /// \param[in] size the size of the array to generate @@ -556,10 +576,14 @@ class ARROW_TESTING_EXPORT RandomArrayGenerator { /// - max_length (T::offset_type): the minimum length of the child to generate, /// default 1024 /// - /// For string and binary types T (not including their large variants): + /// For string and binary types T (not including their large or view variants): /// - unique (int32_t): if positive, this many distinct values will be generated /// and all array values will be one of these values, default -1 /// + /// For string and binary view types T: + /// - max_data_buffer_length (int64_t): the data buffer size at which a new chunk + /// will be generated, default 32KB + /// /// For MapType: /// - values (int32_t): the number of key-value pairs to generate, which will be /// partitioned among the array values. 
diff --git a/cpp/src/arrow/testing/random_test.cc b/cpp/src/arrow/testing/random_test.cc index f269818e83a3d..951b654e56f73 100644 --- a/cpp/src/arrow/testing/random_test.cc +++ b/cpp/src/arrow/testing/random_test.cc @@ -160,6 +160,7 @@ auto values = ::testing::Values( field("uint32", uint32()), field("int32", int32()), field("uint64", uint64()), field("int64", int64()), field("float16", float16()), field("float32", float32()), field("float64", float64()), field("string", utf8()), field("binary", binary()), + field("string_view", utf8_view()), field("binary_view", binary_view()), field("fixed_size_binary", fixed_size_binary(8)), field("decimal128", decimal128(8, 3)), field("decimal128", decimal128(29, -5)), field("decimal256", decimal256(16, 4)), field("decimal256", decimal256(57, -6)), diff --git a/cpp/src/arrow/type.cc b/cpp/src/arrow/type.cc index a4f43256827da..f378bd974047d 100644 --- a/cpp/src/arrow/type.cc +++ b/cpp/src/arrow/type.cc @@ -64,10 +64,14 @@ constexpr Type::type FixedSizeListType::type_id; constexpr Type::type BinaryType::type_id; +constexpr Type::type BinaryViewType::type_id; + constexpr Type::type LargeBinaryType::type_id; constexpr Type::type StringType::type_id; +constexpr Type::type StringViewType::type_id; + constexpr Type::type LargeStringType::type_id; constexpr Type::type FixedSizeBinaryType::type_id; @@ -130,6 +134,8 @@ std::vector AllTypeIds() { Type::BINARY, Type::LARGE_STRING, Type::LARGE_BINARY, + Type::STRING_VIEW, + Type::BINARY_VIEW, Type::FIXED_SIZE_BINARY, Type::STRUCT, Type::LIST, @@ -194,7 +200,9 @@ std::string ToString(Type::type id) { TO_STRING_CASE(INTERVAL_MONTHS) TO_STRING_CASE(DURATION) TO_STRING_CASE(STRING) + TO_STRING_CASE(STRING_VIEW) TO_STRING_CASE(BINARY) + TO_STRING_CASE(BINARY_VIEW) TO_STRING_CASE(LARGE_STRING) TO_STRING_CASE(LARGE_BINARY) TO_STRING_CASE(FIXED_SIZE_BINARY) @@ -247,7 +255,7 @@ struct PhysicalTypeVisitor { } template - Status Visit(const Type&) { + Status Visit(const Type& type) { result = 
TypeTraits::type_singleton(); return Status::OK(); } @@ -1058,10 +1066,14 @@ std::string FixedSizeListType::ToString() const { std::string BinaryType::ToString() const { return "binary"; } +std::string BinaryViewType::ToString() const { return "binary_view"; } + std::string LargeBinaryType::ToString() const { return "large_binary"; } std::string StringType::ToString() const { return "string"; } +std::string StringViewType::ToString() const { return "string_view"; } + std::string LargeStringType::ToString() const { return "large_string"; } int FixedSizeBinaryType::bit_width() const { return CHAR_BIT * byte_width(); } @@ -2821,8 +2833,10 @@ PARAMETER_LESS_FINGERPRINT(HalfFloat) PARAMETER_LESS_FINGERPRINT(Float) PARAMETER_LESS_FINGERPRINT(Double) PARAMETER_LESS_FINGERPRINT(Binary) +PARAMETER_LESS_FINGERPRINT(BinaryView) PARAMETER_LESS_FINGERPRINT(LargeBinary) PARAMETER_LESS_FINGERPRINT(String) +PARAMETER_LESS_FINGERPRINT(StringView) PARAMETER_LESS_FINGERPRINT(LargeString) PARAMETER_LESS_FINGERPRINT(Date32) PARAMETER_LESS_FINGERPRINT(Date64) @@ -3034,6 +3048,16 @@ TYPE_FACTORY(large_binary, LargeBinaryType) TYPE_FACTORY(date64, Date64Type) TYPE_FACTORY(date32, Date32Type) +const std::shared_ptr& utf8_view() { + static std::shared_ptr type = std::make_shared(); + return type; +} + +const std::shared_ptr& binary_view() { + static std::shared_ptr type = std::make_shared(); + return type; +} + std::shared_ptr fixed_size_binary(int32_t byte_width) { return std::make_shared(byte_width); } @@ -3294,7 +3318,7 @@ void InitStaticData() { // * Time32 // * Time64 // * Timestamp - g_primitive_types = {null(), boolean(), date32(), date64()}; + g_primitive_types = {null(), boolean(), date32(), date64(), binary_view(), utf8_view()}; Extend(g_numeric_types, &g_primitive_types); Extend(g_base_binary_types, &g_primitive_types); } diff --git a/cpp/src/arrow/type.h b/cpp/src/arrow/type.h index 3f4dd5c9b21fa..a905192e4a54e 100644 --- a/cpp/src/arrow/type.h +++ b/cpp/src/arrow/type.h @@ 
-23,6 +23,7 @@ #include #include #include +#include #include #include #include @@ -113,8 +114,14 @@ struct ARROW_EXPORT DataTypeLayout { std::vector buffers; /// Whether this type expects an associated dictionary array. bool has_dictionary = false; + /// If this is provided, the number of buffers expected is only lower-bounded by + /// buffers.size(). Buffers beyond this lower bound are expected to conform to + /// variadic_spec. + std::optional variadic_spec; - explicit DataTypeLayout(std::vector v) : buffers(std::move(v)) {} + explicit DataTypeLayout(std::vector buffers, + std::optional variadic_spec = {}) + : buffers(std::move(buffers)), variadic_spec(variadic_spec) {} }; /// \brief Base class for all data types @@ -772,6 +779,103 @@ class ARROW_EXPORT BinaryType : public BaseBinaryType { explicit BinaryType(Type::type logical_type) : BaseBinaryType(logical_type) {} }; +/// \brief Concrete type class for variable-size binary view data +class ARROW_EXPORT BinaryViewType : public DataType { + public: + static constexpr Type::type type_id = Type::BINARY_VIEW; + static constexpr bool is_utf8 = false; + using PhysicalType = BinaryViewType; + + static constexpr int kSize = 16; + static constexpr int kInlineSize = 12; + static constexpr int kPrefixSize = 4; + + /// Variable length string or binary with inline optimization for small values (12 bytes + /// or fewer). This is similar to std::string_view except limited in size to INT32_MAX + /// and at least the first four bytes of the string are copied inline (accessible + /// without pointer dereference). This inline prefix allows failing comparisons early. + /// Furthermore when dealing with short strings the CPU cache working set is reduced + /// since many can be inline. 
+ /// + /// This union supports two states: + /// + /// - Entirely inlined string data + /// |----|--------------| + /// ^ ^ + /// | | + /// size in-line string data, zero padded + /// + /// - Reference into a buffer + /// |----|----|----|----| + /// ^ ^ ^ ^ + /// | | | | + /// size | | `------. + /// prefix | | + /// buffer index | + /// offset in buffer + /// + /// Adapted from TU Munich's UmbraDB [1], Velox, DuckDB. + /// + /// [1]: https://db.in.tum.de/~freitag/papers/p29-neumann-cidr20.pdf + /// + /// Alignment to 64 bits enables an aligned load of the size and prefix into + /// a single 64 bit integer, which is useful to the comparison fast path. + union alignas(int64_t) c_type { + struct { + int32_t size; + std::array data; + } inlined; + + struct { + int32_t size; + std::array prefix; + int32_t buffer_index; + int32_t offset; + } ref; + + /// The number of bytes viewed. + int32_t size() const { + // Size is in the common initial subsequence of each member of the union, + // so accessing `inlined.size` is legal even if another member is active. + return inlined.size; + } + + /// True if the view's data is entirely stored inline. + bool is_inline() const { return size() <= kInlineSize; } + + /// Return a pointer to the inline data of a view. + /// + /// For inline views, this points to the entire data of the view. + /// For other views, this points to the 4 byte prefix. + const uint8_t* inline_data() const& { + // Since `ref.prefix` has the same address as `inlined.data`, + // the branch will be trivially optimized out. + return is_inline() ? 
inlined.data.data() : ref.prefix.data(); + } + const uint8_t* inline_data() && = delete; + }; + static_assert(sizeof(c_type) == kSize); + static_assert(std::is_trivial_v); + + static constexpr const char* type_name() { return "binary_view"; } + + BinaryViewType() : BinaryViewType(Type::BINARY_VIEW) {} + + DataTypeLayout layout() const override { + return DataTypeLayout({DataTypeLayout::Bitmap(), DataTypeLayout::FixedWidth(kSize)}, + DataTypeLayout::VariableWidth()); + } + + std::string ToString() const override; + std::string name() const override { return "binary_view"; } + + protected: + std::string ComputeFingerprint() const override; + + // Allow subclasses like StringType to change the logical type. + explicit BinaryViewType(Type::type logical_type) : DataType(logical_type) {} +}; + /// \brief Concrete type class for large variable-size binary data class ARROW_EXPORT LargeBinaryType : public BaseBinaryType { public: @@ -818,6 +922,24 @@ class ARROW_EXPORT StringType : public BinaryType { std::string ComputeFingerprint() const override; }; +/// \brief Concrete type class for variable-size string data, utf8-encoded +class ARROW_EXPORT StringViewType : public BinaryViewType { + public: + static constexpr Type::type type_id = Type::STRING_VIEW; + static constexpr bool is_utf8 = true; + using PhysicalType = BinaryViewType; + + static constexpr const char* type_name() { return "utf8_view"; } + + StringViewType() : BinaryViewType(Type::STRING_VIEW) {} + + std::string ToString() const override; + std::string name() const override { return "utf8_view"; } + + protected: + std::string ComputeFingerprint() const override; +}; + /// \brief Concrete type class for large variable-size string data, utf8-encoded class ARROW_EXPORT LargeStringType : public LargeBinaryType { public: diff --git a/cpp/src/arrow/type_fwd.h b/cpp/src/arrow/type_fwd.h index 499684719feab..ca263b710317b 100644 --- a/cpp/src/arrow/type_fwd.h +++ b/cpp/src/arrow/type_fwd.h @@ -110,6 +110,11 @@ class 
BinaryArray; class BinaryBuilder; struct BinaryScalar; +class BinaryViewType; +class BinaryViewArray; +class BinaryViewBuilder; +struct BinaryViewScalar; + class LargeBinaryType; class LargeBinaryArray; class LargeBinaryBuilder; @@ -125,6 +130,11 @@ class StringArray; class StringBuilder; struct StringScalar; +class StringViewType; +class StringViewArray; +class StringViewBuilder; +struct StringViewScalar; + class LargeStringType; class LargeStringArray; class LargeStringBuilder; @@ -415,6 +425,13 @@ struct Type { /// Run-end encoded data. RUN_END_ENCODED = 38, + /// String (UTF8) view type with 4-byte prefix and inline small string + /// optimization + STRING_VIEW = 39, + + /// Bytes view type with 4-byte prefix and inline small string optimization + BINARY_VIEW = 40, + // Leave this at the end MAX_ID }; @@ -456,10 +473,14 @@ ARROW_EXPORT const std::shared_ptr& float32(); ARROW_EXPORT const std::shared_ptr& float64(); /// \brief Return a StringType instance ARROW_EXPORT const std::shared_ptr& utf8(); +/// \brief Return a StringViewType instance +ARROW_EXPORT const std::shared_ptr& utf8_view(); /// \brief Return a LargeStringType instance ARROW_EXPORT const std::shared_ptr& large_utf8(); /// \brief Return a BinaryType instance ARROW_EXPORT const std::shared_ptr& binary(); +/// \brief Return a BinaryViewType instance +ARROW_EXPORT const std::shared_ptr& binary_view(); /// \brief Return a LargeBinaryType instance ARROW_EXPORT const std::shared_ptr& large_binary(); /// \brief Return a Date32Type instance diff --git a/cpp/src/arrow/type_test.cc b/cpp/src/arrow/type_test.cc index 9ba8cf98dea4f..273f8933fa577 100644 --- a/cpp/src/arrow/type_test.cc +++ b/cpp/src/arrow/type_test.cc @@ -1469,9 +1469,21 @@ TEST(TestBinaryType, ToString) { TEST(TestStringType, ToString) { StringType str; ASSERT_EQ(str.id(), Type::STRING); + ASSERT_EQ(str.name(), std::string("utf8")); + ASSERT_EQ(str.type_name(), std::string("utf8")); ASSERT_EQ(str.ToString(), std::string("string")); } 
+TEST(TestBinaryViewType, ToString) { + BinaryViewType t1; + BinaryViewType e1; + StringViewType t2; + AssertTypeEqual(t1, e1); + AssertTypeNotEqual(t1, t2); + ASSERT_EQ(t1.id(), Type::BINARY_VIEW); + ASSERT_EQ(t1.ToString(), std::string("binary_view")); +} + TEST(TestLargeBinaryTypes, ToString) { BinaryType bt1; LargeBinaryType t1; diff --git a/cpp/src/arrow/type_traits.cc b/cpp/src/arrow/type_traits.cc index ac16afe4b8cd8..de328f322ad5f 100644 --- a/cpp/src/arrow/type_traits.cc +++ b/cpp/src/arrow/type_traits.cc @@ -88,6 +88,8 @@ int RequiredValueAlignmentForBuffer(Type::type type_id, int buffer_index) { case Type::DURATION: case Type::INTERVAL_MONTH_DAY_NANO: // Stored as two 32-bit integers and a 64-bit // integer + case Type::STRING_VIEW: + case Type::BINARY_VIEW: return 8; case Type::DICTIONARY: case Type::EXTENSION: diff --git a/cpp/src/arrow/type_traits.h b/cpp/src/arrow/type_traits.h index bcbde23ae4a4b..9d8cafacf397b 100644 --- a/cpp/src/arrow/type_traits.h +++ b/cpp/src/arrow/type_traits.h @@ -341,6 +341,16 @@ struct TypeTraits { static inline std::shared_ptr type_singleton() { return binary(); } }; +template <> +struct TypeTraits { + using ArrayType = BinaryViewArray; + using BuilderType = BinaryViewBuilder; + using ScalarType = BinaryViewScalar; + using CType = BinaryViewType::c_type; + constexpr static bool is_parameter_free = true; + static inline std::shared_ptr type_singleton() { return binary_view(); } +}; + template <> struct TypeTraits { using ArrayType = LargeBinaryArray; @@ -371,6 +381,16 @@ struct TypeTraits { static inline std::shared_ptr type_singleton() { return utf8(); } }; +template <> +struct TypeTraits { + using ArrayType = StringViewArray; + using BuilderType = StringViewBuilder; + using ScalarType = StringViewScalar; + using CType = BinaryViewType::c_type; + constexpr static bool is_parameter_free = true; + static inline std::shared_ptr type_singleton() { return utf8_view(); } +}; + template <> struct TypeTraits { using ArrayType = 
LargeStringArray; @@ -399,6 +419,11 @@ struct CTypeTraits : public TypeTraits { using ArrowType = StringType; }; +template <> +struct CTypeTraits : public TypeTraits { + using ArrowType = BinaryViewType; +}; + template <> struct CTypeTraits : public CTypeTraits {}; @@ -614,6 +639,24 @@ using is_string_type = template using enable_if_string = enable_if_t::value, R>; +template +using is_binary_view_like_type = std::is_base_of; + +template +using is_binary_view_type = std::is_same; + +template +using is_string_view_type = std::is_same; + +template +using enable_if_binary_view_like = enable_if_t::value, R>; + +template +using enable_if_binary_view = enable_if_t::value, R>; + +template +using enable_if_string_view = enable_if_t::value, R>; + template using is_string_like_type = std::integral_constant::value && T::is_utf8>; @@ -801,8 +844,10 @@ using enable_if_has_c_type = enable_if_t::value, R>; template using has_string_view = std::integral_constant::value || + std::is_same::value || std::is_same::value || std::is_same::value || + std::is_same::value || std::is_same::value || std::is_same::value>; diff --git a/cpp/src/arrow/util/CMakeLists.txt b/cpp/src/arrow/util/CMakeLists.txt index 3cecab3a633cc..2e9487dcf50c8 100644 --- a/cpp/src/arrow/util/CMakeLists.txt +++ b/cpp/src/arrow/util/CMakeLists.txt @@ -43,10 +43,12 @@ add_arrow_test(utility-test align_util_test.cc atfork_test.cc byte_size_test.cc + byte_stream_split_test.cc cache_test.cc checked_cast_test.cc compression_test.cc decimal_test.cc + float16_test.cc formatting_util_test.cc key_value_metadata_test.cc hashing_test.cc diff --git a/cpp/src/arrow/util/binary_view_util.h b/cpp/src/arrow/util/binary_view_util.h new file mode 100644 index 0000000000000..94f7a5bdfa667 --- /dev/null +++ b/cpp/src/arrow/util/binary_view_util.h @@ -0,0 +1,95 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. 
See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#pragma once + +#include +#include + +#include "arrow/type.h" +#include "arrow/util/span.h" + +namespace arrow::util { + +inline BinaryViewType::c_type ToInlineBinaryView(const void* data, int32_t size) { + // Small string: inlined. Bytes beyond size are zeroed + BinaryViewType::c_type out; + out.inlined = {size, {}}; + memcpy(&out.inlined.data, data, size); + return out; +} + +inline BinaryViewType::c_type ToInlineBinaryView(std::string_view v) { + return ToInlineBinaryView(v.data(), static_cast(v.size())); +} + +inline BinaryViewType::c_type ToBinaryView(const void* data, int32_t size, + int32_t buffer_index, int32_t offset) { + if (size <= BinaryViewType::kInlineSize) { + return ToInlineBinaryView(data, size); + } + + // Large string: store index/offset. + BinaryViewType::c_type out; + out.ref = {size, {}, buffer_index, offset}; + memcpy(&out.ref.prefix, data, sizeof(out.ref.prefix)); + return out; +} + +inline BinaryViewType::c_type ToBinaryView(std::string_view v, int32_t buffer_index, + int32_t offset) { + return ToBinaryView(v.data(), static_cast(v.size()), buffer_index, offset); +} + +template +std::string_view FromBinaryView(const BinaryViewType::c_type& v, + const BufferPtr* data_buffers) { + auto* data = v.is_inline() ? 
v.inlined.data.data() + : data_buffers[v.ref.buffer_index]->data() + v.ref.offset; + return {reinterpret_cast(data), static_cast(v.size())}; +} +template +std::string_view FromBinaryView(BinaryViewType::c_type&&, const BufferPtr*) = delete; + +template +bool EqualBinaryView(BinaryViewType::c_type l, BinaryViewType::c_type r, + const BufferPtr* l_buffers, const BufferPtr* r_buffers) { + int64_t l_size_and_prefix, r_size_and_prefix; + memcpy(&l_size_and_prefix, &l, sizeof(l_size_and_prefix)); + memcpy(&r_size_and_prefix, &r, sizeof(r_size_and_prefix)); + + if (l_size_and_prefix != r_size_and_prefix) return false; + + if (l.is_inline()) { + // The columnar spec mandates that the inlined part be zero-padded, so we can compare + // a word at a time regardless of the exact size. + int64_t l_inlined, r_inlined; + memcpy(&l_inlined, l.inline_data() + BinaryViewType::kPrefixSize, sizeof(l_inlined)); + memcpy(&r_inlined, r.inline_data() + BinaryViewType::kPrefixSize, sizeof(r_inlined)); + return l_inlined == r_inlined; + } + + // Sizes are equal and this is not inline, therefore both are out + // of line and have kPrefixSize first in common. 
+ const uint8_t* l_data = l_buffers[l.ref.buffer_index]->data() + l.ref.offset; + const uint8_t* r_data = r_buffers[r.ref.buffer_index]->data() + r.ref.offset; + return memcmp(l_data + BinaryViewType::kPrefixSize, + r_data + BinaryViewType::kPrefixSize, + l.size() - BinaryViewType::kPrefixSize) == 0; +} + +} // namespace arrow::util diff --git a/cpp/src/arrow/util/byte_stream_split.h b/cpp/src/arrow/util/byte_stream_split_internal.h similarity index 84% rename from cpp/src/arrow/util/byte_stream_split.h rename to cpp/src/arrow/util/byte_stream_split_internal.h index d428df0659b28..ae85e2cfa81a3 100644 --- a/cpp/src/arrow/util/byte_stream_split.h +++ b/cpp/src/arrow/util/byte_stream_split_internal.h @@ -17,20 +17,24 @@ #pragma once +#include "arrow/util/endian.h" #include "arrow/util/simd.h" #include "arrow/util/ubsan.h" -#include #include +#include +#include #ifdef ARROW_HAVE_SSE4_2 // Enable the SIMD for ByteStreamSplit Encoder/Decoder #define ARROW_HAVE_SIMD_SPLIT #endif // ARROW_HAVE_SSE4_2 -namespace arrow { -namespace util { -namespace internal { +namespace arrow::util::internal { + +// +// SIMD implementations +// #if defined(ARROW_HAVE_SSE4_2) template @@ -565,48 +569,140 @@ void inline ByteStreamSplitDecodeSimd(const uint8_t* data, int64_t num_values, } template -void inline ByteStreamSplitEncodeSimd(const uint8_t* raw_values, const size_t num_values, +void inline ByteStreamSplitEncodeSimd(const uint8_t* raw_values, const int64_t num_values, uint8_t* output_buffer_raw) { #if defined(ARROW_HAVE_AVX512) - return ByteStreamSplitEncodeAvx512(raw_values, num_values, output_buffer_raw); + return ByteStreamSplitEncodeAvx512(raw_values, static_cast(num_values), + output_buffer_raw); #elif defined(ARROW_HAVE_AVX2) - return ByteStreamSplitEncodeAvx2(raw_values, num_values, output_buffer_raw); + return ByteStreamSplitEncodeAvx2(raw_values, static_cast(num_values), + output_buffer_raw); #elif defined(ARROW_HAVE_SSE4_2) - return ByteStreamSplitEncodeSse2(raw_values, 
num_values, output_buffer_raw); + return ByteStreamSplitEncodeSse2(raw_values, static_cast(num_values), + output_buffer_raw); #else #error "ByteStreamSplitEncodeSimd not implemented" #endif } #endif +// +// Scalar implementations +// + +inline void DoSplitStreams(const uint8_t* src, int width, int64_t nvalues, + uint8_t** dest_streams) { + // Value empirically chosen to provide the best performance on the author's machine + constexpr int kBlockSize = 32; + + while (nvalues >= kBlockSize) { + for (int stream = 0; stream < width; ++stream) { + uint8_t* dest = dest_streams[stream]; + for (int i = 0; i < kBlockSize; i += 8) { + uint64_t a = src[stream + i * width]; + uint64_t b = src[stream + (i + 1) * width]; + uint64_t c = src[stream + (i + 2) * width]; + uint64_t d = src[stream + (i + 3) * width]; + uint64_t e = src[stream + (i + 4) * width]; + uint64_t f = src[stream + (i + 5) * width]; + uint64_t g = src[stream + (i + 6) * width]; + uint64_t h = src[stream + (i + 7) * width]; +#if ARROW_LITTLE_ENDIAN + uint64_t r = a | (b << 8) | (c << 16) | (d << 24) | (e << 32) | (f << 40) | + (g << 48) | (h << 56); +#else + uint64_t r = (a << 56) | (b << 48) | (c << 40) | (d << 32) | (e << 24) | + (f << 16) | (g << 8) | h; +#endif + arrow::util::SafeStore(&dest[i], r); + } + dest_streams[stream] += kBlockSize; + } + src += width * kBlockSize; + nvalues -= kBlockSize; + } + + // Epilog + for (int stream = 0; stream < width; ++stream) { + uint8_t* dest = dest_streams[stream]; + for (int64_t i = 0; i < nvalues; ++i) { + dest[i] = src[stream + i * width]; + } + } +} + +inline void DoMergeStreams(const uint8_t** src_streams, int width, int64_t nvalues, + uint8_t* dest) { + // Value empirically chosen to provide the best performance on the author's machine + constexpr int kBlockSize = 128; + + while (nvalues >= kBlockSize) { + for (int stream = 0; stream < width; ++stream) { + // Take kBlockSize bytes from the given stream and spread them + // to their logical places in destination. 
+ const uint8_t* src = src_streams[stream]; + for (int i = 0; i < kBlockSize; i += 8) { + uint64_t v = arrow::util::SafeLoadAs(&src[i]); +#if ARROW_LITTLE_ENDIAN + dest[stream + i * width] = static_cast(v); + dest[stream + (i + 1) * width] = static_cast(v >> 8); + dest[stream + (i + 2) * width] = static_cast(v >> 16); + dest[stream + (i + 3) * width] = static_cast(v >> 24); + dest[stream + (i + 4) * width] = static_cast(v >> 32); + dest[stream + (i + 5) * width] = static_cast(v >> 40); + dest[stream + (i + 6) * width] = static_cast(v >> 48); + dest[stream + (i + 7) * width] = static_cast(v >> 56); +#else + dest[stream + i * width] = static_cast(v >> 56); + dest[stream + (i + 1) * width] = static_cast(v >> 48); + dest[stream + (i + 2) * width] = static_cast(v >> 40); + dest[stream + (i + 3) * width] = static_cast(v >> 32); + dest[stream + (i + 4) * width] = static_cast(v >> 24); + dest[stream + (i + 5) * width] = static_cast(v >> 16); + dest[stream + (i + 6) * width] = static_cast(v >> 8); + dest[stream + (i + 7) * width] = static_cast(v); +#endif + } + src_streams[stream] += kBlockSize; + } + dest += width * kBlockSize; + nvalues -= kBlockSize; + } + + // Epilog + for (int stream = 0; stream < width; ++stream) { + const uint8_t* src = src_streams[stream]; + for (int64_t i = 0; i < nvalues; ++i) { + dest[stream + i * width] = src[i]; + } + } +} + template -void ByteStreamSplitEncodeScalar(const uint8_t* raw_values, const size_t num_values, +void ByteStreamSplitEncodeScalar(const uint8_t* raw_values, const int64_t num_values, uint8_t* output_buffer_raw) { - constexpr size_t kNumStreams = sizeof(T); - for (size_t i = 0U; i < num_values; ++i) { - for (size_t j = 0U; j < kNumStreams; ++j) { - const uint8_t byte_in_value = raw_values[i * kNumStreams + j]; - output_buffer_raw[j * num_values + i] = byte_in_value; - } + constexpr int kNumStreams = static_cast(sizeof(T)); + std::array dest_streams; + for (int stream = 0; stream < kNumStreams; ++stream) { + 
dest_streams[stream] = &output_buffer_raw[stream * num_values]; } + DoSplitStreams(raw_values, kNumStreams, num_values, dest_streams.data()); } template void ByteStreamSplitDecodeScalar(const uint8_t* data, int64_t num_values, int64_t stride, T* out) { - constexpr size_t kNumStreams = sizeof(T); - auto output_buffer_raw = reinterpret_cast(out); - - for (int64_t i = 0; i < num_values; ++i) { - for (size_t b = 0; b < kNumStreams; ++b) { - const size_t byte_index = b * stride + i; - output_buffer_raw[i * kNumStreams + b] = data[byte_index]; - } + constexpr int kNumStreams = static_cast(sizeof(T)); + std::array src_streams; + for (int stream = 0; stream < kNumStreams; ++stream) { + src_streams[stream] = &data[stream * stride]; } + DoMergeStreams(src_streams.data(), kNumStreams, num_values, + reinterpret_cast(out)); } template -void inline ByteStreamSplitEncode(const uint8_t* raw_values, const size_t num_values, +void inline ByteStreamSplitEncode(const uint8_t* raw_values, const int64_t num_values, uint8_t* output_buffer_raw) { #if defined(ARROW_HAVE_SIMD_SPLIT) return ByteStreamSplitEncodeSimd(raw_values, num_values, output_buffer_raw); @@ -625,6 +721,4 @@ void inline ByteStreamSplitDecode(const uint8_t* data, int64_t num_values, int64 #endif } -} // namespace internal -} // namespace util -} // namespace arrow +} // namespace arrow::util::internal diff --git a/cpp/src/arrow/util/byte_stream_split_test.cc b/cpp/src/arrow/util/byte_stream_split_test.cc new file mode 100644 index 0000000000000..3ea27f57da881 --- /dev/null +++ b/cpp/src/arrow/util/byte_stream_split_test.cc @@ -0,0 +1,172 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "arrow/testing/gtest_util.h" +#include "arrow/testing/util.h" +#include "arrow/util/byte_stream_split_internal.h" + +namespace arrow::util::internal { + +using ByteStreamSplitTypes = ::testing::Types; + +template +struct NamedFunc { + std::string name; + Func func; + + friend std::ostream& operator<<(std::ostream& os, const NamedFunc& func) { + os << func.name; + return os; + } +}; + +// A simplistic reference implementation for validation +void RefererenceByteStreamSplitEncode(const uint8_t* src, int width, + const int64_t num_values, uint8_t* dest) { + for (int64_t i = 0; i < num_values; ++i) { + for (int stream = 0; stream < width; ++stream) { + dest[stream * num_values + i] = *src++; + } + } +} + +template +class TestByteStreamSplitSpecialized : public ::testing::Test { + public: + using EncodeFunc = NamedFunc)>>; + using DecodeFunc = NamedFunc)>>; + + static constexpr int kWidth = static_cast(sizeof(T)); + + void SetUp() override { + encode_funcs_.push_back({"reference", &ReferenceEncode}); + encode_funcs_.push_back({"scalar", &ByteStreamSplitEncodeScalar}); + decode_funcs_.push_back({"scalar", &ByteStreamSplitDecodeScalar}); +#if defined(ARROW_HAVE_SIMD_SPLIT) + encode_funcs_.push_back({"simd", &ByteStreamSplitEncodeSimd}); + decode_funcs_.push_back({"simd", &ByteStreamSplitDecodeSimd}); +#endif + } + + void TestRoundtrip(int64_t num_values) { + // Test one-shot roundtrip among all encode/decode function 
combinations + ARROW_SCOPED_TRACE("num_values = ", num_values); + const auto input = MakeRandomInput(num_values); + std::vector encoded(num_values * kWidth); + std::vector decoded(num_values); + + for (const auto& encode_func : encode_funcs_) { + ARROW_SCOPED_TRACE("encode_func = ", encode_func); + encoded.assign(encoded.size(), 0); + encode_func.func(reinterpret_cast(input.data()), num_values, + encoded.data()); + for (const auto& decode_func : decode_funcs_) { + ARROW_SCOPED_TRACE("decode_func = ", decode_func); + decoded.assign(decoded.size(), T{}); + decode_func.func(encoded.data(), num_values, /*stride=*/num_values, + decoded.data()); + ASSERT_EQ(decoded, input); + } + } + } + + void TestPiecewiseDecode(int64_t num_values) { + // Test chunked decoding against the reference encode function + ARROW_SCOPED_TRACE("num_values = ", num_values); + const auto input = MakeRandomInput(num_values); + std::vector encoded(num_values * kWidth); + ReferenceEncode(reinterpret_cast(input.data()), num_values, + encoded.data()); + std::vector decoded(num_values); + + std::default_random_engine gen(seed_++); + std::uniform_int_distribution chunk_size_dist(1, 123); + + for (const auto& decode_func : decode_funcs_) { + ARROW_SCOPED_TRACE("decode_func = ", decode_func); + decoded.assign(decoded.size(), T{}); + + int64_t offset = 0; + while (offset < num_values) { + auto chunk_size = std::min(num_values - offset, chunk_size_dist(gen)); + decode_func.func(encoded.data() + offset, chunk_size, /*stride=*/num_values, + decoded.data() + offset); + offset += chunk_size; + } + ASSERT_EQ(offset, num_values); + ASSERT_EQ(decoded, input); + } + } + + protected: + static void ReferenceEncode(const uint8_t* raw_values, const int64_t num_values, + uint8_t* output_buffer_raw) { + RefererenceByteStreamSplitEncode(raw_values, kWidth, num_values, output_buffer_raw); + } + + static std::vector MakeRandomInput(int64_t num_values) { + std::vector input(num_values); + random_bytes(kWidth * num_values, 
seed_++, reinterpret_cast(input.data())); + // Avoid NaNs to ease comparison + for (auto& value : input) { + if (std::isnan(value)) { + value = nan_replacement_++; + } + } + return input; + } + + std::vector encode_funcs_; + std::vector decode_funcs_; + + static inline uint32_t seed_ = 42; + static inline T nan_replacement_ = 0; +}; + +TYPED_TEST_SUITE(TestByteStreamSplitSpecialized, ByteStreamSplitTypes); + +TYPED_TEST(TestByteStreamSplitSpecialized, RoundtripSmall) { + for (int64_t num_values : {1, 5, 7, 12, 19, 31, 32}) { + this->TestRoundtrip(num_values); + } +} + +TYPED_TEST(TestByteStreamSplitSpecialized, RoundtripMidsized) { + for (int64_t num_values : {126, 127, 128, 129, 133, 200}) { + this->TestRoundtrip(num_values); + } +} + +TYPED_TEST(TestByteStreamSplitSpecialized, PiecewiseDecode) { + this->TestPiecewiseDecode(/*num_values=*/500); +} + +} // namespace arrow::util::internal diff --git a/cpp/src/arrow/util/compression_benchmark.cc b/cpp/src/arrow/util/compression_benchmark.cc index c76be275f426c..cc04eb4634851 100644 --- a/cpp/src/arrow/util/compression_benchmark.cc +++ b/cpp/src/arrow/util/compression_benchmark.cc @@ -30,8 +30,7 @@ #include "arrow/util/logging.h" #include "arrow/util/macros.h" -namespace arrow { -namespace util { +namespace arrow::util { #ifdef ARROW_WITH_BENCHMARKS_REFERENCE @@ -133,6 +132,37 @@ static void ReferenceStreamingCompression( StreamingCompression(COMPRESSION, data, state); } +int64_t Compress(Codec* codec, const std::vector& data, + std::vector* compressed_data) { + const uint8_t* input = data.data(); + int64_t input_len = data.size(); + int64_t compressed_size = 0; + int64_t max_compressed_len = codec->MaxCompressedLen(input_len, input); + compressed_data->resize(max_compressed_len); + + if (input_len > 0) { + compressed_size = *codec->Compress(input_len, input, compressed_data->size(), + compressed_data->data()); + compressed_data->resize(compressed_size); + } + return compressed_size; +} + +template +static void 
ReferenceCompression(benchmark::State& state) { // NOLINT non-const reference + auto data = MakeCompressibleData(8 * 1024 * 1024); // 8 MB + + auto codec = *Codec::Create(COMPRESSION); + + while (state.KeepRunning()) { + std::vector compressed_data; + auto compressed_size = Compress(codec.get(), data, &compressed_data); + state.counters["ratio"] = + static_cast(data.size()) / static_cast(compressed_size); + } + state.SetBytesProcessed(state.iterations() * data.size()); +} + static void StreamingDecompression( Compression::type compression, const std::vector& data, benchmark::State& state) { // NOLINT non-const reference @@ -175,27 +205,64 @@ static void ReferenceStreamingDecompression( StreamingDecompression(COMPRESSION, data, state); } +template +static void ReferenceDecompression( + benchmark::State& state) { // NOLINT non-const reference + auto data = MakeCompressibleData(8 * 1024 * 1024); // 8 MB + + auto codec = *Codec::Create(COMPRESSION); + + std::vector compressed_data; + ARROW_UNUSED(Compress(codec.get(), data, &compressed_data)); + state.counters["ratio"] = + static_cast(data.size()) / static_cast(compressed_data.size()); + + std::vector decompressed_data(data); + while (state.KeepRunning()) { + auto result = codec->Decompress(compressed_data.size(), compressed_data.data(), + decompressed_data.size(), decompressed_data.data()); + ARROW_CHECK(result.ok()); + ARROW_CHECK(*result == static_cast(decompressed_data.size())); + } + state.SetBytesProcessed(state.iterations() * data.size()); +} + #ifdef ARROW_WITH_ZLIB BENCHMARK_TEMPLATE(ReferenceStreamingCompression, Compression::GZIP); +BENCHMARK_TEMPLATE(ReferenceCompression, Compression::GZIP); BENCHMARK_TEMPLATE(ReferenceStreamingDecompression, Compression::GZIP); +BENCHMARK_TEMPLATE(ReferenceDecompression, Compression::GZIP); #endif #ifdef ARROW_WITH_BROTLI BENCHMARK_TEMPLATE(ReferenceStreamingCompression, Compression::BROTLI); +BENCHMARK_TEMPLATE(ReferenceCompression, Compression::BROTLI); 
BENCHMARK_TEMPLATE(ReferenceStreamingDecompression, Compression::BROTLI); +BENCHMARK_TEMPLATE(ReferenceDecompression, Compression::BROTLI); #endif #ifdef ARROW_WITH_ZSTD BENCHMARK_TEMPLATE(ReferenceStreamingCompression, Compression::ZSTD); +BENCHMARK_TEMPLATE(ReferenceCompression, Compression::ZSTD); BENCHMARK_TEMPLATE(ReferenceStreamingDecompression, Compression::ZSTD); +BENCHMARK_TEMPLATE(ReferenceDecompression, Compression::ZSTD); #endif #ifdef ARROW_WITH_LZ4 BENCHMARK_TEMPLATE(ReferenceStreamingCompression, Compression::LZ4_FRAME); +BENCHMARK_TEMPLATE(ReferenceCompression, Compression::LZ4_FRAME); BENCHMARK_TEMPLATE(ReferenceStreamingDecompression, Compression::LZ4_FRAME); +BENCHMARK_TEMPLATE(ReferenceDecompression, Compression::LZ4_FRAME); + +BENCHMARK_TEMPLATE(ReferenceCompression, Compression::LZ4); +BENCHMARK_TEMPLATE(ReferenceDecompression, Compression::LZ4); +#endif + +#ifdef ARROW_WITH_SNAPPY +BENCHMARK_TEMPLATE(ReferenceCompression, Compression::SNAPPY); +BENCHMARK_TEMPLATE(ReferenceDecompression, Compression::SNAPPY); #endif #endif -} // namespace util -} // namespace arrow +} // namespace arrow::util diff --git a/cpp/src/arrow/util/decimal.cc b/cpp/src/arrow/util/decimal.cc index 704b6bb9d491d..13709aa2f0cde 100644 --- a/cpp/src/arrow/util/decimal.cc +++ b/cpp/src/arrow/util/decimal.cc @@ -868,7 +868,7 @@ Result Decimal256::FromBigEndian(const uint8_t* bytes, int32_t lengt std::array little_endian_array; if (ARROW_PREDICT_FALSE(length < kMinDecimalBytes || length > kMaxDecimalBytes)) { - return Status::Invalid("Length of byte array passed to Decimal128::FromBigEndian ", + return Status::Invalid("Length of byte array passed to Decimal256::FromBigEndian ", "was ", length, ", but must be between ", kMinDecimalBytes, " and ", kMaxDecimalBytes); } diff --git a/cpp/src/arrow/util/float16.cc b/cpp/src/arrow/util/float16.cc new file mode 100644 index 0000000000000..5c8b3d10ca0cd --- /dev/null +++ b/cpp/src/arrow/util/float16.cc @@ -0,0 +1,226 @@ +// Licensed 
to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#include +#include + +#include "arrow/util/float16.h" +#include "arrow/util/ubsan.h" + +namespace arrow { +namespace util { + +namespace { + +// -------------------------------------------------------- +// Binary conversions +// -------------------------------------------------------- +// These routines are partially adapted from Numpy's C implementation +// +// Some useful metrics for conversions between different precisions: +// |-----------------------------------------| +// | precision | half | single | double | +// |-----------------------------------------| +// | mantissa | 10 bits | 23 bits | 52 bits | +// | exponent | 5 bits | 8 bits | 11 bits | +// | sign | 1 bit | 1 bit | 1 bit | +// | exp bias | 15 | 127 | 1023 | +// |-----------------------------------------| + +template +struct BinaryConverter { + static_assert(std::is_same_v || std::is_same_v); + + static constexpr int kNumBits = sizeof(T) * 8; + static constexpr int kMantNumBits = (kNumBits == 32) ? 
23 : 52; + static constexpr int kExpNumBits = kNumBits - kMantNumBits - 1; + + static constexpr int kExpBias = (1 << (kExpNumBits - 1)) - 1; + + static constexpr T kMantMask = (T(1) << kMantNumBits) - 1; + static constexpr T kExpMask = ((T(1) << kExpNumBits) - 1) << kMantNumBits; + static constexpr T kSignMask = T(1) << (kNumBits - 1); + + static_assert(kMantNumBits + kExpNumBits + 1 == kNumBits); + static_assert(kSignMask + kExpMask + kMantMask == ~T(0)); + + static uint16_t ToBinary16(T); + static T FromBinary16(uint16_t); +}; + +// Converts an IEEE binary32/64 into a binary16. Rounds to nearest with ties to even +template +uint16_t BinaryConverter::ToBinary16(T f_bits) { + // Sign mask for output binary16 + const uint16_t h_sign = uint16_t((f_bits >> (kNumBits - 16)) & 0x8000); + + // Exponent mask for input binary + const T f_exp = f_bits & kExpMask; + // Exponents as signed pre-shifted values for convenience. Here, we need to re-bias the + exponent for a binary16. If, after re-biasing, the binary16 exponent falls outside of + the range [1,30] then we need to handle the under/overflow case specially. + const int16_t f_biased_exp = int16_t(f_exp >> kMantNumBits); + const int16_t unbiased_exp = f_biased_exp - kExpBias; + const int16_t h_biased_exp = unbiased_exp + 15; + + // Mantissa mask for input + const T f_mant = f_bits & kMantMask; + + // We define a "rounding bit", which is the most significant bit to be dropped + // (e.g. for a binary32, 0x1000). + constexpr T rounding_bit = T(1) << (kMantNumBits - (10 + 1)); + + // Handle exponent overflow, NaN, and +/-Inf + if (h_biased_exp >= 0x1f) { + // The input is a NaN representation + if (f_exp == kExpMask && f_mant != 0) { + uint16_t h_mant = uint16_t(f_mant >> (kMantNumBits - 10)); + // If the mantissa bit(s) indicating NaN were shifted out, add one back. Otherwise, + the result would be infinity.
+ if (h_mant == 0) { + h_mant = 0x1; + } + return uint16_t(h_sign | 0x7c00u | h_mant); + } + + // Clamp to +/-infinity + return uint16_t(h_sign | 0x7c00u); + } + + // Handle exponent underflow, subnormals, and +/-0 + if (h_biased_exp <= 0) { + // If the underflow exceeds the number of bits in a binary16 mantissa (10) then we + can't round, so just clamp to 0. Note that this also weeds out any input values + that are subnormal - including +/-0; + if (h_biased_exp < -10) { + return h_sign; + } + + // Convert to a rounded subnormal value starting with the mantissa. Since the input + is known to be normal at this point, we need to prepend its implicit leading + bit - which also necessitates an additional right-shift. + T rounded_mant = (T(1) << kMantNumBits) | f_mant; + rounded_mant >>= (1 - h_biased_exp); + + // Here, we implement rounding to nearest (with ties to even) + // + // By now, our new mantissa has two conceptual ranges: + // - The lower 13 bits, which will be shifted out + // - The upper 10 bits, which will become the binary16's mantissa + // + // "Rounding to nearest" basically just means that we add 1 to the rounding bit. If + it's set, then the bit will cascade upwards into the 10-bit mantissa (and + potentially the exponent). The only time where we may NOT do this is when a "tie" + occurs - i.e. when the rounding bit is set but all of the lower bits are 0. In that + case, we don't add 1 if the retained mantissa is "even" (its least significant bit + is 0).
+ if ((rounded_mant & ((rounding_bit << 2) - 1)) != rounding_bit || + (f_mant & 0x7ffu) != 0) { + rounded_mant += rounding_bit; + } + + const uint16_t h_mant = uint16_t(rounded_mant >> (kMantNumBits - 10)); + return h_sign + h_mant; + } + + const uint16_t h_exp = uint16_t(h_biased_exp) << 10; + + // See comment on rounding behavior above + T rounded_mant = f_mant; + if ((rounded_mant & ((rounding_bit << 2) - 1)) != rounding_bit) { + rounded_mant += rounding_bit; + } + + const uint16_t h_mant = uint16_t(rounded_mant >> (kMantNumBits - 10)); + // Note that we ADD (rather than OR) the components because we want the carryover bit + // from rounding the mantissa to cascade through the exponent (it shouldn't affect the + // sign bit though). + return h_sign + h_exp + h_mant; +} + +// Converts a IEEE binary16 into a binary32/64 +template +T BinaryConverter::FromBinary16(uint16_t h_bits) { + // Sign mask for output + const T f_sign = T(h_bits & 0x8000u) << (kNumBits - 16); + + // Exponent mask for input binary16 + const uint16_t h_exp = h_bits & 0x7c00; + // Mantissa mask for input binary16 + const uint16_t h_mant = h_bits & 0x3ffu; + + switch (h_exp) { + // Handle Inf and NaN + case 0x7c00u: + return f_sign | kExpMask | (T(h_mant) << (kMantNumBits - 10)); + // Handle zeros and subnormals + case 0x0000u: { + // Input is +/-0 + if (h_mant == 0) { + return f_sign; + } + // Subnormal binary16 to normal binary32/64 + // + // Start with an f32/64-biased exponent of 2^-15. We then decrement it until the + // most significant set bit is left-shifted out - as it doesn't get explicitly + // stored in normalized floating point values. Instead, its existence is implied by + // the new exponent. 
+ T f_exp = kExpBias - 15; + T f_mant = T(h_mant) << 1; + while ((f_mant & 0x0400u) == 0) { + --f_exp; + f_mant <<= 1; + } + f_exp <<= kMantNumBits; + f_mant = (f_mant & 0x03ffu) << (kMantNumBits - 10); + return f_sign | f_exp | f_mant; + } break; + // Handle normals + default: + // Equivalent to rebiasing the exponent and shifting everything by the remaining + // mantissa bits. + return f_sign | + ((T(h_bits & 0x7fffu) + (T(kExpBias - 15) << 10)) << (kMantNumBits - 10)); + } +} + +} // namespace + +float Float16::ToFloat() const { + const uint32_t f_bits = BinaryConverter::FromBinary16(bits_); + return SafeCopy(f_bits); +} + +Float16 Float16::FromFloat(float f) { + const uint32_t f_bits = SafeCopy(f); + return FromBits(BinaryConverter::ToBinary16(f_bits)); +} + +double Float16::ToDouble() const { + const uint64_t d_bits = BinaryConverter::FromBinary16(bits_); + return SafeCopy(d_bits); +} + +Float16 Float16::FromDouble(double d) { + const uint64_t d_bits = SafeCopy(d); + return FromBits(BinaryConverter::ToBinary16(d_bits)); +} + +std::ostream& operator<<(std::ostream& os, Float16 arg) { return (os << arg.ToFloat()); } + +} // namespace util +} // namespace arrow diff --git a/cpp/src/arrow/util/float16.h b/cpp/src/arrow/util/float16.h new file mode 100644 index 0000000000000..0a432fee2cd31 --- /dev/null +++ b/cpp/src/arrow/util/float16.h @@ -0,0 +1,209 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#pragma once + +#include +#include +#include +#include +#include +#include + +#include "arrow/util/endian.h" +#include "arrow/util/macros.h" +#include "arrow/util/ubsan.h" +#include "arrow/util/visibility.h" + +namespace arrow { +namespace util { + +/// \brief Class representing an IEEE half-precision float, encoded as a `uint16_t` +/// +/// The exact format is as follows (from LSB to MSB): +/// - bits 0-9: mantissa (10 bits) +/// - bits 10-14: exponent (5 bits) +/// - bit 15: sign +/// +class ARROW_EXPORT Float16 { + public: + Float16() = default; + explicit Float16(float f) : Float16(FromFloat(f)) {} + explicit Float16(double d) : Float16(FromDouble(d)) {} + template >* = NULLPTR> + explicit Float16(T v) : Float16(static_cast(v)) {} + + /// \brief Create a `Float16` from its exact binary representation + constexpr static Float16 FromBits(uint16_t bits) { return Float16{bits, bool{}}; } + /// \brief Create a `Float16` from a 32-bit float (may lose precision) + static Float16 FromFloat(float f); + /// \brief Create a `Float16` from a 64-bit float (may lose precision) + static Float16 FromDouble(double d); + + /// \brief Read a `Float16` from memory in native-endian byte order + static Float16 FromBytes(const uint8_t* src) { + return FromBits(SafeLoadAs(src)); + } + + /// \brief Read a `Float16` from memory in little-endian byte order + static Float16 FromLittleEndian(const uint8_t* src) { + return FromBits(::arrow::bit_util::FromLittleEndian(SafeLoadAs(src))); + } + + /// \brief Read a `Float16` from memory in big-endian byte order + static Float16
FromBigEndian(const uint8_t* src) { + return FromBits(::arrow::bit_util::FromBigEndian(SafeLoadAs(src))); + } + + /// \brief Return the value's binary representation as a `uint16_t` + constexpr uint16_t bits() const { return bits_; } + + /// \brief Return true if the value is negative (sign bit is set) + constexpr bool signbit() const { return (bits_ & 0x8000) != 0; } + + /// \brief Return true if the value is NaN + constexpr bool is_nan() const { return (bits_ & 0x7fff) > 0x7c00; } + /// \brief Return true if the value is positive/negative infinity + constexpr bool is_infinity() const { return (bits_ & 0x7fff) == 0x7c00; } + /// \brief Return true if the value is finite and not NaN + constexpr bool is_finite() const { return (bits_ & 0x7c00) != 0x7c00; } + /// \brief Return true if the value is positive/negative zero + constexpr bool is_zero() const { return (bits_ & 0x7fff) == 0; } + + /// \brief Convert to a 32-bit float + float ToFloat() const; + /// \brief Convert to a 64-bit float + double ToDouble() const; + + explicit operator float() const { return ToFloat(); } + explicit operator double() const { return ToDouble(); } + + /// \brief Copy the value's bytes in native-endian byte order + void ToBytes(uint8_t* dest) const { std::memcpy(dest, &bits_, sizeof(bits_)); } + /// \brief Return the value's bytes in native-endian byte order + constexpr std::array ToBytes() const { +#if ARROW_LITTLE_ENDIAN + return ToLittleEndian(); +#else + return ToBigEndian(); +#endif + } + + /// \brief Copy the value's bytes in little-endian byte order + void ToLittleEndian(uint8_t* dest) const { + const auto bytes = ToLittleEndian(); + std::memcpy(dest, bytes.data(), bytes.size()); + } + /// \brief Return the value's bytes in little-endian byte order + constexpr std::array ToLittleEndian() const { +#if ARROW_LITTLE_ENDIAN + return {uint8_t(bits_ & 0xff), uint8_t(bits_ >> 8)}; +#else + return {uint8_t(bits_ >> 8), uint8_t(bits_ & 0xff)}; +#endif + } + + /// \brief Copy the value's 
bytes in big-endian byte order + void ToBigEndian(uint8_t* dest) const { + const auto bytes = ToBigEndian(); + std::memcpy(dest, bytes.data(), bytes.size()); + } + /// \brief Return the value's bytes in big-endian byte order + constexpr std::array ToBigEndian() const { +#if ARROW_LITTLE_ENDIAN + return {uint8_t(bits_ >> 8), uint8_t(bits_ & 0xff)}; +#else + return {uint8_t(bits_ & 0xff), uint8_t(bits_ >> 8)}; +#endif + } + + constexpr Float16 operator-() const { return FromBits(bits_ ^ 0x8000); } + constexpr Float16 operator+() const { return FromBits(bits_); } + + friend constexpr bool operator==(Float16 lhs, Float16 rhs) { + if (lhs.is_nan() || rhs.is_nan()) return false; + return Float16::CompareEq(lhs, rhs); + } + friend constexpr bool operator!=(Float16 lhs, Float16 rhs) { return !(lhs == rhs); } + + friend constexpr bool operator<(Float16 lhs, Float16 rhs) { + if (lhs.is_nan() || rhs.is_nan()) return false; + return Float16::CompareLt(lhs, rhs); + } + friend constexpr bool operator>(Float16 lhs, Float16 rhs) { return rhs < lhs; } + + friend constexpr bool operator<=(Float16 lhs, Float16 rhs) { + if (lhs.is_nan() || rhs.is_nan()) return false; + return !Float16::CompareLt(rhs, lhs); + } + friend constexpr bool operator>=(Float16 lhs, Float16 rhs) { return rhs <= lhs; } + + ARROW_FRIEND_EXPORT friend std::ostream& operator<<(std::ostream& os, Float16 arg); + + protected: + uint16_t bits_; + + private: + constexpr Float16(uint16_t bits, bool) : bits_(bits) {} + + // Comparison helpers that assume neither operand is NaN + static constexpr bool CompareEq(Float16 lhs, Float16 rhs) { + return (lhs.bits() == rhs.bits()) || (lhs.is_zero() && rhs.is_zero()); + } + static constexpr bool CompareLt(Float16 lhs, Float16 rhs) { + if (lhs.signbit()) { + if (rhs.signbit()) { + // Both are negative + return lhs.bits() > rhs.bits(); + } else { + // Handle +/-0 + return !lhs.is_zero() || rhs.bits() != 0; + } + } else if (rhs.signbit()) { + return false; + } else { + // Both are 
positive + return lhs.bits() < rhs.bits(); + } + } +}; + +static_assert(std::is_trivial_v); + +} // namespace util +} // namespace arrow + +// TODO: Not complete +template <> +class std::numeric_limits { + using T = arrow::util::Float16; + + public: + static constexpr bool is_specialized = true; + static constexpr bool is_signed = true; + static constexpr bool has_infinity = true; + static constexpr bool has_quiet_NaN = true; + + static constexpr T min() { return T::FromBits(0b0000010000000000); } + static constexpr T max() { return T::FromBits(0b0111101111111111); } + static constexpr T lowest() { return -max(); } + + static constexpr T infinity() { return T::FromBits(0b0111110000000000); } + + static constexpr T quiet_NaN() { return T::FromBits(0b0111111111111111); } +}; diff --git a/cpp/src/arrow/util/float16_test.cc b/cpp/src/arrow/util/float16_test.cc new file mode 100644 index 0000000000000..073375882e3c2 --- /dev/null +++ b/cpp/src/arrow/util/float16_test.cc @@ -0,0 +1,367 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +#include +#include +#include + +#include + +#include "arrow/testing/gtest_util.h" +#include "arrow/util/endian.h" +#include "arrow/util/float16.h" +#include "arrow/util/span.h" +#include "arrow/util/ubsan.h" + +namespace arrow::util { +namespace { + +template +using Limits = std::numeric_limits; + +float F32(uint32_t bits) { return SafeCopy(bits); } +double F64(uint64_t bits) { return SafeCopy(bits); } + +template +class Float16ConversionTest : public ::testing::Test { + public: + struct RoundTripTestCase { + T input; + uint16_t bits; + T output; + }; + + static void TestRoundTrip(span test_cases) { + for (size_t index = 0; index < test_cases.size(); ++index) { + ARROW_SCOPED_TRACE("i=", index); + const auto& tc = test_cases[index]; + + const auto f16 = Float16(tc.input); + EXPECT_EQ(tc.bits, f16.bits()); + EXPECT_EQ(tc.output, static_cast(f16)); + + EXPECT_EQ(std::signbit(tc.output), f16.signbit()); + EXPECT_EQ(std::isnan(tc.output), f16.is_nan()); + EXPECT_EQ(std::isinf(tc.output), f16.is_infinity()); + EXPECT_EQ(std::isfinite(tc.output), f16.is_finite()); + } + } + + static void TestRoundTripFromNaN(span test_cases) { + for (size_t i = 0; i < test_cases.size(); ++i) { + ARROW_SCOPED_TRACE("i=", i); + const auto input = test_cases[i]; + + ASSERT_TRUE(std::isnan(input)); + const bool sign = std::signbit(input); + + const auto f16 = Float16(input); + EXPECT_TRUE(f16.is_nan()); + EXPECT_EQ(std::isinf(input), f16.is_infinity()); + EXPECT_EQ(std::isfinite(input), f16.is_finite()); + EXPECT_EQ(sign, f16.signbit()); + + const auto output = static_cast(f16); + EXPECT_TRUE(std::isnan(output)); + EXPECT_EQ(sign, std::signbit(output)); + } + } + + void TestRoundTripFromInf() { + const T test_cases[] = {+Limits::infinity(), -Limits::infinity()}; + + for (size_t i = 0; i < std::size(test_cases); ++i) { + ARROW_SCOPED_TRACE("i=", i); + const auto input = test_cases[i]; + + ASSERT_TRUE(std::isinf(input)); + const bool sign = std::signbit(input); + + const auto f16 = 
Float16(input); + EXPECT_TRUE(f16.is_infinity()); + EXPECT_EQ(std::isfinite(input), f16.is_finite()); + EXPECT_EQ(std::isnan(input), f16.is_nan()); + EXPECT_EQ(sign, f16.signbit()); + + const auto output = static_cast(f16); + EXPECT_TRUE(std::isinf(output)); + EXPECT_EQ(sign, std::signbit(output)); + } + } + + void TestRoundTrip(); + void TestRoundTripFromNaN(); +}; + +template <> +void Float16ConversionTest::TestRoundTrip() { + // Expected values were also manually validated with numpy-1.24.3 + const RoundTripTestCase test_cases[] = { + // +/-0.0f + {F32(0x80000000u), 0b1000000000000000u, -0.0f}, + {F32(0x00000000u), 0b0000000000000000u, +0.0f}, + // 32-bit exp is 102 => 2^-25. Rounding to nearest. + {F32(0xb3000001u), 0b1000000000000001u, -5.96046447754e-8f}, + // 32-bit exp is 102 => 2^-25. Rounding to even. + {F32(0xb3000000u), 0b1000000000000000u, -0.0f}, + // 32-bit exp is 101 => 2^-26. Underflow to zero. + {F32(0xb2800001u), 0b1000000000000000u, -0.0f}, + // 32-bit exp is 108 => 2^-19. + {F32(0xb61a0000u), 0b1000000000100110u, -2.26497650146e-6f}, + // 32-bit exp is 108 => 2^-19. + {F32(0xb61e0000u), 0b1000000000101000u, -2.38418579102e-6f}, + // 32-bit exp is 112 => 2^-15. Rounding to nearest. + {F32(0xb87fa001u), 0b1000001111111111u, -6.09755516052e-5f}, + // 32-bit exp is 112 => 2^-15. Rounds to 16-bit exp of 1 => 2^-14 + {F32(0xb87fe001u), 0b1000010000000000u, -6.103515625e-5f}, + // 32-bit exp is 142 => 2^15. Rounding to nearest. + {F32(0xc7001001u), 0b1111100000000001u, -32800.0f}, + // 32-bit exp is 142 => 2^15. Rounding to even. + {F32(0xc7001000u), 0b1111100000000000u, -32768.0f}, + // 65520.0f rounds to inf + {F32(0x477ff000u), 0b0111110000000000u, Limits::infinity()}, + // 65488.0039062f rounds to 65504.0 (float16 max) + {F32(0x477fd001u), 0b0111101111111111u, 65504.0f}, + // 32-bit exp is 127 => 2^0, rounds to 16-bit exp of 16 => 2^1. 
+ {F32(0xbffff000u), 0b1100000000000000u, -2.0f}, + // Extreme values should safely clamp to +/-inf + {Limits::max(), 0b0111110000000000u, +Limits::infinity()}, + {Limits::lowest(), 0b1111110000000000u, -Limits::infinity()}, + }; + + TestRoundTrip(span(test_cases, std::size(test_cases))); +} + +template <> +void Float16ConversionTest::TestRoundTrip() { + // Expected values were also manually validated with numpy-1.24.3 + const RoundTripTestCase test_cases[] = { + // +/-0.0 + {F64(0x8000000000000000u), 0b1000000000000000u, -0.0}, + {F64(0x0000000000000000u), 0b0000000000000000u, +0.0}, + // 64-bit exp is 998 => 2^-25. Rounding to nearest. + {F64(0xbe60000000000001u), 0b1000000000000001u, -5.9604644775390625e-8}, + // 64-bit exp is 998 => 2^-25. Rounding to even. + {F64(0xbe60000000000000u), 0b1000000000000000u, -0.0}, + // 64-bit exp is 997 => 2^-26. Underflow to zero. + {F64(0xbe50000000000001u), 0b1000000000000000u, -0.0}, + // 64-bit exp is 1004 => 2^-19. + {F64(0xbec3400000000000u), 0b1000000000100110u, -2.2649765014648438e-6}, + // 64-bit exp is 1004 => 2^-19. + {F64(0xbec3c00000000000u), 0b1000000000101000u, -2.3841857910156250e-6}, + // 64-bit exp is 1008 => 2^-15. Rounding to nearest. + {F64(0xbf0ff40000000001u), 0b1000001111111111u, -6.0975551605224609e-5}, + // 64-bit exp is 1008 => 2^-15. Rounds to 16-bit exp of 1 => 2^-14 + {F64(0xbf0ffc0000000001u), 0b1000010000000000u, -6.1035156250000000e-5}, + // 64-bit exp is 1038 => 2^15. Rounding to nearest. + {F64(0xc0e0020000000001u), 0b1111100000000001u, -32800.0}, + // 64-bit exp is 1038 => 2^15. Rounding to even. + {F64(0xc0e0020000000000u), 0b1111100000000000u, -32768.0}, + // 65520.0 rounds to inf + {F64(0x40effe0000000000u), 0b0111110000000000u, Limits::infinity()}, + // 65488.00000000001 rounds to 65504.0 (float16 max) + {F64(0x40effa0000000001u), 0b0111101111111111u, 65504.0}, + // 64-bit exp is 1023 => 2^0, rounds to 16-bit exp of 16 => 2^1. 
+ {F64(0xbffffe0000000000u), 0b1100000000000000u, -2.0}, + // Extreme values should safely clamp to +/-inf + {Limits::max(), 0b0111110000000000u, +Limits::infinity()}, + {Limits::lowest(), 0b1111110000000000u, -Limits::infinity()}, + }; + + TestRoundTrip(span(test_cases, std::size(test_cases))); +} + +template <> +void Float16ConversionTest::TestRoundTripFromNaN() { + const float test_cases[] = { + Limits::quiet_NaN(), F32(0x7f800001u), F32(0xff800001u), F32(0x7fc00000u), + F32(0xffc00000u), F32(0x7fffffffu), F32(0xffffffffu)}; + TestRoundTripFromNaN(span(test_cases, std::size(test_cases))); +} + +template <> +void Float16ConversionTest::TestRoundTripFromNaN() { + const double test_cases[] = {Limits::quiet_NaN(), F64(0x7ff0000000000001u), + F64(0xfff0000000000001u), F64(0x7ff8000000000000u), + F64(0xfff8000000000000u), F64(0x7fffffffffffffffu), + F64(0xffffffffffffffffu)}; + TestRoundTripFromNaN(span(test_cases, std::size(test_cases))); +} + +using NativeFloatTypes = ::testing::Types; + +TYPED_TEST_SUITE(Float16ConversionTest, NativeFloatTypes); + +TYPED_TEST(Float16ConversionTest, RoundTrip) { this->TestRoundTrip(); } +TYPED_TEST(Float16ConversionTest, RoundTripFromNaN) { this->TestRoundTripFromNaN(); } +TYPED_TEST(Float16ConversionTest, RoundTripFromInf) { this->TestRoundTripFromInf(); } + +TEST(Float16Test, ConstexprFunctions) { + constexpr auto a = Float16::FromBits(0xbc00); // -1.0 + constexpr auto b = Float16::FromBits(0x3c00); // +1.0 + + static_assert(a.bits() == 0xbc00); + static_assert(a.signbit() == true); + static_assert(a.is_nan() == false); + static_assert(a.is_infinity() == false); + static_assert(a.is_finite() == true); + static_assert(a.is_zero() == false); + + static_assert((a == b) == false); + static_assert((a != b) == true); + static_assert((a < b) == true); + static_assert((a > b) == false); + static_assert((a <= b) == true); + static_assert((a >= b) == false); + static_assert(-a == +b); + + constexpr auto v = Float16::FromBits(0xffff); + 
static_assert(v.ToBytes()[0] == 0xff); + static_assert(v.ToLittleEndian()[0] == 0xff); + static_assert(v.ToBigEndian()[0] == 0xff); +} + +TEST(Float16Test, Constructors) { + // Construction from exact bits + ASSERT_EQ(1, Float16::FromBits(1).bits()); + // Construction from floating point (including implicit conversions) + int i = 0; + for (auto f16 : {Float16(1.0f), Float16(1.0), Float16(1)}) { + ARROW_SCOPED_TRACE("i=", i++); + ASSERT_EQ(0x3c00, f16.bits()); + } +} + +TEST(Float16Test, Compare) { + constexpr float f32_inf = Limits::infinity(); + constexpr float f32_nan = Limits::quiet_NaN(); + + const struct { + Float16 f16; + float f32; + } test_values[] = { + {Limits::min(), +6.103515625e-05f}, + {Limits::max(), +65504.0f}, + {Limits::lowest(), -65504.0f}, + {+Limits::infinity(), +f32_inf}, + {-Limits::infinity(), -f32_inf}, + // Multiple (semantically equivalent) NaN representations + {Float16::FromBits(0x7e00), f32_nan}, + {Float16::FromBits(0xfe00), f32_nan}, + {Float16::FromBits(0x7fff), f32_nan}, + {Float16::FromBits(0xffff), f32_nan}, + // Positive/negative zeros + {Float16::FromBits(0x0000), +0.0f}, + {Float16::FromBits(0x8000), -0.0f}, + // Miscellaneous values. 
In general, they're chosen to test the sign/exponent and + // exponent/mantissa boundaries + {Float16::FromBits(0x101c), +0.00050163269043f}, + {Float16::FromBits(0x901c), -0.00050163269043f}, + {Float16::FromBits(0x101d), +0.000502109527588f}, + {Float16::FromBits(0x901d), -0.000502109527588f}, + {Float16::FromBits(0x121c), +0.00074577331543f}, + {Float16::FromBits(0x921c), -0.00074577331543f}, + {Float16::FromBits(0x141c), +0.00100326538086f}, + {Float16::FromBits(0x941c), -0.00100326538086f}, + {Float16::FromBits(0x501c), +32.875f}, + {Float16::FromBits(0xd01c), -32.875f}, + // A few subnormals for good measure + {Float16::FromBits(0x001c), +1.66893005371e-06f}, + {Float16::FromBits(0x801c), -1.66893005371e-06f}, + {Float16::FromBits(0x021c), +3.21865081787e-05f}, + {Float16::FromBits(0x821c), -3.21865081787e-05f}, + }; + + auto expect_op = [&](std::string op_name, auto op) { + ARROW_SCOPED_TRACE(op_name); + const auto num_values = static_cast(std::size(test_values)); + + // Check all combinations of operands in both directions + for (int i = 0; i < num_values; ++i) { + for (int j = 0; j < num_values; ++j) { + auto [a16, a32] = test_values[i]; + auto [b16, b32] = test_values[j]; + ARROW_SCOPED_TRACE("[", i, ",", j, "] = ", a16, ",", b16); + + // Results for float16 and float32 should be the same + ASSERT_EQ(op(a16, b16), op(a32, b32)); + } + } + }; + + // Verify that our "equivalent" 16/32-bit values actually are + for (const auto& v : test_values) { + if (std::isnan(v.f32)) { + ASSERT_TRUE(std::isnan(v.f16.ToFloat())); + } else { + ASSERT_EQ(v.f32, v.f16.ToFloat()); + } + } + + expect_op("equal", [](auto l, auto r) { return l == r; }); + expect_op("not_equal", [](auto l, auto r) { return l != r; }); + expect_op("less", [](auto l, auto r) { return l < r; }); + expect_op("greater", [](auto l, auto r) { return l > r; }); + expect_op("less_equal", [](auto l, auto r) { return l <= r; }); + expect_op("greater_equal", [](auto l, auto r) { return l >= r; }); +} + 
+TEST(Float16Test, ToBytes) { + constexpr auto f16 = Float16::FromBits(0xd01c); + std::array bytes; + auto load = [&bytes]() { return SafeLoadAs(bytes.data()); }; + + // Test native-endian + f16.ToBytes(bytes.data()); + ASSERT_EQ(load(), 0xd01c); + bytes = f16.ToBytes(); + ASSERT_EQ(load(), 0xd01c); + +#if ARROW_LITTLE_ENDIAN + constexpr uint16_t expected_le = 0xd01c; + constexpr uint16_t expected_be = 0x1cd0; +#else + constexpr uint16_t expected_le = 0x1cd0; + constexpr uint16_t expected_be = 0xd01c; +#endif + // Test little-endian + f16.ToLittleEndian(bytes.data()); + ASSERT_EQ(load(), expected_le); + bytes = f16.ToLittleEndian(); + ASSERT_EQ(load(), expected_le); + // Test big-endian + f16.ToBigEndian(bytes.data()); + ASSERT_EQ(load(), expected_be); + bytes = f16.ToBigEndian(); + ASSERT_EQ(load(), expected_be); +} + +TEST(Float16Test, FromBytes) { + constexpr uint16_t u16 = 0xd01c; + const auto* data = reinterpret_cast(&u16); + ASSERT_EQ(Float16::FromBytes(data), Float16::FromBits(0xd01c)); +#if ARROW_LITTLE_ENDIAN + ASSERT_EQ(Float16::FromLittleEndian(data), Float16::FromBits(0xd01c)); + ASSERT_EQ(Float16::FromBigEndian(data), Float16::FromBits(0x1cd0)); +#else + ASSERT_EQ(Float16::FromLittleEndian(data), Float16(0x1cd0)); + ASSERT_EQ(Float16::FromBigEndian(data), Float16(0xd01c)); +#endif +} + +} // namespace +} // namespace arrow::util diff --git a/cpp/src/arrow/util/ree_util.cc b/cpp/src/arrow/util/ree_util.cc index fcd6c204e06b2..819de5eb60c63 100644 --- a/cpp/src/arrow/util/ree_util.cc +++ b/cpp/src/arrow/util/ree_util.cc @@ -61,6 +61,62 @@ int64_t LogicalNullCount(const ArraySpan& span) { return LogicalNullCount(span); } +namespace internal { + +/// \pre 0 <= i < array_span.length() +template +int64_t FindPhysicalIndexImpl(PhysicalIndexFinder& self, int64_t i) { + DCHECK_LT(i, self.array_span.length); + const int64_t run_ends_size = ree_util::RunEndsArray(self.array_span).length; + DCHECK_LT(self.last_physical_index, run_ends_size); + // This access to 
self.run_ends[last_physical_index] is always safe because: + // 1. 0 <= i < array_span.length() implies there is at least one run and the initial + // value 0 will be safe to index with. + // 2. last_physical_index > 0 is always the result of a valid call to + // internal::FindPhysicalIndex. + if (ARROW_PREDICT_TRUE(self.array_span.offset + i < + self.run_ends[self.last_physical_index])) { + // The cached value is an upper-bound, but is it the least upper-bound? + if (self.last_physical_index == 0 || + self.array_span.offset + i >= self.run_ends[self.last_physical_index - 1]) { + return self.last_physical_index; + } + // last_physical_index - 1 is a candidate for the least upper-bound, + // so search for the least upper-bound in the range that includes it. + const int64_t j = ree_util::internal::FindPhysicalIndex( + self.run_ends, /*run_ends_size=*/self.last_physical_index, i, + self.array_span.offset); + DCHECK_LT(j, self.last_physical_index); + return self.last_physical_index = j; + } + + // last_physical_index is not an upper-bound, and the logical index i MUST be + // in the runs that follow it. Since i is a valid logical index, we know that at least + // one extra run is present. 
+ DCHECK_LT(self.last_physical_index + 1, run_ends_size); + const int64_t min_physical_index = self.last_physical_index + 1; + + const int64_t j = ree_util::internal::FindPhysicalIndex( + /*run_ends=*/self.run_ends + min_physical_index, + /*run_ends_size=*/run_ends_size - min_physical_index, i, self.array_span.offset); + DCHECK_LT(min_physical_index + j, run_ends_size); + return self.last_physical_index = min_physical_index + j; +} + +int64_t FindPhysicalIndexImpl16(PhysicalIndexFinder& self, int64_t i) { + return FindPhysicalIndexImpl(self, i); +} + +int64_t FindPhysicalIndexImpl32(PhysicalIndexFinder& self, int64_t i) { + return FindPhysicalIndexImpl(self, i); +} + +int64_t FindPhysicalIndexImpl64(PhysicalIndexFinder& self, int64_t i) { + return FindPhysicalIndexImpl(self, i); +} + +} // namespace internal + int64_t FindPhysicalIndex(const ArraySpan& span, int64_t i, int64_t absolute_offset) { const auto type_id = RunEndsArray(span).type->id(); if (type_id == Type::INT16) { diff --git a/cpp/src/arrow/util/ree_util.h b/cpp/src/arrow/util/ree_util.h index a3d3d16c0da95..2b7940154a50b 100644 --- a/cpp/src/arrow/util/ree_util.h +++ b/cpp/src/arrow/util/ree_util.h @@ -23,6 +23,7 @@ #include "arrow/array/data.h" #include "arrow/type_traits.h" +#include "arrow/util/checked_cast.h" #include "arrow/util/macros.h" namespace arrow { @@ -139,6 +140,69 @@ int64_t FindPhysicalLength(const ArraySpan& span) { /*offset=*/span.offset); } +template +struct PhysicalIndexFinder; + +// non-inline implementations for each run-end type +ARROW_EXPORT int64_t FindPhysicalIndexImpl16(PhysicalIndexFinder& self, + int64_t i); +ARROW_EXPORT int64_t FindPhysicalIndexImpl32(PhysicalIndexFinder& self, + int64_t i); +ARROW_EXPORT int64_t FindPhysicalIndexImpl64(PhysicalIndexFinder& self, + int64_t i); + +/// \brief Stateful version of FindPhysicalIndex() that caches the result of +/// the previous search and uses it to optimize the next search. 
+/// +/// When new queries for the physical index of a logical index come in, +/// binary search is performed again but the first candidate checked is the +/// result of the previous search (cached physical index) instead of the +/// midpoint of the run-ends array. +/// +/// If that test fails, internal::FindPhysicalIndex() is called with one of the +/// partitions defined by the cached index. If the queried logical indices +/// follow an increasing or decreasing pattern, this first test is much more +/// effective in (1) finding the answer right away (close logical indices belong +/// to the same runs) or (2) discarding many more candidates than probing +/// the midpoint would. +/// +/// The most adversarial case (i.e. alternating between 0 and length-1 queries) +/// only adds one extra binary search probe when compared to always starting +/// binary search from the midpoint without any of these optimizations. +/// +/// \tparam RunEndCType The numeric type of the run-ends array. +template +struct PhysicalIndexFinder { + const ArraySpan array_span; + const RunEndCType* run_ends; + int64_t last_physical_index = 0; + + explicit PhysicalIndexFinder(const ArrayData& data) + : array_span(data), + run_ends(RunEndsArray(array_span).template GetValues(1)) { + assert(CTypeTraits::ArrowType::type_id == + ::arrow::internal::checked_cast(*data.type) + .run_end_type() + ->id()); + } + + /// \brief Find the physical index into the values array of the REE array. 
+ /// + /// \pre 0 <= i < array_span.length() + /// \param i the logical index into the REE array + /// \return the physical index into the values array + int64_t FindPhysicalIndex(int64_t i) { + if constexpr (std::is_same_v) { + return FindPhysicalIndexImpl16(*this, i); + } else if constexpr (std::is_same_v) { + return FindPhysicalIndexImpl32(*this, i); + } else { + static_assert(std::is_same_v, "Unsupported RunEndCType."); + return FindPhysicalIndexImpl64(*this, i); + } + } +}; + } // namespace internal /// \brief Find the physical index into the values array of the REE ArraySpan @@ -166,6 +230,10 @@ ARROW_EXPORT std::pair FindPhysicalRange(const ArraySpan& span int64_t offset, int64_t length); +// Publish PhysicalIndexFinder outside of the internal namespace. +template +using PhysicalIndexFinder = internal::PhysicalIndexFinder; + template class RunEndEncodedArraySpan { private: diff --git a/cpp/src/arrow/util/string.cc b/cpp/src/arrow/util/string.cc index 2055b4f47ea22..192173fa16ce9 100644 --- a/cpp/src/arrow/util/string.cc +++ b/cpp/src/arrow/util/string.cc @@ -25,15 +25,13 @@ namespace arrow { -static const char* kAsciiTable = "0123456789ABCDEF"; - std::string HexEncode(const uint8_t* data, size_t length) { - std::string hex_string; - hex_string.reserve(length * 2); - for (size_t j = 0; j < length; ++j) { + std::string hex_string(length * 2, '\0'); + for (size_t j = 0, i = 0; j < length; ++j) { // Convert to 2 base16 digits - hex_string.push_back(kAsciiTable[data[j] >> 4]); - hex_string.push_back(kAsciiTable[data[j] & 15]); + constexpr auto kHexDigitTable = "0123456789ABCDEF"; + hex_string[i++] = kHexDigitTable[data[j] >> 4]; + hex_string[i++] = kHexDigitTable[data[j] & 0b1111]; } return hex_string; } @@ -73,20 +71,34 @@ std::string HexEncode(std::string_view str) { return HexEncode(str.data(), str.s std::string Escape(std::string_view str) { return Escape(str.data(), str.size()); } -Status ParseHexValue(const char* data, uint8_t* out) { - char c1 = data[0]; 
- char c2 = data[1]; +constexpr uint8_t kInvalidHexDigit = -1; - const char* kAsciiTableEnd = kAsciiTable + 16; - const char* pos1 = std::lower_bound(kAsciiTable, kAsciiTableEnd, c1); - const char* pos2 = std::lower_bound(kAsciiTable, kAsciiTableEnd, c2); +constexpr uint8_t ParseHexDigit(char c) { + if (c >= '0' && c <= '9') return c - '0'; + if (c >= 'A' && c <= 'F') return c - 'A' + 10; + return kInvalidHexDigit; +} + +Status ParseHexValue(const char* data, uint8_t* out) { + uint8_t high = ParseHexDigit(data[0]); + uint8_t low = ParseHexDigit(data[1]); // Error checking - if (pos1 == kAsciiTableEnd || pos2 == kAsciiTableEnd || *pos1 != c1 || *pos2 != c2) { + if (high == kInvalidHexDigit || low == kInvalidHexDigit) { return Status::Invalid("Encountered non-hex digit"); } - *out = static_cast((pos1 - kAsciiTable) << 4 | (pos2 - kAsciiTable)); + *out = static_cast(high << 4 | low); + return Status::OK(); +} + +Status ParseHexValues(std::string_view hex_string, uint8_t* out) { + if (hex_string.size() % 2 != 0) { + return Status::Invalid("Expected base16 hex string"); + } + for (size_t j = 0; j < hex_string.size() / 2; ++j) { + RETURN_NOT_OK(ParseHexValue(hex_string.data() + j * 2, out + j)); + } return Status::OK(); } diff --git a/cpp/src/arrow/util/string.h b/cpp/src/arrow/util/string.h index d9777efc56a8c..d7e377773f62f 100644 --- a/cpp/src/arrow/util/string.h +++ b/cpp/src/arrow/util/string.h @@ -46,7 +46,9 @@ ARROW_EXPORT std::string HexEncode(std::string_view str); ARROW_EXPORT std::string Escape(std::string_view str); -ARROW_EXPORT Status ParseHexValue(const char* data, uint8_t* out); +ARROW_EXPORT Status ParseHexValue(const char* hex_pair, uint8_t* out); + +ARROW_EXPORT Status ParseHexValues(std::string_view hex_string, uint8_t* out); namespace internal { diff --git a/cpp/src/arrow/visit_data_inline.h b/cpp/src/arrow/visit_data_inline.h index 6a9b32d73a635..a2ba9cfc65071 100644 --- a/cpp/src/arrow/visit_data_inline.h +++ b/cpp/src/arrow/visit_data_inline.h @@ 
-23,6 +23,7 @@ #include "arrow/status.h" #include "arrow/type.h" #include "arrow/type_traits.h" +#include "arrow/util/binary_view_util.h" #include "arrow/util/bit_block_counter.h" #include "arrow/util/bit_util.h" #include "arrow/util/checked_cast.h" @@ -144,6 +145,42 @@ struct ArraySpanInlineVisitor> { } }; +// BinaryView, StringView... +template +struct ArraySpanInlineVisitor> { + using c_type = std::string_view; + + template + static Status VisitStatus(const ArraySpan& arr, ValidFunc&& valid_func, + NullFunc&& null_func) { + if (arr.length == 0) { + return Status::OK(); + } + auto* s = arr.GetValues(1); + auto* data_buffers = arr.GetVariadicBuffers().data(); + return VisitBitBlocks( + arr.buffers[0].data, arr.offset, arr.length, + [&](int64_t index) { + return valid_func(util::FromBinaryView(s[index], data_buffers)); + }, + [&]() { return null_func(); }); + } + + template + static void VisitVoid(const ArraySpan& arr, ValidFunc&& valid_func, + NullFunc&& null_func) { + if (arr.length == 0) { + return; + } + auto* s = arr.GetValues(1); + auto* data_buffers = arr.GetVariadicBuffers().data(); + VisitBitBlocksVoid( + arr.buffers[0].data, arr.offset, arr.length, + [&](int64_t index) { valid_func(util::FromBinaryView(s[index], data_buffers)); }, + std::forward(null_func)); + } +}; + // FixedSizeBinary, Decimal128 template struct ArraySpanInlineVisitor> { @@ -240,9 +277,8 @@ typename internal::call_traits::enable_if_return::type VisitNullBitmapInline(const uint8_t* valid_bits, int64_t valid_bits_offset, int64_t num_values, int64_t null_count, ValidFunc&& valid_func, NullFunc&& null_func) { - ARROW_UNUSED(null_count); - internal::OptionalBitBlockCounter bit_counter(valid_bits, valid_bits_offset, - num_values); + internal::OptionalBitBlockCounter bit_counter(null_count == 0 ? 
NULLPTR : valid_bits, + valid_bits_offset, num_values); int64_t position = 0; int64_t offset_position = valid_bits_offset; while (position < num_values) { @@ -273,9 +309,8 @@ typename internal::call_traits::enable_if_return::type VisitNullBitmapInline(const uint8_t* valid_bits, int64_t valid_bits_offset, int64_t num_values, int64_t null_count, ValidFunc&& valid_func, NullFunc&& null_func) { - ARROW_UNUSED(null_count); - internal::OptionalBitBlockCounter bit_counter(valid_bits, valid_bits_offset, - num_values); + internal::OptionalBitBlockCounter bit_counter(null_count == 0 ? NULLPTR : valid_bits, + valid_bits_offset, num_values); int64_t position = 0; int64_t offset_position = valid_bits_offset; while (position < num_values) { diff --git a/cpp/src/arrow/visitor.cc b/cpp/src/arrow/visitor.cc index ed3d5bc2c68d7..e057f6b12fb1b 100644 --- a/cpp/src/arrow/visitor.cc +++ b/cpp/src/arrow/visitor.cc @@ -45,8 +45,10 @@ ARRAY_VISITOR_DEFAULT(UInt64Array) ARRAY_VISITOR_DEFAULT(HalfFloatArray) ARRAY_VISITOR_DEFAULT(FloatArray) ARRAY_VISITOR_DEFAULT(DoubleArray) -ARRAY_VISITOR_DEFAULT(BinaryArray) ARRAY_VISITOR_DEFAULT(StringArray) +ARRAY_VISITOR_DEFAULT(StringViewArray) +ARRAY_VISITOR_DEFAULT(BinaryArray) +ARRAY_VISITOR_DEFAULT(BinaryViewArray) ARRAY_VISITOR_DEFAULT(LargeBinaryArray) ARRAY_VISITOR_DEFAULT(LargeStringArray) ARRAY_VISITOR_DEFAULT(FixedSizeBinaryArray) @@ -96,7 +98,9 @@ TYPE_VISITOR_DEFAULT(HalfFloatType) TYPE_VISITOR_DEFAULT(FloatType) TYPE_VISITOR_DEFAULT(DoubleType) TYPE_VISITOR_DEFAULT(StringType) +TYPE_VISITOR_DEFAULT(StringViewType) TYPE_VISITOR_DEFAULT(BinaryType) +TYPE_VISITOR_DEFAULT(BinaryViewType) TYPE_VISITOR_DEFAULT(LargeStringType) TYPE_VISITOR_DEFAULT(LargeBinaryType) TYPE_VISITOR_DEFAULT(FixedSizeBinaryType) @@ -147,7 +151,9 @@ SCALAR_VISITOR_DEFAULT(HalfFloatScalar) SCALAR_VISITOR_DEFAULT(FloatScalar) SCALAR_VISITOR_DEFAULT(DoubleScalar) SCALAR_VISITOR_DEFAULT(StringScalar) +SCALAR_VISITOR_DEFAULT(StringViewScalar) 
SCALAR_VISITOR_DEFAULT(BinaryScalar) +SCALAR_VISITOR_DEFAULT(BinaryViewScalar) SCALAR_VISITOR_DEFAULT(LargeStringScalar) SCALAR_VISITOR_DEFAULT(LargeBinaryScalar) SCALAR_VISITOR_DEFAULT(FixedSizeBinaryScalar) diff --git a/cpp/src/arrow/visitor.h b/cpp/src/arrow/visitor.h index b22d4d3c567e1..650b0e7ee0a30 100644 --- a/cpp/src/arrow/visitor.h +++ b/cpp/src/arrow/visitor.h @@ -45,7 +45,9 @@ class ARROW_EXPORT ArrayVisitor { virtual Status Visit(const FloatArray& array); virtual Status Visit(const DoubleArray& array); virtual Status Visit(const StringArray& array); + virtual Status Visit(const StringViewArray& array); virtual Status Visit(const BinaryArray& array); + virtual Status Visit(const BinaryViewArray& array); virtual Status Visit(const LargeStringArray& array); virtual Status Visit(const LargeBinaryArray& array); virtual Status Visit(const FixedSizeBinaryArray& array); @@ -94,7 +96,9 @@ class ARROW_EXPORT TypeVisitor { virtual Status Visit(const FloatType& type); virtual Status Visit(const DoubleType& type); virtual Status Visit(const StringType& type); + virtual Status Visit(const StringViewType& type); virtual Status Visit(const BinaryType& type); + virtual Status Visit(const BinaryViewType& type); virtual Status Visit(const LargeStringType& type); virtual Status Visit(const LargeBinaryType& type); virtual Status Visit(const FixedSizeBinaryType& type); @@ -143,7 +147,9 @@ class ARROW_EXPORT ScalarVisitor { virtual Status Visit(const FloatScalar& scalar); virtual Status Visit(const DoubleScalar& scalar); virtual Status Visit(const StringScalar& scalar); + virtual Status Visit(const StringViewScalar& scalar); virtual Status Visit(const BinaryScalar& scalar); + virtual Status Visit(const BinaryViewScalar& scalar); virtual Status Visit(const LargeStringScalar& scalar); virtual Status Visit(const LargeBinaryScalar& scalar); virtual Status Visit(const FixedSizeBinaryScalar& scalar); diff --git a/cpp/src/arrow/visitor_generate.h b/cpp/src/arrow/visitor_generate.h 
index 8f6b176ba8fea..4b57abe53ff14 100644 --- a/cpp/src/arrow/visitor_generate.h +++ b/cpp/src/arrow/visitor_generate.h @@ -40,7 +40,9 @@ namespace arrow { ACTION(Boolean); \ ARROW_GENERATE_FOR_ALL_NUMERIC_TYPES(ACTION); \ ACTION(String); \ + ACTION(StringView); \ ACTION(Binary); \ + ACTION(BinaryView); \ ACTION(LargeString); \ ACTION(LargeBinary); \ ACTION(FixedSizeBinary); \ diff --git a/cpp/src/gandiva/CMakeLists.txt b/cpp/src/gandiva/CMakeLists.txt index 6b6743bc8e52f..3448d516768bb 100644 --- a/cpp/src/gandiva/CMakeLists.txt +++ b/cpp/src/gandiva/CMakeLists.txt @@ -25,11 +25,14 @@ add_custom_target(gandiva-benchmarks) add_dependencies(gandiva-all gandiva gandiva-tests gandiva-benchmarks) +include(GandivaAddBitcode) + find_package(LLVMAlt REQUIRED) provide_find_module(LLVMAlt "Gandiva") if(ARROW_WITH_ZSTD AND "${zstd_SOURCE}" STREQUAL "SYSTEM") provide_find_module(zstdAlt "Gandiva") endif() +provide_cmake_module(GandivaAddBitcode "Gandiva") # Set the path where the bitcode file generated, see precompiled/CMakeLists.txt set(GANDIVA_PRECOMPILED_BC_PATH "${CMAKE_CURRENT_BINARY_DIR}/irhelpers.bc") @@ -249,7 +252,8 @@ add_gandiva_test(internals-test random_generator_holder_test.cc hash_utils_test.cc gdv_function_stubs_test.cc - interval_holder_test.cc) + interval_holder_test.cc + tests/test_util.cc) add_subdirectory(precompiled) add_subdirectory(tests) diff --git a/cpp/src/gandiva/GandivaConfig.cmake.in b/cpp/src/gandiva/GandivaConfig.cmake.in index f02e29f25bb3a..68579debd183b 100644 --- a/cpp/src/gandiva/GandivaConfig.cmake.in +++ b/cpp/src/gandiva/GandivaConfig.cmake.in @@ -49,6 +49,7 @@ else() endif() include("${CMAKE_CURRENT_LIST_DIR}/GandivaTargets.cmake") +include("${CMAKE_CURRENT_LIST_DIR}/GandivaAddBitcode.cmake") arrow_keep_backward_compatibility(Gandiva gandiva) diff --git a/cpp/src/gandiva/configuration.cc b/cpp/src/gandiva/configuration.cc index 1e26c5c70d4ec..b79f4118e07f2 100644 --- a/cpp/src/gandiva/configuration.cc +++ 
b/cpp/src/gandiva/configuration.cc @@ -29,11 +29,14 @@ std::size_t Configuration::Hash() const { size_t result = kHashSeed; arrow::internal::hash_combine(result, static_cast(optimize_)); arrow::internal::hash_combine(result, static_cast(target_host_cpu_)); + arrow::internal::hash_combine( + result, reinterpret_cast(function_registry_.get())); return result; } bool Configuration::operator==(const Configuration& other) const { - return optimize_ == other.optimize_ && target_host_cpu_ == other.target_host_cpu_; + return optimize_ == other.optimize_ && target_host_cpu_ == other.target_host_cpu_ && + function_registry_ == other.function_registry_; } bool Configuration::operator!=(const Configuration& other) const { diff --git a/cpp/src/gandiva/configuration.h b/cpp/src/gandiva/configuration.h index 9cd301524d03d..f43a2b190731f 100644 --- a/cpp/src/gandiva/configuration.h +++ b/cpp/src/gandiva/configuration.h @@ -21,6 +21,7 @@ #include #include "arrow/status.h" +#include "gandiva/function_registry.h" #include "gandiva/visibility.h" namespace gandiva { @@ -34,8 +35,14 @@ class GANDIVA_EXPORT Configuration { public: friend class ConfigurationBuilder; - Configuration() : optimize_(true), target_host_cpu_(true) {} - explicit Configuration(bool optimize) : optimize_(optimize), target_host_cpu_(true) {} + explicit Configuration(bool optimize, + std::shared_ptr function_registry = + gandiva::default_function_registry()) + : optimize_(optimize), + target_host_cpu_(true), + function_registry_(function_registry) {} + + Configuration() : Configuration(true) {} std::size_t Hash() const; bool operator==(const Configuration& other) const; @@ -43,13 +50,21 @@ class GANDIVA_EXPORT Configuration { bool optimize() const { return optimize_; } bool target_host_cpu() const { return target_host_cpu_; } + std::shared_ptr function_registry() const { + return function_registry_; + } void set_optimize(bool optimize) { optimize_ = optimize; } void target_host_cpu(bool target_host_cpu) { 
target_host_cpu_ = target_host_cpu; } + void set_function_registry(std::shared_ptr function_registry) { + function_registry_ = std::move(function_registry); + } private: bool optimize_; /* optimise the generated llvm IR */ bool target_host_cpu_; /* set the mcpu flag to host cpu while compiling llvm ir */ + std::shared_ptr + function_registry_; /* function registry that may contain external functions */ }; /// \brief configuration builder for gandiva @@ -68,6 +83,13 @@ class GANDIVA_EXPORT ConfigurationBuilder { return configuration; } + std::shared_ptr build( + std::shared_ptr function_registry) { + std::shared_ptr configuration( + new Configuration(true, std::move(function_registry))); + return configuration; + } + static std::shared_ptr DefaultConfiguration() { return default_configuration_; } diff --git a/cpp/src/gandiva/engine.cc b/cpp/src/gandiva/engine.cc index 8ebe927437567..5ae1d76876148 100644 --- a/cpp/src/gandiva/engine.cc +++ b/cpp/src/gandiva/engine.cc @@ -141,7 +141,8 @@ Engine::Engine(const std::shared_ptr& conf, module_(module), types_(*context_), optimize_(conf->optimize()), - cached_(cached) {} + cached_(cached), + function_registry_(conf->function_registry()) {} Status Engine::Init() { std::call_once(register_exported_funcs_flag, gandiva::RegisterExportedFuncs); @@ -155,6 +156,7 @@ Status Engine::LoadFunctionIRs() { if (!functions_loaded_) { ARROW_RETURN_NOT_OK(LoadPreCompiledIR()); ARROW_RETURN_NOT_OK(DecimalIR::AddFunctions(this)); + ARROW_RETURN_NOT_OK(LoadExternalPreCompiledIR()); functions_loaded_ = true; } return Status::OK(); @@ -236,7 +238,38 @@ static void SetDataLayout(llvm::Module* module) { module->setDataLayout(machine->createDataLayout()); } -// end of the mofified method from MLIR +// end of the modified method from MLIR + +template +static arrow::Result AsArrowResult(llvm::Expected& expected) { + if (!expected) { + std::string str; + llvm::raw_string_ostream stream(str); + stream << expected.takeError(); + return 
Status::CodeGenError(stream.str()); + } + return std::move(expected.get()); +} + +static arrow::Status VerifyAndLinkModule( + llvm::Module* dest_module, + llvm::Expected> src_module_or_error) { + ARROW_ASSIGN_OR_RAISE(auto src_ir_module, AsArrowResult(src_module_or_error)); + + // set dataLayout + SetDataLayout(src_ir_module.get()); + + std::string error_info; + llvm::raw_string_ostream error_stream(error_info); + ARROW_RETURN_IF( + llvm::verifyModule(*src_ir_module, &error_stream), + Status::CodeGenError("verify of IR Module failed: " + error_stream.str())); + + ARROW_RETURN_IF(llvm::Linker::linkModules(*dest_module, std::move(src_ir_module)), + Status::CodeGenError("failed to link IR Modules")); + + return Status::OK(); +} // Handling for pre-compiled IR libraries. Status Engine::LoadPreCompiledIR() { @@ -256,23 +289,25 @@ Status Engine::LoadPreCompiledIR() { /// Parse the IR module. llvm::Expected> module_or_error = llvm::getOwningLazyBitcodeModule(std::move(buffer), *context()); - if (!module_or_error) { - // NOTE: llvm::handleAllErrors() fails linking with RTTI-disabled LLVM builds - // (ARROW-5148) - std::string str; - llvm::raw_string_ostream stream(str); - stream << module_or_error.takeError(); - return Status::CodeGenError(stream.str()); - } - std::unique_ptr ir_module = std::move(module_or_error.get()); + // NOTE: llvm::handleAllErrors() fails linking with RTTI-disabled LLVM builds + // (ARROW-5148) + ARROW_RETURN_NOT_OK(VerifyAndLinkModule(module_, std::move(module_or_error))); + return Status::OK(); +} - // set dataLayout - SetDataLayout(ir_module.get()); +static llvm::MemoryBufferRef AsLLVMMemoryBuffer(const arrow::Buffer& arrow_buffer) { + auto data = reinterpret_cast(arrow_buffer.data()); + auto size = arrow_buffer.size(); + return llvm::MemoryBufferRef(llvm::StringRef(data, size), "external_bitcode"); +} - ARROW_RETURN_IF(llvm::verifyModule(*ir_module, &llvm::errs()), - Status::CodeGenError("verify of IR Module failed")); - 
ARROW_RETURN_IF(llvm::Linker::linkModules(*module_, std::move(ir_module)), - Status::CodeGenError("failed to link IR Modules")); +Status Engine::LoadExternalPreCompiledIR() { + auto const& buffers = function_registry_->GetBitcodeBuffers(); + for (auto const& buffer : buffers) { + auto llvm_memory_buffer_ref = AsLLVMMemoryBuffer(*buffer); + auto module_or_error = llvm::parseBitcodeFile(llvm_memory_buffer_ref, *context()); + ARROW_RETURN_NOT_OK(VerifyAndLinkModule(module_, std::move(module_or_error))); + } return Status::OK(); } diff --git a/cpp/src/gandiva/engine.h b/cpp/src/gandiva/engine.h index a4d6a5fd1a758..566977dc4adad 100644 --- a/cpp/src/gandiva/engine.h +++ b/cpp/src/gandiva/engine.h @@ -93,6 +93,9 @@ class GANDIVA_EXPORT Engine { /// the main module. Status LoadPreCompiledIR(); + // load external pre-compiled bitcodes into module + Status LoadExternalPreCompiledIR(); + // Create and add mappings for cpp functions that can be accessed from LLVM. void AddGlobalMappings(); @@ -111,6 +114,7 @@ class GANDIVA_EXPORT Engine { bool module_finalized_ = false; bool cached_; bool functions_loaded_ = false; + std::shared_ptr function_registry_; }; } // namespace gandiva diff --git a/cpp/src/gandiva/expr_decomposer_test.cc b/cpp/src/gandiva/expr_decomposer_test.cc index 638ceebcb19fd..7681d9e646297 100644 --- a/cpp/src/gandiva/expr_decomposer_test.cc +++ b/cpp/src/gandiva/expr_decomposer_test.cc @@ -24,7 +24,6 @@ #include "gandiva/function_registry.h" #include "gandiva/gandiva_aliases.h" #include "gandiva/node.h" -#include "gandiva/tree_expr_builder.h" namespace gandiva { @@ -32,12 +31,12 @@ using arrow::int32; class TestExprDecomposer : public ::testing::Test { protected: - FunctionRegistry registry_; + std::shared_ptr registry_ = default_function_registry(); }; TEST_F(TestExprDecomposer, TestStackSimple) { Annotator annotator; - ExprDecomposer decomposer(registry_, annotator); + ExprDecomposer decomposer(*registry_, annotator); // if (a) _ // else _ @@ -58,7 +57,7 
@@ TEST_F(TestExprDecomposer, TestStackSimple) { TEST_F(TestExprDecomposer, TestNested) { Annotator annotator; - ExprDecomposer decomposer(registry_, annotator); + ExprDecomposer decomposer(*registry_, annotator); // if (a) _ // else _ @@ -97,7 +96,7 @@ TEST_F(TestExprDecomposer, TestNested) { TEST_F(TestExprDecomposer, TestInternalIf) { Annotator annotator; - ExprDecomposer decomposer(registry_, annotator); + ExprDecomposer decomposer(*registry_, annotator); // if (a) _ // if (b) _ @@ -136,7 +135,7 @@ TEST_F(TestExprDecomposer, TestInternalIf) { TEST_F(TestExprDecomposer, TestParallelIf) { Annotator annotator; - ExprDecomposer decomposer(registry_, annotator); + ExprDecomposer decomposer(*registry_, annotator); // if (a) _ // else _ @@ -174,7 +173,7 @@ TEST_F(TestExprDecomposer, TestParallelIf) { TEST_F(TestExprDecomposer, TestIfInCondition) { Annotator annotator; - ExprDecomposer decomposer(registry_, annotator); + ExprDecomposer decomposer(*registry_, annotator); // if (if _ else _) : a // - @@ -245,7 +244,7 @@ TEST_F(TestExprDecomposer, TestIfInCondition) { TEST_F(TestExprDecomposer, TestFunctionBetweenNestedIf) { Annotator annotator; - ExprDecomposer decomposer(registry_, annotator); + ExprDecomposer decomposer(*registry_, annotator); // if (a) _ // else @@ -286,7 +285,7 @@ TEST_F(TestExprDecomposer, TestFunctionBetweenNestedIf) { TEST_F(TestExprDecomposer, TestComplexIfCondition) { Annotator annotator; - ExprDecomposer decomposer(registry_, annotator); + ExprDecomposer decomposer(*registry_, annotator); // if (if _ // else diff --git a/cpp/src/gandiva/expr_validator.cc b/cpp/src/gandiva/expr_validator.cc index 35a13494523d0..8a6f86e6f0419 100644 --- a/cpp/src/gandiva/expr_validator.cc +++ b/cpp/src/gandiva/expr_validator.cc @@ -93,7 +93,7 @@ Status ExprValidator::Visit(const FunctionNode& node) { const auto& desc = node.descriptor(); FunctionSignature signature(desc->name(), desc->params(), desc->return_type()); - const NativeFunction* native_function = 
registry_.LookupSignature(signature); + const NativeFunction* native_function = registry_->LookupSignature(signature); ARROW_RETURN_IF(native_function == nullptr, Status::ExpressionValidationError("Function ", signature.ToString(), " not supported yet. ")); diff --git a/cpp/src/gandiva/expr_validator.h b/cpp/src/gandiva/expr_validator.h index 7f6d7fd131fbe..8a423fc93b02b 100644 --- a/cpp/src/gandiva/expr_validator.h +++ b/cpp/src/gandiva/expr_validator.h @@ -37,8 +37,9 @@ class FunctionRegistry; /// data types, signatures and return types class ExprValidator : public NodeVisitor { public: - explicit ExprValidator(LLVMTypes* types, SchemaPtr schema) - : types_(types), schema_(schema) { + explicit ExprValidator(LLVMTypes* types, SchemaPtr schema, + std::shared_ptr registry) + : types_(types), schema_(schema), registry_(std::move(registry)) { for (auto& field : schema_->fields()) { field_map_[field->name()] = field; } @@ -65,12 +66,12 @@ class ExprValidator : public NodeVisitor { Status Visit(const InExpressionNode& node) override; Status Visit(const InExpressionNode& node) override; - FunctionRegistry registry_; - LLVMTypes* types_; SchemaPtr schema_; + std::shared_ptr registry_; + using FieldMap = std::unordered_map; FieldMap field_map_; }; diff --git a/cpp/src/gandiva/expression_registry.cc b/cpp/src/gandiva/expression_registry.cc index 9bff97f5ad269..dd964a7cb8a7a 100644 --- a/cpp/src/gandiva/expression_registry.cc +++ b/cpp/src/gandiva/expression_registry.cc @@ -22,9 +22,9 @@ namespace gandiva { -ExpressionRegistry::ExpressionRegistry() { - function_registry_.reset(new FunctionRegistry()); -} +ExpressionRegistry::ExpressionRegistry( + std::shared_ptr function_registry) + : function_registry_{function_registry} {} ExpressionRegistry::~ExpressionRegistry() {} diff --git a/cpp/src/gandiva/expression_registry.h b/cpp/src/gandiva/expression_registry.h index 609a2dbbe21f9..156a6392564f9 100644 --- a/cpp/src/gandiva/expression_registry.h +++ 
b/cpp/src/gandiva/expression_registry.h @@ -21,6 +21,7 @@ #include #include "gandiva/arrow.h" +#include "gandiva/function_registry.h" #include "gandiva/function_signature.h" #include "gandiva/gandiva_aliases.h" #include "gandiva/visibility.h" @@ -37,7 +38,8 @@ class GANDIVA_EXPORT ExpressionRegistry { public: using native_func_iterator_type = const NativeFunction*; using func_sig_iterator_type = const FunctionSignature*; - ExpressionRegistry(); + explicit ExpressionRegistry(std::shared_ptr function_registry = + gandiva::default_function_registry()); ~ExpressionRegistry(); static DataTypeVector supported_types() { return supported_types_; } class GANDIVA_EXPORT FunctionSignatureIterator { @@ -62,7 +64,7 @@ class GANDIVA_EXPORT ExpressionRegistry { private: static DataTypeVector supported_types_; - std::unique_ptr function_registry_; + std::shared_ptr function_registry_; }; /// \brief Get the list of all function signatures. diff --git a/cpp/src/gandiva/expression_registry_test.cc b/cpp/src/gandiva/expression_registry_test.cc index c254ff4f3aa5e..cd784192c194e 100644 --- a/cpp/src/gandiva/expression_registry_test.cc +++ b/cpp/src/gandiva/expression_registry_test.cc @@ -31,7 +31,7 @@ typedef int64_t (*add_vector_func_t)(int64_t* elements, int nelements); class TestExpressionRegistry : public ::testing::Test { protected: - FunctionRegistry registry_; + std::shared_ptr registry_ = default_function_registry(); }; // Verify all functions in registry are exported. 
@@ -42,7 +42,7 @@ TEST_F(TestExpressionRegistry, VerifySupportedFunctions) { iter != expr_registry.function_signature_end(); iter++) { functions.push_back((*iter)); } - for (auto& iter : registry_) { + for (auto& iter : *registry_) { for (auto& func_iter : iter.signatures()) { auto element = std::find(functions.begin(), functions.end(), func_iter); EXPECT_NE(element, functions.end()) << "function signature " << func_iter.ToString() diff --git a/cpp/src/gandiva/filter.cc b/cpp/src/gandiva/filter.cc index 78917467a0f56..416d97b5dbd1d 100644 --- a/cpp/src/gandiva/filter.cc +++ b/cpp/src/gandiva/filter.cc @@ -71,7 +71,8 @@ Status Filter::Make(SchemaPtr schema, ConditionPtr condition, if (!is_cached) { // Run the validation on the expression. // Return if the expression is invalid since we will not be able to process further. - ExprValidator expr_validator(llvm_gen->types(), schema); + ExprValidator expr_validator(llvm_gen->types(), schema, + configuration->function_registry()); ARROW_RETURN_NOT_OK(expr_validator.Validate(condition)); } diff --git a/cpp/src/gandiva/function_registry.cc b/cpp/src/gandiva/function_registry.cc index 67b7b404b325c..5d676dfa8df74 100644 --- a/cpp/src/gandiva/function_registry.cc +++ b/cpp/src/gandiva/function_registry.cc @@ -16,6 +16,13 @@ // under the License. 
#include "gandiva/function_registry.h" + +#include +#include + +#include + +#include "arrow/util/logging.h" #include "gandiva/function_registry_arithmetic.h" #include "gandiva/function_registry_datetime.h" #include "gandiva/function_registry_hash.h" @@ -23,12 +30,26 @@ #include "gandiva/function_registry_string.h" #include "gandiva/function_registry_timestamp_arithmetic.h" -#include -#include -#include - namespace gandiva { +static constexpr uint32_t kMaxFunctionSignatures = 2048; + +// encapsulates an llvm memory buffer in an arrow buffer +// this is needed because we don't expose the llvm memory buffer to the outside world in +// the header file +class LLVMMemoryArrowBuffer : public arrow::Buffer { + public: + explicit LLVMMemoryArrowBuffer(std::unique_ptr llvm_buffer) + : arrow::Buffer(reinterpret_cast(llvm_buffer->getBufferStart()), + static_cast(llvm_buffer->getBufferSize())), + llvm_buffer_(std::move(llvm_buffer)) {} + + private: + std::unique_ptr llvm_buffer_; +}; + +FunctionRegistry::FunctionRegistry() { pc_registry_.reserve(kMaxFunctionSignatures); } + FunctionRegistry::iterator FunctionRegistry::begin() const { return &(*pc_registry_.begin()); } @@ -41,42 +62,74 @@ FunctionRegistry::iterator FunctionRegistry::back() const { return &(pc_registry_.back()); } -std::vector FunctionRegistry::pc_registry_; +const NativeFunction* FunctionRegistry::LookupSignature( + const FunctionSignature& signature) const { + auto got = pc_registry_map_.find(&signature); + return got == pc_registry_map_.end() ? 
nullptr : got->second; +} -SignatureMap FunctionRegistry::pc_registry_map_ = InitPCMap(); +Status FunctionRegistry::Add(NativeFunction func) { + if (pc_registry_.size() == kMaxFunctionSignatures) { + return Status::CapacityError("Exceeded max function signatures limit of ", + kMaxFunctionSignatures); + } + pc_registry_.emplace_back(std::move(func)); + auto const& last_func = pc_registry_.back(); + for (auto const& func_signature : last_func.signatures()) { + pc_registry_map_.emplace(&func_signature, &last_func); + } + return arrow::Status::OK(); +} -SignatureMap FunctionRegistry::InitPCMap() { - SignatureMap map; +arrow::Result> GetBufferFromFile( + const std::string& bitcode_file_path) { + auto buffer_or_error = llvm::MemoryBuffer::getFile(bitcode_file_path); - auto v1 = GetArithmeticFunctionRegistry(); - pc_registry_.insert(std::end(pc_registry_), v1.begin(), v1.end()); - auto v2 = GetDateTimeFunctionRegistry(); - pc_registry_.insert(std::end(pc_registry_), v2.begin(), v2.end()); + ARROW_RETURN_IF(!buffer_or_error, + Status::IOError("Could not load module from bitcode file: ", + bitcode_file_path + + " Error: " + buffer_or_error.getError().message())); - auto v3 = GetHashFunctionRegistry(); - pc_registry_.insert(std::end(pc_registry_), v3.begin(), v3.end()); + return std::move(buffer_or_error.get()); +} - auto v4 = GetMathOpsFunctionRegistry(); - pc_registry_.insert(std::end(pc_registry_), v4.begin(), v4.end()); +Status FunctionRegistry::Register(const std::vector& funcs, + const std::string& bitcode_path) { + ARROW_ASSIGN_OR_RAISE(auto llvm_buffer, GetBufferFromFile(bitcode_path)); + auto buffer = std::make_shared(std::move(llvm_buffer)); + return Register(funcs, std::move(buffer)); +} - auto v5 = GetStringFunctionRegistry(); - pc_registry_.insert(std::end(pc_registry_), v5.begin(), v5.end()); +arrow::Status FunctionRegistry::Register(const std::vector& funcs, + std::shared_ptr bitcode_buffer) { + bitcode_memory_buffers_.emplace_back(std::move(bitcode_buffer)); 
+ for (const auto& func : funcs) { + ARROW_RETURN_NOT_OK(FunctionRegistry::Add(func)); + } + return Status::OK(); +} - auto v6 = GetDateTimeArithmeticFunctionRegistry(); - pc_registry_.insert(std::end(pc_registry_), v6.begin(), v6.end()); - for (auto& elem : pc_registry_) { - for (auto& func_signature : elem.signatures()) { - map.insert(std::make_pair(&(func_signature), &elem)); +const std::vector>& FunctionRegistry::GetBitcodeBuffers() + const { + return bitcode_memory_buffers_; +} + +arrow::Result> MakeDefaultFunctionRegistry() { + auto registry = std::make_shared(); + for (auto const& funcs : + {GetArithmeticFunctionRegistry(), GetDateTimeFunctionRegistry(), + GetHashFunctionRegistry(), GetMathOpsFunctionRegistry(), + GetStringFunctionRegistry(), GetDateTimeArithmeticFunctionRegistry()}) { + for (auto const& func_signature : funcs) { + ARROW_RETURN_NOT_OK(registry->Add(func_signature)); } } - - return map; + return std::move(registry); } -const NativeFunction* FunctionRegistry::LookupSignature( - const FunctionSignature& signature) const { - auto got = pc_registry_map_.find(&signature); - return got == pc_registry_map_.end() ? 
nullptr : got->second; +std::shared_ptr default_function_registry() { + static auto default_registry = *MakeDefaultFunctionRegistry(); + return default_registry; } } // namespace gandiva diff --git a/cpp/src/gandiva/function_registry.h b/cpp/src/gandiva/function_registry.h index d9256326019c6..01984961dc90f 100644 --- a/cpp/src/gandiva/function_registry.h +++ b/cpp/src/gandiva/function_registry.h @@ -17,7 +17,12 @@ #pragma once +#include +#include #include + +#include "arrow/buffer.h" +#include "arrow/status.h" #include "gandiva/function_registry_common.h" #include "gandiva/gandiva_aliases.h" #include "gandiva/native_function.h" @@ -30,18 +35,41 @@ class GANDIVA_EXPORT FunctionRegistry { public: using iterator = const NativeFunction*; + FunctionRegistry(); + FunctionRegistry(const FunctionRegistry&) = delete; + FunctionRegistry& operator=(const FunctionRegistry&) = delete; + /// Lookup a pre-compiled function by its signature. const NativeFunction* LookupSignature(const FunctionSignature& signature) const; + /// \brief register a set of functions into the function registry from a given bitcode + /// file + arrow::Status Register(const std::vector& funcs, + const std::string& bitcode_path); + + /// \brief register a set of functions into the function registry from a given bitcode + /// buffer + arrow::Status Register(const std::vector& funcs, + std::shared_ptr bitcode_buffer); + + /// \brief get a list of bitcode memory buffers saved in the registry + const std::vector>& GetBitcodeBuffers() const; + iterator begin() const; iterator end() const; iterator back() const; + friend arrow::Result> MakeDefaultFunctionRegistry(); + private: - static SignatureMap InitPCMap(); + std::vector pc_registry_; + SignatureMap pc_registry_map_; + std::vector> bitcode_memory_buffers_; - static std::vector pc_registry_; - static SignatureMap pc_registry_map_; + Status Add(NativeFunction func); }; +/// \brief get the default function registry +GANDIVA_EXPORT std::shared_ptr 
default_function_registry(); + } // namespace gandiva diff --git a/cpp/src/gandiva/function_registry_test.cc b/cpp/src/gandiva/function_registry_test.cc index e3c1e85f79cba..bbe72c0ee970c 100644 --- a/cpp/src/gandiva/function_registry_test.cc +++ b/cpp/src/gandiva/function_registry_test.cc @@ -23,17 +23,26 @@ #include #include +#include "gandiva/tests/test_util.h" + namespace gandiva { class TestFunctionRegistry : public ::testing::Test { protected: - FunctionRegistry registry_; + std::shared_ptr registry_ = gandiva::default_function_registry(); + + static std::unique_ptr MakeFunctionRegistryWithExternalFunction() { + auto registry = std::make_unique(); + ARROW_EXPECT_OK( + registry->Register({GetTestExternalFunction()}, GetTestFunctionLLVMIRPath())); + return registry; + } }; TEST_F(TestFunctionRegistry, TestFound) { FunctionSignature add_i32_i32("add", {arrow::int32(), arrow::int32()}, arrow::int32()); - const NativeFunction* function = registry_.LookupSignature(add_i32_i32); + const NativeFunction* function = registry_->LookupSignature(add_i32_i32); EXPECT_NE(function, nullptr); EXPECT_THAT(function->signatures(), testing::Contains(add_i32_i32)); EXPECT_EQ(function->pc_name(), "add_int32_int32"); @@ -42,11 +51,32 @@ TEST_F(TestFunctionRegistry, TestFound) { TEST_F(TestFunctionRegistry, TestNotFound) { FunctionSignature addX_i32_i32("addX", {arrow::int32(), arrow::int32()}, arrow::int32()); - EXPECT_EQ(registry_.LookupSignature(addX_i32_i32), nullptr); + EXPECT_EQ(registry_->LookupSignature(addX_i32_i32), nullptr); FunctionSignature add_i32_i32_ret64("add", {arrow::int32(), arrow::int32()}, arrow::int64()); - EXPECT_EQ(registry_.LookupSignature(add_i32_i32_ret64), nullptr); + EXPECT_EQ(registry_->LookupSignature(add_i32_i32_ret64), nullptr); +} + +TEST_F(TestFunctionRegistry, TestCustomFunctionRegistry) { + auto registry = MakeFunctionRegistryWithExternalFunction(); + + auto multiply_by_two_func = GetTestExternalFunction(); + auto multiply_by_two_int32_ret64 = 
multiply_by_two_func.signatures().front(); + EXPECT_NE(registry->LookupSignature(multiply_by_two_int32_ret64), nullptr); + + FunctionSignature add_i32_i32_ret64("add", {arrow::int32(), arrow::int32()}, + arrow::int64()); + EXPECT_EQ(registry->LookupSignature(add_i32_i32_ret64), nullptr); +} + +TEST_F(TestFunctionRegistry, TestGetBitcodeMemoryBuffersDefaultFunctionRegistry) { + EXPECT_EQ(registry_->GetBitcodeBuffers().size(), 0); +} + +TEST_F(TestFunctionRegistry, TestGetBitcodeMemoryBuffersCustomFunctionRegistry) { + auto registry = MakeFunctionRegistryWithExternalFunction(); + EXPECT_EQ(registry->GetBitcodeBuffers().size(), 1); } // one nativefunction object per precompiled function @@ -55,10 +85,9 @@ TEST_F(TestFunctionRegistry, TestNoDuplicates) { std::unordered_set native_func_duplicates; std::unordered_set func_sigs; std::unordered_set func_sig_duplicates; - for (auto native_func_it = registry_.begin(); native_func_it != registry_.end(); - ++native_func_it) { - auto& first_sig = native_func_it->signatures().front(); - auto pc_func_sig = FunctionSignature(native_func_it->pc_name(), + for (const auto& native_func_it : *registry_) { + auto& first_sig = native_func_it.signatures().front(); + auto pc_func_sig = FunctionSignature(native_func_it.pc_name(), first_sig.param_types(), first_sig.ret_type()) .ToString(); if (pc_func_sigs.count(pc_func_sig) == 0) { @@ -67,7 +96,7 @@ TEST_F(TestFunctionRegistry, TestNoDuplicates) { native_func_duplicates.insert(pc_func_sig); } - for (auto& sig : native_func_it->signatures()) { + for (auto& sig : native_func_it.signatures()) { auto sig_str = sig.ToString(); if (func_sigs.count(sig_str) == 0) { func_sigs.insert(sig_str); diff --git a/cpp/src/gandiva/llvm_generator.cc b/cpp/src/gandiva/llvm_generator.cc index fa1d97be301a8..41cbe0ffe3a3a 100644 --- a/cpp/src/gandiva/llvm_generator.cc +++ b/cpp/src/gandiva/llvm_generator.cc @@ -36,11 +36,16 @@ namespace gandiva { AddTrace(__VA_ARGS__); \ } -LLVMGenerator::LLVMGenerator(bool 
cached) : cached_(cached), enable_ir_traces_(false) {} +LLVMGenerator::LLVMGenerator(bool cached, + std::shared_ptr function_registry) + : cached_(cached), + function_registry_(std::move(function_registry)), + enable_ir_traces_(false) {} -Status LLVMGenerator::Make(std::shared_ptr config, bool cached, +Status LLVMGenerator::Make(const std::shared_ptr& config, bool cached, std::unique_ptr* llvm_generator) { - std::unique_ptr llvmgen_obj(new LLVMGenerator(cached)); + std::unique_ptr llvmgen_obj( + new LLVMGenerator(cached, config->function_registry())); ARROW_RETURN_NOT_OK(Engine::Make(config, cached, &(llvmgen_obj->engine_))); *llvm_generator = std::move(llvmgen_obj); @@ -64,7 +69,7 @@ void LLVMGenerator::SetLLVMObjectCache(GandivaObjectCache& object_cache) { Status LLVMGenerator::Add(const ExpressionPtr expr, const FieldDescriptorPtr output) { int idx = static_cast(compiled_exprs_.size()); // decompose the expression to separate out value and validities. - ExprDecomposer decomposer(function_registry_, annotator_); + ExprDecomposer decomposer(*function_registry_, annotator_); ValueValidityPairPtr value_validity; ARROW_RETURN_NOT_OK(decomposer.Decompose(*expr->root(), &value_validity)); // Generate the IR function for the decomposed expression. diff --git a/cpp/src/gandiva/llvm_generator.h b/cpp/src/gandiva/llvm_generator.h index 04f9b854b1d29..1921e2565338b 100644 --- a/cpp/src/gandiva/llvm_generator.h +++ b/cpp/src/gandiva/llvm_generator.h @@ -47,7 +47,7 @@ class FunctionHolder; class GANDIVA_EXPORT LLVMGenerator { public: /// \brief Factory method to initialize the generator. - static Status Make(std::shared_ptr config, bool cached, + static Status Make(const std::shared_ptr& config, bool cached, std::unique_ptr* llvm_generator); /// \brief Get the cache to be used for LLVM ObjectCache. 
@@ -82,11 +82,13 @@ class GANDIVA_EXPORT LLVMGenerator { std::string DumpIR() { return engine_->DumpIR(); } private: - explicit LLVMGenerator(bool cached); + explicit LLVMGenerator(bool cached, + std::shared_ptr function_registry); FRIEND_TEST(TestLLVMGenerator, VerifyPCFunctions); FRIEND_TEST(TestLLVMGenerator, TestAdd); FRIEND_TEST(TestLLVMGenerator, TestNullInternal); + FRIEND_TEST(TestLLVMGenerator, VerifyExtendedPCFunctions); llvm::LLVMContext* context() { return engine_->context(); } llvm::IRBuilder<>* ir_builder() { return engine_->ir_builder(); } @@ -250,7 +252,7 @@ class GANDIVA_EXPORT LLVMGenerator { std::unique_ptr engine_; std::vector> compiled_exprs_; bool cached_; - FunctionRegistry function_registry_; + std::shared_ptr function_registry_; Annotator annotator_; SelectionVector::Mode selection_vector_mode_; diff --git a/cpp/src/gandiva/llvm_generator_test.cc b/cpp/src/gandiva/llvm_generator_test.cc index 028893b0b4594..671ce91e870f6 100644 --- a/cpp/src/gandiva/llvm_generator_test.cc +++ b/cpp/src/gandiva/llvm_generator_test.cc @@ -35,7 +35,7 @@ typedef int64_t (*add_vector_func_t)(int64_t* elements, int nelements); class TestLLVMGenerator : public ::testing::Test { protected: - FunctionRegistry registry_; + std::shared_ptr registry_ = default_function_registry(); }; // Verify that a valid pc function exists for every function in the registry. 
@@ -45,7 +45,7 @@ TEST_F(TestLLVMGenerator, VerifyPCFunctions) { llvm::Module* module = generator->module(); ASSERT_OK(generator->engine_->LoadFunctionIRs()); - for (auto& iter : registry_) { + for (auto& iter : *registry_) { EXPECT_NE(module->getFunction(iter.pc_name()), nullptr); } } @@ -73,7 +73,7 @@ TEST_F(TestLLVMGenerator, TestAdd) { FunctionSignature signature(func_desc->name(), func_desc->params(), func_desc->return_type()); const NativeFunction* native_func = - generator->function_registry_.LookupSignature(signature); + generator->function_registry_->LookupSignature(signature); std::vector pairs{pair0, pair1}; auto func_dex = std::make_shared( @@ -115,4 +115,17 @@ TEST_F(TestLLVMGenerator, TestAdd) { EXPECT_EQ(out_bitmap, 0ULL); } +TEST_F(TestLLVMGenerator, VerifyExtendedPCFunctions) { + auto external_registry = std::make_shared(); + auto config_with_func_registry = + TestConfigurationWithFunctionRegistry(std::move(external_registry)); + + std::unique_ptr generator; + ASSERT_OK(LLVMGenerator::Make(config_with_func_registry, false, &generator)); + + auto module = generator->module(); + ASSERT_OK(generator->engine_->LoadFunctionIRs()); + EXPECT_NE(module->getFunction("multiply_by_two_int32"), nullptr); +} + } // namespace gandiva diff --git a/cpp/src/gandiva/native_function.h b/cpp/src/gandiva/native_function.h index 1268a25674a9d..c20de3dbdd54d 100644 --- a/cpp/src/gandiva/native_function.h +++ b/cpp/src/gandiva/native_function.h @@ -54,16 +54,16 @@ class GANDIVA_EXPORT NativeFunction { bool CanReturnErrors() const { return (flags_ & kCanReturnErrors) != 0; } NativeFunction(const std::string& base_name, const std::vector& aliases, - const DataTypeVector& param_types, DataTypePtr ret_type, - const ResultNullableType& result_nullable_type, - const std::string& pc_name, int32_t flags = 0) + const DataTypeVector& param_types, const DataTypePtr& ret_type, + const ResultNullableType& result_nullable_type, std::string pc_name, + int32_t flags = 0) : signatures_(), 
flags_(flags), result_nullable_type_(result_nullable_type), - pc_name_(pc_name) { - signatures_.push_back(FunctionSignature(base_name, param_types, ret_type)); + pc_name_(std::move(pc_name)) { + signatures_.emplace_back(base_name, param_types, ret_type); for (auto& func_name : aliases) { - signatures_.push_back(FunctionSignature(func_name, param_types, ret_type)); + signatures_.emplace_back(func_name, param_types, ret_type); } } diff --git a/cpp/src/gandiva/precompiled/CMakeLists.txt b/cpp/src/gandiva/precompiled/CMakeLists.txt index 3e41640861123..e62a8e3d4a375 100644 --- a/cpp/src/gandiva/precompiled/CMakeLists.txt +++ b/cpp/src/gandiva/precompiled/CMakeLists.txt @@ -15,8 +15,6 @@ # specific language governing permissions and limitations # under the License. -project(gandiva) - set(PRECOMPILED_SRCS arithmetic_ops.cc bitmap.cc @@ -29,69 +27,18 @@ set(PRECOMPILED_SRCS time.cc timestamp_arithmetic.cc ../../arrow/util/basic_decimal.cc) - -set(PLATFORM_CLANG_OPTIONS -std=c++17) -if(MSVC) - # "19.20" means that it's compatible with Visual Studio 16 2019. - # We can update this to "19.30" when we dropped support for Visual - # Studio 16 2019. - # - # See https://cmake.org/cmake/help/latest/variable/MSVC_VERSION.html - # for MSVC_VERSION and Visual Studio version. - set(FMS_COMPATIBILITY 19.20) - list(APPEND PLATFORM_CLANG_OPTIONS -fms-compatibility - -fms-compatibility-version=${FMS_COMPATIBILITY}) -endif() - -# Create bitcode for each of the source files. 
-foreach(SRC_FILE ${PRECOMPILED_SRCS}) - get_filename_component(SRC_BASE ${SRC_FILE} NAME_WE) - get_filename_component(ABSOLUTE_SRC ${SRC_FILE} ABSOLUTE) - set(BC_FILE ${CMAKE_CURRENT_BINARY_DIR}/${SRC_BASE}.bc) - set(PRECOMPILE_COMMAND) - if(CMAKE_OSX_SYSROOT) - list(APPEND - PRECOMPILE_COMMAND - ${CMAKE_COMMAND} - -E - env - SDKROOT=${CMAKE_OSX_SYSROOT}) - endif() - list(APPEND - PRECOMPILE_COMMAND - ${CLANG_EXECUTABLE} - ${PLATFORM_CLANG_OPTIONS} - -DGANDIVA_IR - -DNDEBUG # DCHECK macros not implemented in precompiled code - -DARROW_STATIC # Do not set __declspec(dllimport) on MSVC on Arrow symbols - -DGANDIVA_STATIC # Do not set __declspec(dllimport) on MSVC on Gandiva symbols - -fno-use-cxa-atexit # Workaround for unresolved __dso_handle - -emit-llvm - -O3 - -c - ${ABSOLUTE_SRC} - -o - ${BC_FILE} - ${ARROW_GANDIVA_PC_CXX_FLAGS} - -I${CMAKE_SOURCE_DIR}/src - -I${ARROW_BINARY_DIR}/src) - - if(NOT ARROW_USE_NATIVE_INT128) - foreach(boost_include_dir ${Boost_INCLUDE_DIRS}) - list(APPEND PRECOMPILE_COMMAND -I${boost_include_dir}) - endforeach() - endif() - add_custom_command(OUTPUT ${BC_FILE} - COMMAND ${PRECOMPILE_COMMAND} - DEPENDS ${SRC_FILE}) - list(APPEND BC_FILES ${BC_FILE}) +set(GANDIVA_PRECOMPILED_BC_FILES) +foreach(SOURCE ${PRECOMPILED_SRCS}) + gandiva_add_bitcode(${SOURCE}) + get_filename_component(SOURCE_BASE ${SOURCE} NAME_WE) + list(APPEND GANDIVA_PRECOMPILED_BC_FILES ${CMAKE_CURRENT_BINARY_DIR}/${SOURCE_BASE}.bc) endforeach() # link all of the bitcode files into a single bitcode file. add_custom_command(OUTPUT ${GANDIVA_PRECOMPILED_BC_PATH} COMMAND ${LLVM_LINK_EXECUTABLE} -o ${GANDIVA_PRECOMPILED_BC_PATH} - ${BC_FILES} - DEPENDS ${BC_FILES}) + ${GANDIVA_PRECOMPILED_BC_FILES} + DEPENDS ${GANDIVA_PRECOMPILED_BC_FILES}) # turn the bitcode file into a C++ static data variable. 
add_custom_command(OUTPUT ${GANDIVA_PRECOMPILED_CC_PATH} diff --git a/cpp/src/gandiva/projector.cc b/cpp/src/gandiva/projector.cc index 7024a3bc208af..e717e825dfc71 100644 --- a/cpp/src/gandiva/projector.cc +++ b/cpp/src/gandiva/projector.cc @@ -87,7 +87,8 @@ Status Projector::Make(SchemaPtr schema, const ExpressionVector& exprs, // Return if any of the expression is invalid since // we will not be able to process further. if (!is_cached) { - ExprValidator expr_validator(llvm_gen->types(), schema); + ExprValidator expr_validator(llvm_gen->types(), schema, + configuration->function_registry()); for (auto& expr : exprs) { ARROW_RETURN_NOT_OK(expr_validator.Validate(expr)); } diff --git a/cpp/src/gandiva/tests/CMakeLists.txt b/cpp/src/gandiva/tests/CMakeLists.txt index 5fa2da16c632f..68138f50d813d 100644 --- a/cpp/src/gandiva/tests/CMakeLists.txt +++ b/cpp/src/gandiva/tests/CMakeLists.txt @@ -15,28 +15,41 @@ # specific language governing permissions and limitations # under the License. -add_gandiva_test(filter_test) -add_gandiva_test(projector_test) -add_gandiva_test(projector_build_validation_test) -add_gandiva_test(if_expr_test) -add_gandiva_test(literal_test) -add_gandiva_test(boolean_expr_test) -add_gandiva_test(binary_test) -add_gandiva_test(date_time_test) -add_gandiva_test(to_string_test) -add_gandiva_test(utf8_test) -add_gandiva_test(hash_test) -add_gandiva_test(in_expr_test) -add_gandiva_test(null_validity_test) -add_gandiva_test(decimal_test) -add_gandiva_test(decimal_single_test) -add_gandiva_test(filter_project_test) +add_gandiva_test(projector-test + SOURCES + binary_test.cc + boolean_expr_test.cc + date_time_test.cc + decimal_single_test.cc + decimal_test.cc + filter_project_test.cc + filter_test.cc + hash_test.cc + huge_table_test.cc + if_expr_test.cc + in_expr_test.cc + literal_test.cc + null_validity_test.cc + projector_build_validation_test.cc + projector_test.cc + test_util.cc + to_string_test.cc + utf8_test.cc) if(ARROW_BUILD_STATIC) - 
add_gandiva_test(projector_test_static SOURCES projector_test.cc USE_STATIC_LINKING) + add_gandiva_test(projector_test_static + SOURCES + projector_test.cc + test_util.cc + USE_STATIC_LINKING) add_arrow_benchmark(micro_benchmarks + SOURCES + micro_benchmarks.cc + test_util.cc PREFIX "gandiva" EXTRA_LINK_LIBS gandiva_static) endif() + +add_subdirectory(external_functions) diff --git a/cpp/src/gandiva/tests/date_time_test.cc b/cpp/src/gandiva/tests/date_time_test.cc index ce1c3d05f6638..6208f1ecba9b5 100644 --- a/cpp/src/gandiva/tests/date_time_test.cc +++ b/cpp/src/gandiva/tests/date_time_test.cc @@ -36,7 +36,7 @@ using arrow::int32; using arrow::int64; using arrow::timestamp; -class TestProjector : public ::testing::Test { +class DateTimeTestProjector : public ::testing::Test { public: void SetUp() { pool_ = arrow::default_memory_pool(); } @@ -111,7 +111,7 @@ int32_t DaysSince(time_t base_line, int32_t yy, int32_t mm, int32_t dd, int32_t return static_cast(((ts - base_line) * 1000 + millis) / MILLIS_IN_DAY); } -TEST_F(TestProjector, TestIsNull) { +TEST_F(DateTimeTestProjector, TestIsNull) { auto d0 = field("d0", date64()); auto t0 = field("t0", time32(arrow::TimeUnit::MILLI)); auto schema = arrow::schema({d0, t0}); @@ -155,7 +155,7 @@ TEST_F(TestProjector, TestIsNull) { EXPECT_ARROW_ARRAY_EQUALS(exp_isnotnull, outputs.at(1)); } -TEST_F(TestProjector, TestDate32IsNull) { +TEST_F(DateTimeTestProjector, TestDate32IsNull) { auto d0 = field("d0", date32()); auto schema = arrow::schema({d0}); @@ -191,7 +191,7 @@ TEST_F(TestProjector, TestDate32IsNull) { EXPECT_ARROW_ARRAY_EQUALS(exp_isnull, outputs.at(0)); } -TEST_F(TestProjector, TestDateTime) { +TEST_F(DateTimeTestProjector, TestDateTime) { auto field0 = field("f0", date64()); auto field1 = field("f1", date32()); auto field2 = field("f2", timestamp(arrow::TimeUnit::MILLI)); @@ -292,7 +292,7 @@ TEST_F(TestProjector, TestDateTime) { EXPECT_ARROW_ARRAY_EQUALS(exp_dd_from_ts, outputs.at(5)); } -TEST_F(TestProjector, 
TestTime) { +TEST_F(DateTimeTestProjector, TestTime) { auto field0 = field("f0", time32(arrow::TimeUnit::MILLI)); auto schema = arrow::schema({field0}); @@ -339,7 +339,7 @@ TEST_F(TestProjector, TestTime) { EXPECT_ARROW_ARRAY_EQUALS(exp_hour, outputs.at(1)); } -TEST_F(TestProjector, TestTimestampDiff) { +TEST_F(DateTimeTestProjector, TestTimestampDiff) { auto f0 = field("f0", timestamp(arrow::TimeUnit::MILLI)); auto f1 = field("f1", timestamp(arrow::TimeUnit::MILLI)); auto schema = arrow::schema({f0, f1}); @@ -439,7 +439,7 @@ TEST_F(TestProjector, TestTimestampDiff) { } } -TEST_F(TestProjector, TestTimestampDiffMonth) { +TEST_F(DateTimeTestProjector, TestTimestampDiffMonth) { auto f0 = field("f0", timestamp(arrow::TimeUnit::MILLI)); auto f1 = field("f1", timestamp(arrow::TimeUnit::MILLI)); auto schema = arrow::schema({f0, f1}); @@ -497,7 +497,7 @@ TEST_F(TestProjector, TestTimestampDiffMonth) { } } -TEST_F(TestProjector, TestMonthsBetween) { +TEST_F(DateTimeTestProjector, TestMonthsBetween) { auto f0 = field("f0", arrow::date64()); auto f1 = field("f1", arrow::date64()); auto schema = arrow::schema({f0, f1}); @@ -550,7 +550,7 @@ TEST_F(TestProjector, TestMonthsBetween) { EXPECT_ARROW_ARRAY_EQUALS(exp_output, outputs.at(0)); } -TEST_F(TestProjector, TestCastTimestampFromInt64) { +TEST_F(DateTimeTestProjector, TestCastTimestampFromInt64) { auto f0 = field("f0", arrow::int64()); auto schema = arrow::schema({f0}); @@ -600,7 +600,7 @@ TEST_F(TestProjector, TestCastTimestampFromInt64) { EXPECT_ARROW_ARRAY_EQUALS(exp_output, outputs.at(0)); } -TEST_F(TestProjector, TestLastDay) { +TEST_F(DateTimeTestProjector, TestLastDay) { auto f0 = field("f0", arrow::date64()); auto schema = arrow::schema({f0}); @@ -650,7 +650,7 @@ TEST_F(TestProjector, TestLastDay) { EXPECT_ARROW_ARRAY_EQUALS(exp_output, outputs.at(0)); } -TEST_F(TestProjector, TestToTimestampFromInt) { +TEST_F(DateTimeTestProjector, TestToTimestampFromInt) { auto f0 = field("f0", arrow::int32()); auto f1 = 
field("f1", arrow::int64()); auto f2 = field("f2", arrow::float32()); @@ -721,7 +721,7 @@ TEST_F(TestProjector, TestToTimestampFromInt) { EXPECT_ARROW_ARRAY_EQUALS(exp_output1, outputs.at(3)); } -TEST_F(TestProjector, TestToUtcTimestamp) { +TEST_F(DateTimeTestProjector, TestToUtcTimestamp) { auto f0 = field("f0", timestamp(arrow::TimeUnit::MILLI)); auto f1 = field("f1", arrow::utf8()); @@ -775,7 +775,7 @@ TEST_F(TestProjector, TestToUtcTimestamp) { EXPECT_ARROW_ARRAY_EQUALS(exp_output, outputs.at(0)); } -TEST_F(TestProjector, TestFromUtcTimestamp) { +TEST_F(DateTimeTestProjector, TestFromUtcTimestamp) { auto f0 = field("f0", timestamp(arrow::TimeUnit::MILLI)); auto f1 = field("f1", arrow::utf8()); diff --git a/cpp/src/gandiva/tests/external_functions/CMakeLists.txt b/cpp/src/gandiva/tests/external_functions/CMakeLists.txt new file mode 100644 index 0000000000000..c309549e874e3 --- /dev/null +++ b/cpp/src/gandiva/tests/external_functions/CMakeLists.txt @@ -0,0 +1,50 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +if(NO_TESTS) + return() +endif() +# +## copy the testing data into the build directory +add_custom_target(extension-tests-data + COMMAND ${CMAKE_COMMAND} -E copy_directory ${CMAKE_CURRENT_SOURCE_DIR} + ${CMAKE_CURRENT_BINARY_DIR}) + +set(TEST_PRECOMPILED_SOURCES multiply_by_two.cc) +set(TEST_PRECOMPILED_BC_FILES) +foreach(SOURCE ${TEST_PRECOMPILED_SOURCES}) + gandiva_add_bitcode(${SOURCE}) + get_filename_component(SOURCE_BASE ${SOURCE} NAME_WE) + list(APPEND TEST_PRECOMPILED_BC_FILES ${CMAKE_CURRENT_BINARY_DIR}/${SOURCE_BASE}.bc) +endforeach() +add_custom_target(extension-tests ALL DEPENDS extension-tests-data + ${TEST_PRECOMPILED_BC_FILES}) +# +## set the GANDIVA_EXTENSION_TEST_DIR macro so that the tests can pass regardless where they are run from +## corresponding extension test data files and bitcode will be copied/generated +set(TARGETS_DEPENDING_ON_TEST_BITCODE_FILES gandiva-internals-test gandiva-projector-test + gandiva-projector-test-static) +foreach(TARGET ${TARGETS_DEPENDING_ON_TEST_BITCODE_FILES}) + if(TARGET ${TARGET}) + add_dependencies(${TARGET} extension-tests) + target_compile_definitions(${TARGET} + PRIVATE -DGANDIVA_EXTENSION_TEST_DIR="${CMAKE_CURRENT_BINARY_DIR}" + ) + endif() +endforeach() + +add_dependencies(gandiva-tests extension-tests) diff --git a/cpp/src/gandiva/tests/external_functions/multiply_by_two.cc b/cpp/src/gandiva/tests/external_functions/multiply_by_two.cc new file mode 100644 index 0000000000000..cc7e2b0f8267f --- /dev/null +++ b/cpp/src/gandiva/tests/external_functions/multiply_by_two.cc @@ -0,0 +1,20 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#include "multiply_by_two.h" // NOLINT + +int64_t multiply_by_two_int32(int32_t value) { return value * 2; } diff --git a/cpp/src/gandiva/tests/external_functions/multiply_by_two.h b/cpp/src/gandiva/tests/external_functions/multiply_by_two.h new file mode 100644 index 0000000000000..b8aec5185457b --- /dev/null +++ b/cpp/src/gandiva/tests/external_functions/multiply_by_two.h @@ -0,0 +1,24 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +#pragma once + +#include + +extern "C" { +int64_t multiply_by_two_int32(int32_t value); +} diff --git a/cpp/src/gandiva/tests/filter_test.cc b/cpp/src/gandiva/tests/filter_test.cc index effd31cc27aa0..749000aa0cf27 100644 --- a/cpp/src/gandiva/tests/filter_test.cc +++ b/cpp/src/gandiva/tests/filter_test.cc @@ -42,8 +42,8 @@ class TestFilter : public ::testing::Test { TEST_F(TestFilter, TestFilterCache) { // schema for input fields - auto field0 = field("f0", int32()); - auto field1 = field("f1", int32()); + auto field0 = field("f0_filter_cache", int32()); + auto field1 = field("f1_filter_cache", int32()); auto schema = arrow::schema({field0, field1}); // Build condition f0 + f1 < 10 @@ -69,7 +69,7 @@ TEST_F(TestFilter, TestFilterCache) { EXPECT_TRUE(cached_filter->GetBuiltFromCache()); // schema is different should return a new filter. - auto field2 = field("f2", int32()); + auto field2 = field("f2_filter_cache", int32()); auto different_schema = arrow::schema({field0, field1, field2}); std::shared_ptr should_be_new_filter; status = diff --git a/cpp/src/gandiva/tests/huge_table_test.cc b/cpp/src/gandiva/tests/huge_table_test.cc index 46f814b472d84..34c8512f1b0a9 100644 --- a/cpp/src/gandiva/tests/huge_table_test.cc +++ b/cpp/src/gandiva/tests/huge_table_test.cc @@ -139,8 +139,11 @@ TEST_F(LARGE_MEMORY_TEST(TestHugeFilter), TestSimpleHugeFilter) { auto status = Filter::Make(schema, condition, TestConfiguration(), &filter); EXPECT_TRUE(status.ok()); + auto array1 = MakeArrowArray(arr1, validity); + auto array2 = MakeArrowArray(arr2, validity); + // prepare input record batch - auto in_batch = arrow::RecordBatch::Make(schema, num_records, {arr1, arr2}); + auto in_batch = arrow::RecordBatch::Make(schema, num_records, {array1, array2}); std::shared_ptr selection_vector; status = SelectionVector::MakeInt64(num_records, pool_, &selection_vector); diff --git a/cpp/src/gandiva/tests/projector_build_validation_test.cc 
b/cpp/src/gandiva/tests/projector_build_validation_test.cc index 5b86844f940bf..1ed4c77a074ab 100644 --- a/cpp/src/gandiva/tests/projector_build_validation_test.cc +++ b/cpp/src/gandiva/tests/projector_build_validation_test.cc @@ -27,7 +27,7 @@ using arrow::boolean; using arrow::float32; using arrow::int32; -class TestProjector : public ::testing::Test { +class ValidationTestProjector : public ::testing::Test { public: void SetUp() { pool_ = arrow::default_memory_pool(); } @@ -35,7 +35,7 @@ class TestProjector : public ::testing::Test { arrow::MemoryPool* pool_; }; -TEST_F(TestProjector, TestNonexistentFunction) { +TEST_F(ValidationTestProjector, TestNonexistentFunction) { // schema for input fields auto field0 = field("f0", float32()); auto field1 = field("f2", float32()); @@ -57,7 +57,7 @@ TEST_F(TestProjector, TestNonexistentFunction) { EXPECT_TRUE(status.message().find(expected_error) != std::string::npos); } -TEST_F(TestProjector, TestNotMatchingDataType) { +TEST_F(ValidationTestProjector, TestNotMatchingDataType) { // schema for input fields auto field0 = field("f0", float32()); auto schema = arrow::schema({field0}); @@ -78,7 +78,7 @@ TEST_F(TestProjector, TestNotMatchingDataType) { EXPECT_TRUE(status.message().find(expected_error) != std::string::npos); } -TEST_F(TestProjector, TestNotSupportedDataType) { +TEST_F(ValidationTestProjector, TestNotSupportedDataType) { // schema for input fields auto field0 = field("f0", list(int32())); auto schema = arrow::schema({field0}); @@ -98,7 +98,7 @@ TEST_F(TestProjector, TestNotSupportedDataType) { EXPECT_TRUE(status.message().find(expected_error) != std::string::npos); } -TEST_F(TestProjector, TestIncorrectSchemaMissingField) { +TEST_F(ValidationTestProjector, TestIncorrectSchemaMissingField) { // schema for input fields auto field0 = field("f0", float32()); auto field1 = field("f2", float32()); @@ -119,7 +119,7 @@ TEST_F(TestProjector, TestIncorrectSchemaMissingField) { 
EXPECT_TRUE(status.message().find(expected_error) != std::string::npos); } -TEST_F(TestProjector, TestIncorrectSchemaTypeNotMatching) { +TEST_F(ValidationTestProjector, TestIncorrectSchemaTypeNotMatching) { // schema for input fields auto field0 = field("f0", float32()); auto field1 = field("f2", float32()); @@ -142,7 +142,7 @@ TEST_F(TestProjector, TestIncorrectSchemaTypeNotMatching) { EXPECT_TRUE(status.message().find(expected_error) != std::string::npos); } -TEST_F(TestProjector, TestIfNotSupportedFunction) { +TEST_F(ValidationTestProjector, TestIfNotSupportedFunction) { // schema for input fields auto fielda = field("a", int32()); auto fieldb = field("b", int32()); @@ -170,7 +170,7 @@ TEST_F(TestProjector, TestIfNotSupportedFunction) { EXPECT_TRUE(status.IsExpressionValidationError()); } -TEST_F(TestProjector, TestIfNotMatchingReturnType) { +TEST_F(ValidationTestProjector, TestIfNotMatchingReturnType) { // schema for input fields auto fielda = field("a", int32()); auto fieldb = field("b", int32()); @@ -193,7 +193,7 @@ TEST_F(TestProjector, TestIfNotMatchingReturnType) { EXPECT_TRUE(status.IsExpressionValidationError()); } -TEST_F(TestProjector, TestElseNotMatchingReturnType) { +TEST_F(ValidationTestProjector, TestElseNotMatchingReturnType) { // schema for input fields auto fielda = field("a", int32()); auto fieldb = field("b", int32()); @@ -218,7 +218,7 @@ TEST_F(TestProjector, TestElseNotMatchingReturnType) { EXPECT_TRUE(status.IsExpressionValidationError()); } -TEST_F(TestProjector, TestElseNotSupportedType) { +TEST_F(ValidationTestProjector, TestElseNotSupportedType) { // schema for input fields auto fielda = field("a", int32()); auto fieldb = field("b", int32()); @@ -244,7 +244,7 @@ TEST_F(TestProjector, TestElseNotSupportedType) { EXPECT_EQ(status.code(), StatusCode::ExpressionValidationError); } -TEST_F(TestProjector, TestAndMinChildren) { +TEST_F(ValidationTestProjector, TestAndMinChildren) { // schema for input fields auto fielda = field("a", 
boolean()); auto schema = arrow::schema({fielda}); @@ -263,7 +263,7 @@ TEST_F(TestProjector, TestAndMinChildren) { EXPECT_TRUE(status.IsExpressionValidationError()); } -TEST_F(TestProjector, TestAndBooleanArgType) { +TEST_F(ValidationTestProjector, TestAndBooleanArgType) { // schema for input fields auto fielda = field("a", boolean()); auto fieldb = field("b", int32()); diff --git a/cpp/src/gandiva/tests/projector_test.cc b/cpp/src/gandiva/tests/projector_test.cc index 462fae64393fd..38566fb408ab5 100644 --- a/cpp/src/gandiva/tests/projector_test.cc +++ b/cpp/src/gandiva/tests/projector_test.cc @@ -26,6 +26,7 @@ #include #include "arrow/memory_pool.h" +#include "gandiva/function_registry.h" #include "gandiva/literal_holder.h" #include "gandiva/node.h" #include "gandiva/tests/test_util.h" @@ -3582,4 +3583,29 @@ TEST_F(TestProjector, TestSqrtFloat64) { EXPECT_ARROW_ARRAY_EQUALS(out, outs.at(0)); } +TEST_F(TestProjector, TestExtendedFunctions) { + auto in_field = field("in", arrow::int32()); + auto schema = arrow::schema({in_field}); + auto out_field = field("out", arrow::int64()); + // the multiply_by_two function is only available in the external function's IR bitcode + auto multiply = + TreeExprBuilder::MakeExpression("multiply_by_two", {in_field}, out_field); + + std::shared_ptr projector; + auto external_registry = std::make_shared(); + auto config_with_func_registry = + TestConfigurationWithFunctionRegistry(std::move(external_registry)); + ARROW_EXPECT_OK( + Projector::Make(schema, {multiply}, config_with_func_registry, &projector)); + + int num_records = 4; + auto array = MakeArrowArrayInt32({1, 2, 3, 4}, {true, true, true, true}); + auto in_batch = arrow::RecordBatch::Make(schema, num_records, {array}); + auto out = MakeArrowArrayInt64({2, 4, 6, 8}, {true, true, true, true}); + + arrow::ArrayVector outs; + ARROW_EXPECT_OK(projector->Evaluate(*in_batch, pool_, &outs)); + EXPECT_ARROW_ARRAY_EQUALS(out, outs.at(0)); +} + } // namespace gandiva diff --git 
a/cpp/src/gandiva/tests/test_util.cc b/cpp/src/gandiva/tests/test_util.cc new file mode 100644 index 0000000000000..4a0a15c7223db --- /dev/null +++ b/cpp/src/gandiva/tests/test_util.cc @@ -0,0 +1,53 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#include "gandiva/tests/test_util.h" + +#include "arrow/util/io_util.h" +#include "arrow/util/logging.h" + +namespace gandiva { +std::shared_ptr TestConfiguration() { + return ConfigurationBuilder::DefaultConfiguration(); +} + +#ifndef GANDIVA_EXTENSION_TEST_DIR +#define GANDIVA_EXTENSION_TEST_DIR "." 
+#endif + +std::string GetTestFunctionLLVMIRPath() { + const auto base = + arrow::internal::PlatformFilename::FromString(GANDIVA_EXTENSION_TEST_DIR); + DCHECK_OK(base.status()); + return base->Join("multiply_by_two.bc")->ToString(); +} + +NativeFunction GetTestExternalFunction() { + NativeFunction multiply_by_two_func( + "multiply_by_two", {}, {arrow::int32()}, arrow::int64(), + ResultNullableType::kResultNullIfNull, "multiply_by_two_int32"); + return multiply_by_two_func; +} + +std::shared_ptr TestConfigurationWithFunctionRegistry( + std::shared_ptr registry) { + ARROW_EXPECT_OK( + registry->Register({GetTestExternalFunction()}, GetTestFunctionLLVMIRPath())); + auto external_func_config = ConfigurationBuilder().build(std::move(registry)); + return external_func_config; +} +} // namespace gandiva diff --git a/cpp/src/gandiva/tests/test_util.h b/cpp/src/gandiva/tests/test_util.h index 99df90769e0ad..e431e53096c2c 100644 --- a/cpp/src/gandiva/tests/test_util.h +++ b/cpp/src/gandiva/tests/test_util.h @@ -96,9 +96,12 @@ static inline ArrayPtr MakeArrowTypeArray(const std::shared_ptr EXPECT_TRUE((a)->Equals(b)) << "expected type: " << (a)->ToString() \ << " actual type: " << (b)->ToString() -static inline std::shared_ptr TestConfiguration() { - auto builder = ConfigurationBuilder(); - return builder.DefaultConfiguration(); -} +std::shared_ptr TestConfiguration(); + +std::shared_ptr TestConfigurationWithFunctionRegistry( + std::shared_ptr registry); + +std::string GetTestFunctionLLVMIRPath(); +NativeFunction GetTestExternalFunction(); } // namespace gandiva diff --git a/cpp/src/gandiva/tree_expr_test.cc b/cpp/src/gandiva/tree_expr_test.cc index e70cf12898124..86a826f29367f 100644 --- a/cpp/src/gandiva/tree_expr_test.cc +++ b/cpp/src/gandiva/tree_expr_test.cc @@ -45,7 +45,7 @@ class TestExprTree : public ::testing::Test { FieldPtr i1_; // int32 FieldPtr b0_; // bool - FunctionRegistry registry_; + std::shared_ptr registry_ = gandiva::default_function_registry(); }; 
TEST_F(TestExprTree, TestField) { @@ -57,7 +57,7 @@ TEST_F(TestExprTree, TestField) { auto n1 = TreeExprBuilder::MakeField(b0_); EXPECT_EQ(n1->return_type(), boolean()); - ExprDecomposer decomposer(registry_, annotator); + ExprDecomposer decomposer(*registry_, annotator); ValueValidityPairPtr pair; auto status = decomposer.Decompose(*n1, &pair); DCHECK_EQ(status.ok(), true) << status.message(); @@ -88,7 +88,7 @@ TEST_F(TestExprTree, TestBinary) { EXPECT_EQ(add->return_type(), int32()); EXPECT_TRUE(sign == FunctionSignature("add", {int32(), int32()}, int32())); - ExprDecomposer decomposer(registry_, annotator); + ExprDecomposer decomposer(*registry_, annotator); ValueValidityPairPtr pair; auto status = decomposer.Decompose(*n, &pair); DCHECK_EQ(status.ok(), true) << status.message(); @@ -97,7 +97,7 @@ TEST_F(TestExprTree, TestBinary) { auto null_if_null = std::dynamic_pointer_cast(value); FunctionSignature signature("add", {int32(), int32()}, int32()); - const NativeFunction* fn = registry_.LookupSignature(signature); + const NativeFunction* fn = registry_->LookupSignature(signature); EXPECT_EQ(null_if_null->native_function(), fn); } @@ -114,7 +114,7 @@ TEST_F(TestExprTree, TestUnary) { EXPECT_EQ(unaryFn->return_type(), boolean()); EXPECT_TRUE(sign == FunctionSignature("isnumeric", {int32()}, boolean())); - ExprDecomposer decomposer(registry_, annotator); + ExprDecomposer decomposer(*registry_, annotator); ValueValidityPairPtr pair; auto status = decomposer.Decompose(*n, &pair); DCHECK_EQ(status.ok(), true) << status.message(); @@ -123,7 +123,7 @@ TEST_F(TestExprTree, TestUnary) { auto never_null = std::dynamic_pointer_cast(value); FunctionSignature signature("isnumeric", {int32()}, boolean()); - const NativeFunction* fn = registry_.LookupSignature(signature); + const NativeFunction* fn = registry_->LookupSignature(signature); EXPECT_EQ(never_null->native_function(), fn); } @@ -143,7 +143,7 @@ TEST_F(TestExprTree, TestExpression) { func_desc->return_type()); 
EXPECT_TRUE(sign == FunctionSignature("add", {int32(), int32()}, int32())); - ExprDecomposer decomposer(registry_, annotator); + ExprDecomposer decomposer(*registry_, annotator); ValueValidityPairPtr pair; auto status = decomposer.Decompose(*root_node, &pair); DCHECK_EQ(status.ok(), true) << status.message(); @@ -152,7 +152,7 @@ TEST_F(TestExprTree, TestExpression) { auto null_if_null = std::dynamic_pointer_cast(value); FunctionSignature signature("add", {int32(), int32()}, int32()); - const NativeFunction* fn = registry_.LookupSignature(signature); + const NativeFunction* fn = registry_->LookupSignature(signature); EXPECT_EQ(null_if_null->native_function(), fn); } diff --git a/cpp/src/generated/parquet_types.cpp b/cpp/src/generated/parquet_types.cpp index f4e378fd3822a..86188581e0c42 100644 --- a/cpp/src/generated/parquet_types.cpp +++ b/cpp/src/generated/parquet_types.cpp @@ -1288,6 +1288,81 @@ void DateType::printTo(std::ostream& out) const { } +Float16Type::~Float16Type() noexcept { +} + +std::ostream& operator<<(std::ostream& out, const Float16Type& obj) +{ + obj.printTo(out); + return out; +} + + +uint32_t Float16Type::read(::apache::thrift::protocol::TProtocol* iprot) { + + ::apache::thrift::protocol::TInputRecursionTracker tracker(*iprot); + uint32_t xfer = 0; + std::string fname; + ::apache::thrift::protocol::TType ftype; + int16_t fid; + + xfer += iprot->readStructBegin(fname); + + using ::apache::thrift::protocol::TProtocolException; + + + while (true) + { + xfer += iprot->readFieldBegin(fname, ftype, fid); + if (ftype == ::apache::thrift::protocol::T_STOP) { + break; + } + xfer += iprot->skip(ftype); + xfer += iprot->readFieldEnd(); + } + + xfer += iprot->readStructEnd(); + + return xfer; +} + +uint32_t Float16Type::write(::apache::thrift::protocol::TProtocol* oprot) const { + uint32_t xfer = 0; + ::apache::thrift::protocol::TOutputRecursionTracker tracker(*oprot); + xfer += oprot->writeStructBegin("Float16Type"); + + xfer += oprot->writeFieldStop(); + 
xfer += oprot->writeStructEnd(); + return xfer; +} + +void swap(Float16Type &a, Float16Type &b) { + using ::std::swap; + (void) a; + (void) b; +} + +Float16Type::Float16Type(const Float16Type& other28) noexcept { + (void) other28; +} +Float16Type::Float16Type(Float16Type&& other29) noexcept { + (void) other29; +} +Float16Type& Float16Type::operator=(const Float16Type& other30) noexcept { + (void) other30; + return *this; +} +Float16Type& Float16Type::operator=(Float16Type&& other31) noexcept { + (void) other31; + return *this; +} +void Float16Type::printTo(std::ostream& out) const { + using ::apache::thrift::to_string; + out << "Float16Type("; + out << ")"; +} + + NullType::~NullType() noexcept { } @@ -1342,18 +1417,18 @@ void swap(NullType &a, NullType &b) { (void) b; } -NullType::NullType(const NullType& other28) noexcept { - (void) other28; +NullType::NullType(const NullType& other32) noexcept { + (void) other32; } -NullType::NullType(NullType&& other29) noexcept { - (void) other29; +NullType::NullType(NullType&& other33) noexcept { + (void) other33; } -NullType& NullType::operator=(const NullType& other30) noexcept { - (void) other30; +NullType& NullType::operator=(const NullType& other34) noexcept { + (void) other34; return *this; } -NullType& NullType::operator=(NullType&& other31) noexcept { - (void) other31; +NullType& NullType::operator=(NullType&& other35) noexcept { + (void) other35; return *this; } void NullType::printTo(std::ostream& out) const { @@ -1460,22 +1535,22 @@ void swap(DecimalType &a, DecimalType &b) { swap(a.precision, b.precision); } -DecimalType::DecimalType(const DecimalType& other32) noexcept { - scale = other32.scale; - precision = other32.precision; +DecimalType::DecimalType(const DecimalType& other36) noexcept { + scale = other36.scale; + precision = other36.precision; } -DecimalType::DecimalType(DecimalType&& other33) noexcept { - scale = other33.scale; - precision = other33.precision; +DecimalType::DecimalType(DecimalType&& 
other37) noexcept { + scale = other37.scale; + precision = other37.precision; } -DecimalType& DecimalType::operator=(const DecimalType& other34) noexcept { - scale = other34.scale; - precision = other34.precision; +DecimalType& DecimalType::operator=(const DecimalType& other38) noexcept { + scale = other38.scale; + precision = other38.precision; return *this; } -DecimalType& DecimalType::operator=(DecimalType&& other35) noexcept { - scale = other35.scale; - precision = other35.precision; +DecimalType& DecimalType::operator=(DecimalType&& other39) noexcept { + scale = other39.scale; + precision = other39.precision; return *this; } void DecimalType::printTo(std::ostream& out) const { @@ -1541,18 +1616,18 @@ void swap(MilliSeconds &a, MilliSeconds &b) { (void) b; } -MilliSeconds::MilliSeconds(const MilliSeconds& other36) noexcept { - (void) other36; +MilliSeconds::MilliSeconds(const MilliSeconds& other40) noexcept { + (void) other40; } -MilliSeconds::MilliSeconds(MilliSeconds&& other37) noexcept { - (void) other37; +MilliSeconds::MilliSeconds(MilliSeconds&& other41) noexcept { + (void) other41; } -MilliSeconds& MilliSeconds::operator=(const MilliSeconds& other38) noexcept { - (void) other38; +MilliSeconds& MilliSeconds::operator=(const MilliSeconds& other42) noexcept { + (void) other42; return *this; } -MilliSeconds& MilliSeconds::operator=(MilliSeconds&& other39) noexcept { - (void) other39; +MilliSeconds& MilliSeconds::operator=(MilliSeconds&& other43) noexcept { + (void) other43; return *this; } void MilliSeconds::printTo(std::ostream& out) const { @@ -1616,18 +1691,18 @@ void swap(MicroSeconds &a, MicroSeconds &b) { (void) b; } -MicroSeconds::MicroSeconds(const MicroSeconds& other40) noexcept { - (void) other40; +MicroSeconds::MicroSeconds(const MicroSeconds& other44) noexcept { + (void) other44; } -MicroSeconds::MicroSeconds(MicroSeconds&& other41) noexcept { - (void) other41; +MicroSeconds::MicroSeconds(MicroSeconds&& other45) noexcept { + (void) other45; } 
-MicroSeconds& MicroSeconds::operator=(const MicroSeconds& other42) noexcept { - (void) other42; +MicroSeconds& MicroSeconds::operator=(const MicroSeconds& other46) noexcept { + (void) other46; return *this; } -MicroSeconds& MicroSeconds::operator=(MicroSeconds&& other43) noexcept { - (void) other43; +MicroSeconds& MicroSeconds::operator=(MicroSeconds&& other47) noexcept { + (void) other47; return *this; } void MicroSeconds::printTo(std::ostream& out) const { @@ -1691,18 +1766,18 @@ void swap(NanoSeconds &a, NanoSeconds &b) { (void) b; } -NanoSeconds::NanoSeconds(const NanoSeconds& other44) noexcept { - (void) other44; +NanoSeconds::NanoSeconds(const NanoSeconds& other48) noexcept { + (void) other48; } -NanoSeconds::NanoSeconds(NanoSeconds&& other45) noexcept { - (void) other45; +NanoSeconds::NanoSeconds(NanoSeconds&& other49) noexcept { + (void) other49; } -NanoSeconds& NanoSeconds::operator=(const NanoSeconds& other46) noexcept { - (void) other46; +NanoSeconds& NanoSeconds::operator=(const NanoSeconds& other50) noexcept { + (void) other50; return *this; } -NanoSeconds& NanoSeconds::operator=(NanoSeconds&& other47) noexcept { - (void) other47; +NanoSeconds& NanoSeconds::operator=(NanoSeconds&& other51) noexcept { + (void) other51; return *this; } void NanoSeconds::printTo(std::ostream& out) const { @@ -1827,30 +1902,30 @@ void swap(TimeUnit &a, TimeUnit &b) { swap(a.__isset, b.__isset); } -TimeUnit::TimeUnit(const TimeUnit& other48) noexcept { - MILLIS = other48.MILLIS; - MICROS = other48.MICROS; - NANOS = other48.NANOS; - __isset = other48.__isset; +TimeUnit::TimeUnit(const TimeUnit& other52) noexcept { + MILLIS = other52.MILLIS; + MICROS = other52.MICROS; + NANOS = other52.NANOS; + __isset = other52.__isset; } -TimeUnit::TimeUnit(TimeUnit&& other49) noexcept { - MILLIS = std::move(other49.MILLIS); - MICROS = std::move(other49.MICROS); - NANOS = std::move(other49.NANOS); - __isset = other49.__isset; +TimeUnit::TimeUnit(TimeUnit&& other53) noexcept { + MILLIS = 
std::move(other53.MILLIS); + MICROS = std::move(other53.MICROS); + NANOS = std::move(other53.NANOS); + __isset = other53.__isset; } -TimeUnit& TimeUnit::operator=(const TimeUnit& other50) noexcept { - MILLIS = other50.MILLIS; - MICROS = other50.MICROS; - NANOS = other50.NANOS; - __isset = other50.__isset; +TimeUnit& TimeUnit::operator=(const TimeUnit& other54) noexcept { + MILLIS = other54.MILLIS; + MICROS = other54.MICROS; + NANOS = other54.NANOS; + __isset = other54.__isset; return *this; } -TimeUnit& TimeUnit::operator=(TimeUnit&& other51) noexcept { - MILLIS = std::move(other51.MILLIS); - MICROS = std::move(other51.MICROS); - NANOS = std::move(other51.NANOS); - __isset = other51.__isset; +TimeUnit& TimeUnit::operator=(TimeUnit&& other55) noexcept { + MILLIS = std::move(other55.MILLIS); + MICROS = std::move(other55.MICROS); + NANOS = std::move(other55.NANOS); + __isset = other55.__isset; return *this; } void TimeUnit::printTo(std::ostream& out) const { @@ -1960,22 +2035,22 @@ void swap(TimestampType &a, TimestampType &b) { swap(a.unit, b.unit); } -TimestampType::TimestampType(const TimestampType& other52) noexcept { - isAdjustedToUTC = other52.isAdjustedToUTC; - unit = other52.unit; +TimestampType::TimestampType(const TimestampType& other56) noexcept { + isAdjustedToUTC = other56.isAdjustedToUTC; + unit = other56.unit; } -TimestampType::TimestampType(TimestampType&& other53) noexcept { - isAdjustedToUTC = other53.isAdjustedToUTC; - unit = std::move(other53.unit); +TimestampType::TimestampType(TimestampType&& other57) noexcept { + isAdjustedToUTC = other57.isAdjustedToUTC; + unit = std::move(other57.unit); } -TimestampType& TimestampType::operator=(const TimestampType& other54) noexcept { - isAdjustedToUTC = other54.isAdjustedToUTC; - unit = other54.unit; +TimestampType& TimestampType::operator=(const TimestampType& other58) noexcept { + isAdjustedToUTC = other58.isAdjustedToUTC; + unit = other58.unit; return *this; } -TimestampType& 
TimestampType::operator=(TimestampType&& other55) noexcept { - isAdjustedToUTC = other55.isAdjustedToUTC; - unit = std::move(other55.unit); +TimestampType& TimestampType::operator=(TimestampType&& other59) noexcept { + isAdjustedToUTC = other59.isAdjustedToUTC; + unit = std::move(other59.unit); return *this; } void TimestampType::printTo(std::ostream& out) const { @@ -2084,22 +2159,22 @@ void swap(TimeType &a, TimeType &b) { swap(a.unit, b.unit); } -TimeType::TimeType(const TimeType& other56) noexcept { - isAdjustedToUTC = other56.isAdjustedToUTC; - unit = other56.unit; +TimeType::TimeType(const TimeType& other60) noexcept { + isAdjustedToUTC = other60.isAdjustedToUTC; + unit = other60.unit; } -TimeType::TimeType(TimeType&& other57) noexcept { - isAdjustedToUTC = other57.isAdjustedToUTC; - unit = std::move(other57.unit); +TimeType::TimeType(TimeType&& other61) noexcept { + isAdjustedToUTC = other61.isAdjustedToUTC; + unit = std::move(other61.unit); } -TimeType& TimeType::operator=(const TimeType& other58) noexcept { - isAdjustedToUTC = other58.isAdjustedToUTC; - unit = other58.unit; +TimeType& TimeType::operator=(const TimeType& other62) noexcept { + isAdjustedToUTC = other62.isAdjustedToUTC; + unit = other62.unit; return *this; } -TimeType& TimeType::operator=(TimeType&& other59) noexcept { - isAdjustedToUTC = other59.isAdjustedToUTC; - unit = std::move(other59.unit); +TimeType& TimeType::operator=(TimeType&& other63) noexcept { + isAdjustedToUTC = other63.isAdjustedToUTC; + unit = std::move(other63.unit); return *this; } void TimeType::printTo(std::ostream& out) const { @@ -2208,22 +2283,22 @@ void swap(IntType &a, IntType &b) { swap(a.isSigned, b.isSigned); } -IntType::IntType(const IntType& other60) noexcept { - bitWidth = other60.bitWidth; - isSigned = other60.isSigned; +IntType::IntType(const IntType& other64) noexcept { + bitWidth = other64.bitWidth; + isSigned = other64.isSigned; } -IntType::IntType(IntType&& other61) noexcept { - bitWidth = 
other61.bitWidth; - isSigned = other61.isSigned; +IntType::IntType(IntType&& other65) noexcept { + bitWidth = other65.bitWidth; + isSigned = other65.isSigned; } -IntType& IntType::operator=(const IntType& other62) noexcept { - bitWidth = other62.bitWidth; - isSigned = other62.isSigned; +IntType& IntType::operator=(const IntType& other66) noexcept { + bitWidth = other66.bitWidth; + isSigned = other66.isSigned; return *this; } -IntType& IntType::operator=(IntType&& other63) noexcept { - bitWidth = other63.bitWidth; - isSigned = other63.isSigned; +IntType& IntType::operator=(IntType&& other67) noexcept { + bitWidth = other67.bitWidth; + isSigned = other67.isSigned; return *this; } void IntType::printTo(std::ostream& out) const { @@ -2289,18 +2364,18 @@ void swap(JsonType &a, JsonType &b) { (void) b; } -JsonType::JsonType(const JsonType& other64) noexcept { - (void) other64; +JsonType::JsonType(const JsonType& other68) noexcept { + (void) other68; } -JsonType::JsonType(JsonType&& other65) noexcept { - (void) other65; +JsonType::JsonType(JsonType&& other69) noexcept { + (void) other69; } -JsonType& JsonType::operator=(const JsonType& other66) noexcept { - (void) other66; +JsonType& JsonType::operator=(const JsonType& other70) noexcept { + (void) other70; return *this; } -JsonType& JsonType::operator=(JsonType&& other67) noexcept { - (void) other67; +JsonType& JsonType::operator=(JsonType&& other71) noexcept { + (void) other71; return *this; } void JsonType::printTo(std::ostream& out) const { @@ -2364,18 +2439,18 @@ void swap(BsonType &a, BsonType &b) { (void) b; } -BsonType::BsonType(const BsonType& other68) noexcept { - (void) other68; +BsonType::BsonType(const BsonType& other72) noexcept { + (void) other72; } -BsonType::BsonType(BsonType&& other69) noexcept { - (void) other69; +BsonType::BsonType(BsonType&& other73) noexcept { + (void) other73; } -BsonType& BsonType::operator=(const BsonType& other70) noexcept { - (void) other70; +BsonType& BsonType::operator=(const 
BsonType& other74) noexcept { + (void) other74; return *this; } -BsonType& BsonType::operator=(BsonType&& other71) noexcept { - (void) other71; +BsonType& BsonType::operator=(BsonType&& other75) noexcept { + (void) other75; return *this; } void BsonType::printTo(std::ostream& out) const { @@ -2453,6 +2528,11 @@ void LogicalType::__set_UUID(const UUIDType& val) { this->UUID = val; __isset.UUID = true; } + +void LogicalType::__set_FLOAT16(const Float16Type& val) { + this->FLOAT16 = val; +__isset.FLOAT16 = true; +} std::ostream& operator<<(std::ostream& out, const LogicalType& obj) { obj.printTo(out); @@ -2585,6 +2665,14 @@ uint32_t LogicalType::read(::apache::thrift::protocol::TProtocol* iprot) { xfer += iprot->skip(ftype); } break; + case 15: + if (ftype == ::apache::thrift::protocol::T_STRUCT) { + xfer += this->FLOAT16.read(iprot); + this->__isset.FLOAT16 = true; + } else { + xfer += iprot->skip(ftype); + } + break; default: xfer += iprot->skip(ftype); break; @@ -2667,6 +2755,11 @@ uint32_t LogicalType::write(::apache::thrift::protocol::TProtocol* oprot) const xfer += this->UUID.write(oprot); xfer += oprot->writeFieldEnd(); } + if (this->__isset.FLOAT16) { + xfer += oprot->writeFieldBegin("FLOAT16", ::apache::thrift::protocol::T_STRUCT, 15); + xfer += this->FLOAT16.write(oprot); + xfer += oprot->writeFieldEnd(); + } xfer += oprot->writeFieldStop(); xfer += oprot->writeStructEnd(); return xfer; @@ -2687,73 +2780,78 @@ void swap(LogicalType &a, LogicalType &b) { swap(a.JSON, b.JSON); swap(a.BSON, b.BSON); swap(a.UUID, b.UUID); + swap(a.FLOAT16, b.FLOAT16); swap(a.__isset, b.__isset); } -LogicalType::LogicalType(const LogicalType& other72) noexcept { - STRING = other72.STRING; - MAP = other72.MAP; - LIST = other72.LIST; - ENUM = other72.ENUM; - DECIMAL = other72.DECIMAL; - DATE = other72.DATE; - TIME = other72.TIME; - TIMESTAMP = other72.TIMESTAMP; - INTEGER = other72.INTEGER; - UNKNOWN = other72.UNKNOWN; - JSON = other72.JSON; - BSON = other72.BSON; - UUID = 
other72.UUID; - __isset = other72.__isset; -} -LogicalType::LogicalType(LogicalType&& other73) noexcept { - STRING = std::move(other73.STRING); - MAP = std::move(other73.MAP); - LIST = std::move(other73.LIST); - ENUM = std::move(other73.ENUM); - DECIMAL = std::move(other73.DECIMAL); - DATE = std::move(other73.DATE); - TIME = std::move(other73.TIME); - TIMESTAMP = std::move(other73.TIMESTAMP); - INTEGER = std::move(other73.INTEGER); - UNKNOWN = std::move(other73.UNKNOWN); - JSON = std::move(other73.JSON); - BSON = std::move(other73.BSON); - UUID = std::move(other73.UUID); - __isset = other73.__isset; -} -LogicalType& LogicalType::operator=(const LogicalType& other74) noexcept { - STRING = other74.STRING; - MAP = other74.MAP; - LIST = other74.LIST; - ENUM = other74.ENUM; - DECIMAL = other74.DECIMAL; - DATE = other74.DATE; - TIME = other74.TIME; - TIMESTAMP = other74.TIMESTAMP; - INTEGER = other74.INTEGER; - UNKNOWN = other74.UNKNOWN; - JSON = other74.JSON; - BSON = other74.BSON; - UUID = other74.UUID; - __isset = other74.__isset; +LogicalType::LogicalType(const LogicalType& other76) noexcept { + STRING = other76.STRING; + MAP = other76.MAP; + LIST = other76.LIST; + ENUM = other76.ENUM; + DECIMAL = other76.DECIMAL; + DATE = other76.DATE; + TIME = other76.TIME; + TIMESTAMP = other76.TIMESTAMP; + INTEGER = other76.INTEGER; + UNKNOWN = other76.UNKNOWN; + JSON = other76.JSON; + BSON = other76.BSON; + UUID = other76.UUID; + FLOAT16 = other76.FLOAT16; + __isset = other76.__isset; +} +LogicalType::LogicalType(LogicalType&& other77) noexcept { + STRING = std::move(other77.STRING); + MAP = std::move(other77.MAP); + LIST = std::move(other77.LIST); + ENUM = std::move(other77.ENUM); + DECIMAL = std::move(other77.DECIMAL); + DATE = std::move(other77.DATE); + TIME = std::move(other77.TIME); + TIMESTAMP = std::move(other77.TIMESTAMP); + INTEGER = std::move(other77.INTEGER); + UNKNOWN = std::move(other77.UNKNOWN); + JSON = std::move(other77.JSON); + BSON = std::move(other77.BSON); + 
UUID = std::move(other77.UUID); + FLOAT16 = std::move(other77.FLOAT16); + __isset = other77.__isset; +} +LogicalType& LogicalType::operator=(const LogicalType& other78) noexcept { + STRING = other78.STRING; + MAP = other78.MAP; + LIST = other78.LIST; + ENUM = other78.ENUM; + DECIMAL = other78.DECIMAL; + DATE = other78.DATE; + TIME = other78.TIME; + TIMESTAMP = other78.TIMESTAMP; + INTEGER = other78.INTEGER; + UNKNOWN = other78.UNKNOWN; + JSON = other78.JSON; + BSON = other78.BSON; + UUID = other78.UUID; + FLOAT16 = other78.FLOAT16; + __isset = other78.__isset; return *this; } -LogicalType& LogicalType::operator=(LogicalType&& other75) noexcept { - STRING = std::move(other75.STRING); - MAP = std::move(other75.MAP); - LIST = std::move(other75.LIST); - ENUM = std::move(other75.ENUM); - DECIMAL = std::move(other75.DECIMAL); - DATE = std::move(other75.DATE); - TIME = std::move(other75.TIME); - TIMESTAMP = std::move(other75.TIMESTAMP); - INTEGER = std::move(other75.INTEGER); - UNKNOWN = std::move(other75.UNKNOWN); - JSON = std::move(other75.JSON); - BSON = std::move(other75.BSON); - UUID = std::move(other75.UUID); - __isset = other75.__isset; +LogicalType& LogicalType::operator=(LogicalType&& other79) noexcept { + STRING = std::move(other79.STRING); + MAP = std::move(other79.MAP); + LIST = std::move(other79.LIST); + ENUM = std::move(other79.ENUM); + DECIMAL = std::move(other79.DECIMAL); + DATE = std::move(other79.DATE); + TIME = std::move(other79.TIME); + TIMESTAMP = std::move(other79.TIMESTAMP); + INTEGER = std::move(other79.INTEGER); + UNKNOWN = std::move(other79.UNKNOWN); + JSON = std::move(other79.JSON); + BSON = std::move(other79.BSON); + UUID = std::move(other79.UUID); + FLOAT16 = std::move(other79.FLOAT16); + __isset = other79.__isset; return *this; } void LogicalType::printTo(std::ostream& out) const { @@ -2772,6 +2870,7 @@ void LogicalType::printTo(std::ostream& out) const { out << ", " << "JSON="; (__isset.JSON ? 
(out << to_string(JSON)) : (out << "")); out << ", " << "BSON="; (__isset.BSON ? (out << to_string(BSON)) : (out << "")); out << ", " << "UUID="; (__isset.UUID ? (out << to_string(UUID)) : (out << "")); + out << ", " << "FLOAT16="; (__isset.FLOAT16 ? (out << to_string(FLOAT16)) : (out << "")); out << ")"; } @@ -2859,9 +2958,9 @@ uint32_t SchemaElement::read(::apache::thrift::protocol::TProtocol* iprot) { { case 1: if (ftype == ::apache::thrift::protocol::T_I32) { - int32_t ecast76; - xfer += iprot->readI32(ecast76); - this->type = static_cast(ecast76); + int32_t ecast80; + xfer += iprot->readI32(ecast80); + this->type = static_cast(ecast80); this->__isset.type = true; } else { xfer += iprot->skip(ftype); @@ -2877,9 +2976,9 @@ uint32_t SchemaElement::read(::apache::thrift::protocol::TProtocol* iprot) { break; case 3: if (ftype == ::apache::thrift::protocol::T_I32) { - int32_t ecast77; - xfer += iprot->readI32(ecast77); - this->repetition_type = static_cast(ecast77); + int32_t ecast81; + xfer += iprot->readI32(ecast81); + this->repetition_type = static_cast(ecast81); this->__isset.repetition_type = true; } else { xfer += iprot->skip(ftype); @@ -2903,9 +3002,9 @@ uint32_t SchemaElement::read(::apache::thrift::protocol::TProtocol* iprot) { break; case 6: if (ftype == ::apache::thrift::protocol::T_I32) { - int32_t ecast78; - xfer += iprot->readI32(ecast78); - this->converted_type = static_cast(ecast78); + int32_t ecast82; + xfer += iprot->readI32(ecast82); + this->converted_type = static_cast(ecast82); this->__isset.converted_type = true; } else { xfer += iprot->skip(ftype); @@ -3031,58 +3130,58 @@ void swap(SchemaElement &a, SchemaElement &b) { swap(a.__isset, b.__isset); } -SchemaElement::SchemaElement(const SchemaElement& other79) { - type = other79.type; - type_length = other79.type_length; - repetition_type = other79.repetition_type; - name = other79.name; - num_children = other79.num_children; - converted_type = other79.converted_type; - scale = other79.scale; - 
precision = other79.precision; - field_id = other79.field_id; - logicalType = other79.logicalType; - __isset = other79.__isset; -} -SchemaElement::SchemaElement(SchemaElement&& other80) noexcept { - type = other80.type; - type_length = other80.type_length; - repetition_type = other80.repetition_type; - name = std::move(other80.name); - num_children = other80.num_children; - converted_type = other80.converted_type; - scale = other80.scale; - precision = other80.precision; - field_id = other80.field_id; - logicalType = std::move(other80.logicalType); - __isset = other80.__isset; -} -SchemaElement& SchemaElement::operator=(const SchemaElement& other81) { - type = other81.type; - type_length = other81.type_length; - repetition_type = other81.repetition_type; - name = other81.name; - num_children = other81.num_children; - converted_type = other81.converted_type; - scale = other81.scale; - precision = other81.precision; - field_id = other81.field_id; - logicalType = other81.logicalType; - __isset = other81.__isset; +SchemaElement::SchemaElement(const SchemaElement& other83) { + type = other83.type; + type_length = other83.type_length; + repetition_type = other83.repetition_type; + name = other83.name; + num_children = other83.num_children; + converted_type = other83.converted_type; + scale = other83.scale; + precision = other83.precision; + field_id = other83.field_id; + logicalType = other83.logicalType; + __isset = other83.__isset; +} +SchemaElement::SchemaElement(SchemaElement&& other84) noexcept { + type = other84.type; + type_length = other84.type_length; + repetition_type = other84.repetition_type; + name = std::move(other84.name); + num_children = other84.num_children; + converted_type = other84.converted_type; + scale = other84.scale; + precision = other84.precision; + field_id = other84.field_id; + logicalType = std::move(other84.logicalType); + __isset = other84.__isset; +} +SchemaElement& SchemaElement::operator=(const SchemaElement& other85) { + type = 
other85.type; + type_length = other85.type_length; + repetition_type = other85.repetition_type; + name = other85.name; + num_children = other85.num_children; + converted_type = other85.converted_type; + scale = other85.scale; + precision = other85.precision; + field_id = other85.field_id; + logicalType = other85.logicalType; + __isset = other85.__isset; return *this; } -SchemaElement& SchemaElement::operator=(SchemaElement&& other82) noexcept { - type = other82.type; - type_length = other82.type_length; - repetition_type = other82.repetition_type; - name = std::move(other82.name); - num_children = other82.num_children; - converted_type = other82.converted_type; - scale = other82.scale; - precision = other82.precision; - field_id = other82.field_id; - logicalType = std::move(other82.logicalType); - __isset = other82.__isset; +SchemaElement& SchemaElement::operator=(SchemaElement&& other86) noexcept { + type = other86.type; + type_length = other86.type_length; + repetition_type = other86.repetition_type; + name = std::move(other86.name); + num_children = other86.num_children; + converted_type = other86.converted_type; + scale = other86.scale; + precision = other86.precision; + field_id = other86.field_id; + logicalType = std::move(other86.logicalType); + __isset = other86.__isset; return *this; } void SchemaElement::printTo(std::ostream& out) const { @@ -3168,9 +3267,9 @@ uint32_t DataPageHeader::read(::apache::thrift::protocol::TProtocol* iprot) { break; case 2: if (ftype == ::apache::thrift::protocol::T_I32) { - int32_t ecast83; - xfer += iprot->readI32(ecast83); - this->encoding = static_cast(ecast83); + int32_t ecast87; + xfer += iprot->readI32(ecast87); + this->encoding = static_cast(ecast87); isset_encoding = true; } else { xfer += iprot->skip(ftype); @@ -3178,9 +3277,9 @@ uint32_t DataPageHeader::read(::apache::thrift::protocol::TProtocol* iprot) { break; case 3: if (ftype == ::apache::thrift::protocol::T_I32) { - int32_t ecast84; - xfer += 
iprot->readI32(ecast84); - this->definition_level_encoding = static_cast(ecast84); + int32_t ecast88; + xfer += iprot->readI32(ecast88); + this->definition_level_encoding = static_cast(ecast88); isset_definition_level_encoding = true; } else { xfer += iprot->skip(ftype); @@ -3188,9 +3287,9 @@ uint32_t DataPageHeader::read(::apache::thrift::protocol::TProtocol* iprot) { break; case 4: if (ftype == ::apache::thrift::protocol::T_I32) { - int32_t ecast85; - xfer += iprot->readI32(ecast85); - this->repetition_level_encoding = static_cast(ecast85); + int32_t ecast89; + xfer += iprot->readI32(ecast89); + this->repetition_level_encoding = static_cast(ecast89); isset_repetition_level_encoding = true; } else { xfer += iprot->skip(ftype); @@ -3265,38 +3364,38 @@ void swap(DataPageHeader &a, DataPageHeader &b) { swap(a.__isset, b.__isset); } -DataPageHeader::DataPageHeader(const DataPageHeader& other86) { - num_values = other86.num_values; - encoding = other86.encoding; - definition_level_encoding = other86.definition_level_encoding; - repetition_level_encoding = other86.repetition_level_encoding; - statistics = other86.statistics; - __isset = other86.__isset; -} -DataPageHeader::DataPageHeader(DataPageHeader&& other87) noexcept { - num_values = other87.num_values; - encoding = other87.encoding; - definition_level_encoding = other87.definition_level_encoding; - repetition_level_encoding = other87.repetition_level_encoding; - statistics = std::move(other87.statistics); - __isset = other87.__isset; -} -DataPageHeader& DataPageHeader::operator=(const DataPageHeader& other88) { - num_values = other88.num_values; - encoding = other88.encoding; - definition_level_encoding = other88.definition_level_encoding; - repetition_level_encoding = other88.repetition_level_encoding; - statistics = other88.statistics; - __isset = other88.__isset; +DataPageHeader::DataPageHeader(const DataPageHeader& other90) { + num_values = other90.num_values; + encoding = other90.encoding; + 
definition_level_encoding = other90.definition_level_encoding; + repetition_level_encoding = other90.repetition_level_encoding; + statistics = other90.statistics; + __isset = other90.__isset; +} +DataPageHeader::DataPageHeader(DataPageHeader&& other91) noexcept { + num_values = other91.num_values; + encoding = other91.encoding; + definition_level_encoding = other91.definition_level_encoding; + repetition_level_encoding = other91.repetition_level_encoding; + statistics = std::move(other91.statistics); + __isset = other91.__isset; +} +DataPageHeader& DataPageHeader::operator=(const DataPageHeader& other92) { + num_values = other92.num_values; + encoding = other92.encoding; + definition_level_encoding = other92.definition_level_encoding; + repetition_level_encoding = other92.repetition_level_encoding; + statistics = other92.statistics; + __isset = other92.__isset; return *this; } -DataPageHeader& DataPageHeader::operator=(DataPageHeader&& other89) noexcept { - num_values = other89.num_values; - encoding = other89.encoding; - definition_level_encoding = other89.definition_level_encoding; - repetition_level_encoding = other89.repetition_level_encoding; - statistics = std::move(other89.statistics); - __isset = other89.__isset; +DataPageHeader& DataPageHeader::operator=(DataPageHeader&& other93) noexcept { + num_values = other93.num_values; + encoding = other93.encoding; + definition_level_encoding = other93.definition_level_encoding; + repetition_level_encoding = other93.repetition_level_encoding; + statistics = std::move(other93.statistics); + __isset = other93.__isset; return *this; } void DataPageHeader::printTo(std::ostream& out) const { @@ -3365,18 +3464,18 @@ void swap(IndexPageHeader &a, IndexPageHeader &b) { (void) b; } -IndexPageHeader::IndexPageHeader(const IndexPageHeader& other90) noexcept { - (void) other90; +IndexPageHeader::IndexPageHeader(const IndexPageHeader& other94) noexcept { + (void) other94; } -IndexPageHeader::IndexPageHeader(IndexPageHeader&& 
other91) noexcept { - (void) other91; +IndexPageHeader::IndexPageHeader(IndexPageHeader&& other95) noexcept { + (void) other95; } -IndexPageHeader& IndexPageHeader::operator=(const IndexPageHeader& other92) noexcept { - (void) other92; +IndexPageHeader& IndexPageHeader::operator=(const IndexPageHeader& other96) noexcept { + (void) other96; return *this; } -IndexPageHeader& IndexPageHeader::operator=(IndexPageHeader&& other93) noexcept { - (void) other93; +IndexPageHeader& IndexPageHeader::operator=(IndexPageHeader&& other97) noexcept { + (void) other97; return *this; } void IndexPageHeader::printTo(std::ostream& out) const { @@ -3442,9 +3541,9 @@ uint32_t DictionaryPageHeader::read(::apache::thrift::protocol::TProtocol* iprot break; case 2: if (ftype == ::apache::thrift::protocol::T_I32) { - int32_t ecast94; - xfer += iprot->readI32(ecast94); - this->encoding = static_cast(ecast94); + int32_t ecast98; + xfer += iprot->readI32(ecast98); + this->encoding = static_cast(ecast98); isset_encoding = true; } else { xfer += iprot->skip(ftype); @@ -3505,30 +3604,30 @@ void swap(DictionaryPageHeader &a, DictionaryPageHeader &b) { swap(a.__isset, b.__isset); } -DictionaryPageHeader::DictionaryPageHeader(const DictionaryPageHeader& other95) noexcept { - num_values = other95.num_values; - encoding = other95.encoding; - is_sorted = other95.is_sorted; - __isset = other95.__isset; +DictionaryPageHeader::DictionaryPageHeader(const DictionaryPageHeader& other99) noexcept { + num_values = other99.num_values; + encoding = other99.encoding; + is_sorted = other99.is_sorted; + __isset = other99.__isset; } -DictionaryPageHeader::DictionaryPageHeader(DictionaryPageHeader&& other96) noexcept { - num_values = other96.num_values; - encoding = other96.encoding; - is_sorted = other96.is_sorted; - __isset = other96.__isset; +DictionaryPageHeader::DictionaryPageHeader(DictionaryPageHeader&& other100) noexcept { + num_values = other100.num_values; + encoding = other100.encoding; + is_sorted = 
other100.is_sorted; + __isset = other100.__isset; } -DictionaryPageHeader& DictionaryPageHeader::operator=(const DictionaryPageHeader& other97) noexcept { - num_values = other97.num_values; - encoding = other97.encoding; - is_sorted = other97.is_sorted; - __isset = other97.__isset; +DictionaryPageHeader& DictionaryPageHeader::operator=(const DictionaryPageHeader& other101) noexcept { + num_values = other101.num_values; + encoding = other101.encoding; + is_sorted = other101.is_sorted; + __isset = other101.__isset; return *this; } -DictionaryPageHeader& DictionaryPageHeader::operator=(DictionaryPageHeader&& other98) noexcept { - num_values = other98.num_values; - encoding = other98.encoding; - is_sorted = other98.is_sorted; - __isset = other98.__isset; +DictionaryPageHeader& DictionaryPageHeader::operator=(DictionaryPageHeader&& other102) noexcept { + num_values = other102.num_values; + encoding = other102.encoding; + is_sorted = other102.is_sorted; + __isset = other102.__isset; return *this; } void DictionaryPageHeader::printTo(std::ostream& out) const { @@ -3638,9 +3737,9 @@ uint32_t DataPageHeaderV2::read(::apache::thrift::protocol::TProtocol* iprot) { break; case 4: if (ftype == ::apache::thrift::protocol::T_I32) { - int32_t ecast99; - xfer += iprot->readI32(ecast99); - this->encoding = static_cast(ecast99); + int32_t ecast103; + xfer += iprot->readI32(ecast103); + this->encoding = static_cast(ecast103); isset_encoding = true; } else { xfer += iprot->skip(ftype); @@ -3759,50 +3858,50 @@ void swap(DataPageHeaderV2 &a, DataPageHeaderV2 &b) { swap(a.__isset, b.__isset); } -DataPageHeaderV2::DataPageHeaderV2(const DataPageHeaderV2& other100) { - num_values = other100.num_values; - num_nulls = other100.num_nulls; - num_rows = other100.num_rows; - encoding = other100.encoding; - definition_levels_byte_length = other100.definition_levels_byte_length; - repetition_levels_byte_length = other100.repetition_levels_byte_length; - is_compressed = other100.is_compressed; - 
statistics = other100.statistics; - __isset = other100.__isset; -} -DataPageHeaderV2::DataPageHeaderV2(DataPageHeaderV2&& other101) noexcept { - num_values = other101.num_values; - num_nulls = other101.num_nulls; - num_rows = other101.num_rows; - encoding = other101.encoding; - definition_levels_byte_length = other101.definition_levels_byte_length; - repetition_levels_byte_length = other101.repetition_levels_byte_length; - is_compressed = other101.is_compressed; - statistics = std::move(other101.statistics); - __isset = other101.__isset; -} -DataPageHeaderV2& DataPageHeaderV2::operator=(const DataPageHeaderV2& other102) { - num_values = other102.num_values; - num_nulls = other102.num_nulls; - num_rows = other102.num_rows; - encoding = other102.encoding; - definition_levels_byte_length = other102.definition_levels_byte_length; - repetition_levels_byte_length = other102.repetition_levels_byte_length; - is_compressed = other102.is_compressed; - statistics = other102.statistics; - __isset = other102.__isset; +DataPageHeaderV2::DataPageHeaderV2(const DataPageHeaderV2& other104) { + num_values = other104.num_values; + num_nulls = other104.num_nulls; + num_rows = other104.num_rows; + encoding = other104.encoding; + definition_levels_byte_length = other104.definition_levels_byte_length; + repetition_levels_byte_length = other104.repetition_levels_byte_length; + is_compressed = other104.is_compressed; + statistics = other104.statistics; + __isset = other104.__isset; +} +DataPageHeaderV2::DataPageHeaderV2(DataPageHeaderV2&& other105) noexcept { + num_values = other105.num_values; + num_nulls = other105.num_nulls; + num_rows = other105.num_rows; + encoding = other105.encoding; + definition_levels_byte_length = other105.definition_levels_byte_length; + repetition_levels_byte_length = other105.repetition_levels_byte_length; + is_compressed = other105.is_compressed; + statistics = std::move(other105.statistics); + __isset = other105.__isset; +} +DataPageHeaderV2& 
DataPageHeaderV2::operator=(const DataPageHeaderV2& other106) { + num_values = other106.num_values; + num_nulls = other106.num_nulls; + num_rows = other106.num_rows; + encoding = other106.encoding; + definition_levels_byte_length = other106.definition_levels_byte_length; + repetition_levels_byte_length = other106.repetition_levels_byte_length; + is_compressed = other106.is_compressed; + statistics = other106.statistics; + __isset = other106.__isset; return *this; } -DataPageHeaderV2& DataPageHeaderV2::operator=(DataPageHeaderV2&& other103) noexcept { - num_values = other103.num_values; - num_nulls = other103.num_nulls; - num_rows = other103.num_rows; - encoding = other103.encoding; - definition_levels_byte_length = other103.definition_levels_byte_length; - repetition_levels_byte_length = other103.repetition_levels_byte_length; - is_compressed = other103.is_compressed; - statistics = std::move(other103.statistics); - __isset = other103.__isset; +DataPageHeaderV2& DataPageHeaderV2::operator=(DataPageHeaderV2&& other107) noexcept { + num_values = other107.num_values; + num_nulls = other107.num_nulls; + num_rows = other107.num_rows; + encoding = other107.encoding; + definition_levels_byte_length = other107.definition_levels_byte_length; + repetition_levels_byte_length = other107.repetition_levels_byte_length; + is_compressed = other107.is_compressed; + statistics = std::move(other107.statistics); + __isset = other107.__isset; return *this; } void DataPageHeaderV2::printTo(std::ostream& out) const { @@ -3874,18 +3973,18 @@ void swap(SplitBlockAlgorithm &a, SplitBlockAlgorithm &b) { (void) b; } -SplitBlockAlgorithm::SplitBlockAlgorithm(const SplitBlockAlgorithm& other104) noexcept { - (void) other104; +SplitBlockAlgorithm::SplitBlockAlgorithm(const SplitBlockAlgorithm& other108) noexcept { + (void) other108; } -SplitBlockAlgorithm::SplitBlockAlgorithm(SplitBlockAlgorithm&& other105) noexcept { - (void) other105; 
+SplitBlockAlgorithm::SplitBlockAlgorithm(SplitBlockAlgorithm&& other109) noexcept { + (void) other109; } -SplitBlockAlgorithm& SplitBlockAlgorithm::operator=(const SplitBlockAlgorithm& other106) noexcept { - (void) other106; +SplitBlockAlgorithm& SplitBlockAlgorithm::operator=(const SplitBlockAlgorithm& other110) noexcept { + (void) other110; return *this; } -SplitBlockAlgorithm& SplitBlockAlgorithm::operator=(SplitBlockAlgorithm&& other107) noexcept { - (void) other107; +SplitBlockAlgorithm& SplitBlockAlgorithm::operator=(SplitBlockAlgorithm&& other111) noexcept { + (void) other111; return *this; } void SplitBlockAlgorithm::printTo(std::ostream& out) const { @@ -3972,22 +4071,22 @@ void swap(BloomFilterAlgorithm &a, BloomFilterAlgorithm &b) { swap(a.__isset, b.__isset); } -BloomFilterAlgorithm::BloomFilterAlgorithm(const BloomFilterAlgorithm& other108) noexcept { - BLOCK = other108.BLOCK; - __isset = other108.__isset; +BloomFilterAlgorithm::BloomFilterAlgorithm(const BloomFilterAlgorithm& other112) noexcept { + BLOCK = other112.BLOCK; + __isset = other112.__isset; } -BloomFilterAlgorithm::BloomFilterAlgorithm(BloomFilterAlgorithm&& other109) noexcept { - BLOCK = std::move(other109.BLOCK); - __isset = other109.__isset; +BloomFilterAlgorithm::BloomFilterAlgorithm(BloomFilterAlgorithm&& other113) noexcept { + BLOCK = std::move(other113.BLOCK); + __isset = other113.__isset; } -BloomFilterAlgorithm& BloomFilterAlgorithm::operator=(const BloomFilterAlgorithm& other110) noexcept { - BLOCK = other110.BLOCK; - __isset = other110.__isset; +BloomFilterAlgorithm& BloomFilterAlgorithm::operator=(const BloomFilterAlgorithm& other114) noexcept { + BLOCK = other114.BLOCK; + __isset = other114.__isset; return *this; } -BloomFilterAlgorithm& BloomFilterAlgorithm::operator=(BloomFilterAlgorithm&& other111) noexcept { - BLOCK = std::move(other111.BLOCK); - __isset = other111.__isset; +BloomFilterAlgorithm& BloomFilterAlgorithm::operator=(BloomFilterAlgorithm&& other115) noexcept { + 
BLOCK = std::move(other115.BLOCK); + __isset = other115.__isset; return *this; } void BloomFilterAlgorithm::printTo(std::ostream& out) const { @@ -4052,18 +4151,18 @@ void swap(XxHash &a, XxHash &b) { (void) b; } -XxHash::XxHash(const XxHash& other112) noexcept { - (void) other112; +XxHash::XxHash(const XxHash& other116) noexcept { + (void) other116; } -XxHash::XxHash(XxHash&& other113) noexcept { - (void) other113; +XxHash::XxHash(XxHash&& other117) noexcept { + (void) other117; } -XxHash& XxHash::operator=(const XxHash& other114) noexcept { - (void) other114; +XxHash& XxHash::operator=(const XxHash& other118) noexcept { + (void) other118; return *this; } -XxHash& XxHash::operator=(XxHash&& other115) noexcept { - (void) other115; +XxHash& XxHash::operator=(XxHash&& other119) noexcept { + (void) other119; return *this; } void XxHash::printTo(std::ostream& out) const { @@ -4150,22 +4249,22 @@ void swap(BloomFilterHash &a, BloomFilterHash &b) { swap(a.__isset, b.__isset); } -BloomFilterHash::BloomFilterHash(const BloomFilterHash& other116) noexcept { - XXHASH = other116.XXHASH; - __isset = other116.__isset; +BloomFilterHash::BloomFilterHash(const BloomFilterHash& other120) noexcept { + XXHASH = other120.XXHASH; + __isset = other120.__isset; } -BloomFilterHash::BloomFilterHash(BloomFilterHash&& other117) noexcept { - XXHASH = std::move(other117.XXHASH); - __isset = other117.__isset; +BloomFilterHash::BloomFilterHash(BloomFilterHash&& other121) noexcept { + XXHASH = std::move(other121.XXHASH); + __isset = other121.__isset; } -BloomFilterHash& BloomFilterHash::operator=(const BloomFilterHash& other118) noexcept { - XXHASH = other118.XXHASH; - __isset = other118.__isset; +BloomFilterHash& BloomFilterHash::operator=(const BloomFilterHash& other122) noexcept { + XXHASH = other122.XXHASH; + __isset = other122.__isset; return *this; } -BloomFilterHash& BloomFilterHash::operator=(BloomFilterHash&& other119) noexcept { - XXHASH = std::move(other119.XXHASH); - __isset = 
other119.__isset; +BloomFilterHash& BloomFilterHash::operator=(BloomFilterHash&& other123) noexcept { + XXHASH = std::move(other123.XXHASH); + __isset = other123.__isset; return *this; } void BloomFilterHash::printTo(std::ostream& out) const { @@ -4230,18 +4329,18 @@ void swap(Uncompressed &a, Uncompressed &b) { (void) b; } -Uncompressed::Uncompressed(const Uncompressed& other120) noexcept { - (void) other120; +Uncompressed::Uncompressed(const Uncompressed& other124) noexcept { + (void) other124; } -Uncompressed::Uncompressed(Uncompressed&& other121) noexcept { - (void) other121; +Uncompressed::Uncompressed(Uncompressed&& other125) noexcept { + (void) other125; } -Uncompressed& Uncompressed::operator=(const Uncompressed& other122) noexcept { - (void) other122; +Uncompressed& Uncompressed::operator=(const Uncompressed& other126) noexcept { + (void) other126; return *this; } -Uncompressed& Uncompressed::operator=(Uncompressed&& other123) noexcept { - (void) other123; +Uncompressed& Uncompressed::operator=(Uncompressed&& other127) noexcept { + (void) other127; return *this; } void Uncompressed::printTo(std::ostream& out) const { @@ -4328,22 +4427,22 @@ void swap(BloomFilterCompression &a, BloomFilterCompression &b) { swap(a.__isset, b.__isset); } -BloomFilterCompression::BloomFilterCompression(const BloomFilterCompression& other124) noexcept { - UNCOMPRESSED = other124.UNCOMPRESSED; - __isset = other124.__isset; +BloomFilterCompression::BloomFilterCompression(const BloomFilterCompression& other128) noexcept { + UNCOMPRESSED = other128.UNCOMPRESSED; + __isset = other128.__isset; } -BloomFilterCompression::BloomFilterCompression(BloomFilterCompression&& other125) noexcept { - UNCOMPRESSED = std::move(other125.UNCOMPRESSED); - __isset = other125.__isset; +BloomFilterCompression::BloomFilterCompression(BloomFilterCompression&& other129) noexcept { + UNCOMPRESSED = std::move(other129.UNCOMPRESSED); + __isset = other129.__isset; } -BloomFilterCompression& 
BloomFilterCompression::operator=(const BloomFilterCompression& other126) noexcept { - UNCOMPRESSED = other126.UNCOMPRESSED; - __isset = other126.__isset; +BloomFilterCompression& BloomFilterCompression::operator=(const BloomFilterCompression& other130) noexcept { + UNCOMPRESSED = other130.UNCOMPRESSED; + __isset = other130.__isset; return *this; } -BloomFilterCompression& BloomFilterCompression::operator=(BloomFilterCompression&& other127) noexcept { - UNCOMPRESSED = std::move(other127.UNCOMPRESSED); - __isset = other127.__isset; +BloomFilterCompression& BloomFilterCompression::operator=(BloomFilterCompression&& other131) noexcept { + UNCOMPRESSED = std::move(other131.UNCOMPRESSED); + __isset = other131.__isset; return *this; } void BloomFilterCompression::printTo(std::ostream& out) const { @@ -4491,30 +4590,30 @@ void swap(BloomFilterHeader &a, BloomFilterHeader &b) { swap(a.compression, b.compression); } -BloomFilterHeader::BloomFilterHeader(const BloomFilterHeader& other128) noexcept { - numBytes = other128.numBytes; - algorithm = other128.algorithm; - hash = other128.hash; - compression = other128.compression; +BloomFilterHeader::BloomFilterHeader(const BloomFilterHeader& other132) noexcept { + numBytes = other132.numBytes; + algorithm = other132.algorithm; + hash = other132.hash; + compression = other132.compression; } -BloomFilterHeader::BloomFilterHeader(BloomFilterHeader&& other129) noexcept { - numBytes = other129.numBytes; - algorithm = std::move(other129.algorithm); - hash = std::move(other129.hash); - compression = std::move(other129.compression); +BloomFilterHeader::BloomFilterHeader(BloomFilterHeader&& other133) noexcept { + numBytes = other133.numBytes; + algorithm = std::move(other133.algorithm); + hash = std::move(other133.hash); + compression = std::move(other133.compression); } -BloomFilterHeader& BloomFilterHeader::operator=(const BloomFilterHeader& other130) noexcept { - numBytes = other130.numBytes; - algorithm = other130.algorithm; - hash = 
other130.hash; - compression = other130.compression; +BloomFilterHeader& BloomFilterHeader::operator=(const BloomFilterHeader& other134) noexcept { + numBytes = other134.numBytes; + algorithm = other134.algorithm; + hash = other134.hash; + compression = other134.compression; return *this; } -BloomFilterHeader& BloomFilterHeader::operator=(BloomFilterHeader&& other131) noexcept { - numBytes = other131.numBytes; - algorithm = std::move(other131.algorithm); - hash = std::move(other131.hash); - compression = std::move(other131.compression); +BloomFilterHeader& BloomFilterHeader::operator=(BloomFilterHeader&& other135) noexcept { + numBytes = other135.numBytes; + algorithm = std::move(other135.algorithm); + hash = std::move(other135.hash); + compression = std::move(other135.compression); return *this; } void BloomFilterHeader::printTo(std::ostream& out) const { @@ -4601,9 +4700,9 @@ uint32_t PageHeader::read(::apache::thrift::protocol::TProtocol* iprot) { { case 1: if (ftype == ::apache::thrift::protocol::T_I32) { - int32_t ecast132; - xfer += iprot->readI32(ecast132); - this->type = static_cast(ecast132); + int32_t ecast136; + xfer += iprot->readI32(ecast136); + this->type = static_cast(ecast136); isset_type = true; } else { xfer += iprot->skip(ftype); @@ -4743,50 +4842,50 @@ void swap(PageHeader &a, PageHeader &b) { swap(a.__isset, b.__isset); } -PageHeader::PageHeader(const PageHeader& other133) { - type = other133.type; - uncompressed_page_size = other133.uncompressed_page_size; - compressed_page_size = other133.compressed_page_size; - crc = other133.crc; - data_page_header = other133.data_page_header; - index_page_header = other133.index_page_header; - dictionary_page_header = other133.dictionary_page_header; - data_page_header_v2 = other133.data_page_header_v2; - __isset = other133.__isset; -} -PageHeader::PageHeader(PageHeader&& other134) noexcept { - type = other134.type; - uncompressed_page_size = other134.uncompressed_page_size; - compressed_page_size = 
other134.compressed_page_size; - crc = other134.crc; - data_page_header = std::move(other134.data_page_header); - index_page_header = std::move(other134.index_page_header); - dictionary_page_header = std::move(other134.dictionary_page_header); - data_page_header_v2 = std::move(other134.data_page_header_v2); - __isset = other134.__isset; -} -PageHeader& PageHeader::operator=(const PageHeader& other135) { - type = other135.type; - uncompressed_page_size = other135.uncompressed_page_size; - compressed_page_size = other135.compressed_page_size; - crc = other135.crc; - data_page_header = other135.data_page_header; - index_page_header = other135.index_page_header; - dictionary_page_header = other135.dictionary_page_header; - data_page_header_v2 = other135.data_page_header_v2; - __isset = other135.__isset; +PageHeader::PageHeader(const PageHeader& other137) { + type = other137.type; + uncompressed_page_size = other137.uncompressed_page_size; + compressed_page_size = other137.compressed_page_size; + crc = other137.crc; + data_page_header = other137.data_page_header; + index_page_header = other137.index_page_header; + dictionary_page_header = other137.dictionary_page_header; + data_page_header_v2 = other137.data_page_header_v2; + __isset = other137.__isset; +} +PageHeader::PageHeader(PageHeader&& other138) noexcept { + type = other138.type; + uncompressed_page_size = other138.uncompressed_page_size; + compressed_page_size = other138.compressed_page_size; + crc = other138.crc; + data_page_header = std::move(other138.data_page_header); + index_page_header = std::move(other138.index_page_header); + dictionary_page_header = std::move(other138.dictionary_page_header); + data_page_header_v2 = std::move(other138.data_page_header_v2); + __isset = other138.__isset; +} +PageHeader& PageHeader::operator=(const PageHeader& other139) { + type = other139.type; + uncompressed_page_size = other139.uncompressed_page_size; + compressed_page_size = other139.compressed_page_size; + crc = 
other139.crc; + data_page_header = other139.data_page_header; + index_page_header = other139.index_page_header; + dictionary_page_header = other139.dictionary_page_header; + data_page_header_v2 = other139.data_page_header_v2; + __isset = other139.__isset; return *this; } -PageHeader& PageHeader::operator=(PageHeader&& other136) noexcept { - type = other136.type; - uncompressed_page_size = other136.uncompressed_page_size; - compressed_page_size = other136.compressed_page_size; - crc = other136.crc; - data_page_header = std::move(other136.data_page_header); - index_page_header = std::move(other136.index_page_header); - dictionary_page_header = std::move(other136.dictionary_page_header); - data_page_header_v2 = std::move(other136.data_page_header_v2); - __isset = other136.__isset; +PageHeader& PageHeader::operator=(PageHeader&& other140) noexcept { + type = other140.type; + uncompressed_page_size = other140.uncompressed_page_size; + compressed_page_size = other140.compressed_page_size; + crc = other140.crc; + data_page_header = std::move(other140.data_page_header); + index_page_header = std::move(other140.index_page_header); + dictionary_page_header = std::move(other140.dictionary_page_header); + data_page_header_v2 = std::move(other140.data_page_header_v2); + __isset = other140.__isset; return *this; } void PageHeader::printTo(std::ostream& out) const { @@ -4901,26 +5000,26 @@ void swap(KeyValue &a, KeyValue &b) { swap(a.__isset, b.__isset); } -KeyValue::KeyValue(const KeyValue& other137) { - key = other137.key; - value = other137.value; - __isset = other137.__isset; +KeyValue::KeyValue(const KeyValue& other141) { + key = other141.key; + value = other141.value; + __isset = other141.__isset; } -KeyValue::KeyValue(KeyValue&& other138) noexcept { - key = std::move(other138.key); - value = std::move(other138.value); - __isset = other138.__isset; +KeyValue::KeyValue(KeyValue&& other142) noexcept { + key = std::move(other142.key); + value = std::move(other142.value); + 
__isset = other142.__isset; } -KeyValue& KeyValue::operator=(const KeyValue& other139) { - key = other139.key; - value = other139.value; - __isset = other139.__isset; +KeyValue& KeyValue::operator=(const KeyValue& other143) { + key = other143.key; + value = other143.value; + __isset = other143.__isset; return *this; } -KeyValue& KeyValue::operator=(KeyValue&& other140) noexcept { - key = std::move(other140.key); - value = std::move(other140.value); - __isset = other140.__isset; +KeyValue& KeyValue::operator=(KeyValue&& other144) noexcept { + key = std::move(other144.key); + value = std::move(other144.value); + __isset = other144.__isset; return *this; } void KeyValue::printTo(std::ostream& out) const { @@ -5049,26 +5148,26 @@ void swap(SortingColumn &a, SortingColumn &b) { swap(a.nulls_first, b.nulls_first); } -SortingColumn::SortingColumn(const SortingColumn& other141) noexcept { - column_idx = other141.column_idx; - descending = other141.descending; - nulls_first = other141.nulls_first; +SortingColumn::SortingColumn(const SortingColumn& other145) noexcept { + column_idx = other145.column_idx; + descending = other145.descending; + nulls_first = other145.nulls_first; } -SortingColumn::SortingColumn(SortingColumn&& other142) noexcept { - column_idx = other142.column_idx; - descending = other142.descending; - nulls_first = other142.nulls_first; +SortingColumn::SortingColumn(SortingColumn&& other146) noexcept { + column_idx = other146.column_idx; + descending = other146.descending; + nulls_first = other146.nulls_first; } -SortingColumn& SortingColumn::operator=(const SortingColumn& other143) noexcept { - column_idx = other143.column_idx; - descending = other143.descending; - nulls_first = other143.nulls_first; +SortingColumn& SortingColumn::operator=(const SortingColumn& other147) noexcept { + column_idx = other147.column_idx; + descending = other147.descending; + nulls_first = other147.nulls_first; return *this; } -SortingColumn& 
SortingColumn::operator=(SortingColumn&& other144) noexcept { - column_idx = other144.column_idx; - descending = other144.descending; - nulls_first = other144.nulls_first; +SortingColumn& SortingColumn::operator=(SortingColumn&& other148) noexcept { + column_idx = other148.column_idx; + descending = other148.descending; + nulls_first = other148.nulls_first; return *this; } void SortingColumn::printTo(std::ostream& out) const { @@ -5129,9 +5228,9 @@ uint32_t PageEncodingStats::read(::apache::thrift::protocol::TProtocol* iprot) { { case 1: if (ftype == ::apache::thrift::protocol::T_I32) { - int32_t ecast145; - xfer += iprot->readI32(ecast145); - this->page_type = static_cast(ecast145); + int32_t ecast149; + xfer += iprot->readI32(ecast149); + this->page_type = static_cast(ecast149); isset_page_type = true; } else { xfer += iprot->skip(ftype); @@ -5139,9 +5238,9 @@ uint32_t PageEncodingStats::read(::apache::thrift::protocol::TProtocol* iprot) { break; case 2: if (ftype == ::apache::thrift::protocol::T_I32) { - int32_t ecast146; - xfer += iprot->readI32(ecast146); - this->encoding = static_cast(ecast146); + int32_t ecast150; + xfer += iprot->readI32(ecast150); + this->encoding = static_cast(ecast150); isset_encoding = true; } else { xfer += iprot->skip(ftype); @@ -5202,26 +5301,26 @@ void swap(PageEncodingStats &a, PageEncodingStats &b) { swap(a.count, b.count); } -PageEncodingStats::PageEncodingStats(const PageEncodingStats& other147) noexcept { - page_type = other147.page_type; - encoding = other147.encoding; - count = other147.count; +PageEncodingStats::PageEncodingStats(const PageEncodingStats& other151) noexcept { + page_type = other151.page_type; + encoding = other151.encoding; + count = other151.count; } -PageEncodingStats::PageEncodingStats(PageEncodingStats&& other148) noexcept { - page_type = other148.page_type; - encoding = other148.encoding; - count = other148.count; +PageEncodingStats::PageEncodingStats(PageEncodingStats&& other152) noexcept { + page_type 
= other152.page_type; + encoding = other152.encoding; + count = other152.count; } -PageEncodingStats& PageEncodingStats::operator=(const PageEncodingStats& other149) noexcept { - page_type = other149.page_type; - encoding = other149.encoding; - count = other149.count; +PageEncodingStats& PageEncodingStats::operator=(const PageEncodingStats& other153) noexcept { + page_type = other153.page_type; + encoding = other153.encoding; + count = other153.count; return *this; } -PageEncodingStats& PageEncodingStats::operator=(PageEncodingStats&& other150) noexcept { - page_type = other150.page_type; - encoding = other150.encoding; - count = other150.count; +PageEncodingStats& PageEncodingStats::operator=(PageEncodingStats&& other154) noexcept { + page_type = other154.page_type; + encoding = other154.encoding; + count = other154.count; return *this; } void PageEncodingStats::printTo(std::ostream& out) const { @@ -5337,9 +5436,9 @@ uint32_t ColumnMetaData::read(::apache::thrift::protocol::TProtocol* iprot) { { case 1: if (ftype == ::apache::thrift::protocol::T_I32) { - int32_t ecast151; - xfer += iprot->readI32(ecast151); - this->type = static_cast(ecast151); + int32_t ecast155; + xfer += iprot->readI32(ecast155); + this->type = static_cast(ecast155); isset_type = true; } else { xfer += iprot->skip(ftype); @@ -5349,16 +5448,16 @@ uint32_t ColumnMetaData::read(::apache::thrift::protocol::TProtocol* iprot) { if (ftype == ::apache::thrift::protocol::T_LIST) { { this->encodings.clear(); - uint32_t _size152; - ::apache::thrift::protocol::TType _etype155; - xfer += iprot->readListBegin(_etype155, _size152); - this->encodings.resize(_size152); - uint32_t _i156; - for (_i156 = 0; _i156 < _size152; ++_i156) + uint32_t _size156; + ::apache::thrift::protocol::TType _etype159; + xfer += iprot->readListBegin(_etype159, _size156); + this->encodings.resize(_size156); + uint32_t _i160; + for (_i160 = 0; _i160 < _size156; ++_i160) { - int32_t ecast157; - xfer += iprot->readI32(ecast157); - 
this->encodings[_i156] = static_cast(ecast157); + int32_t ecast161; + xfer += iprot->readI32(ecast161); + this->encodings[_i160] = static_cast(ecast161); } xfer += iprot->readListEnd(); } @@ -5371,14 +5470,14 @@ uint32_t ColumnMetaData::read(::apache::thrift::protocol::TProtocol* iprot) { if (ftype == ::apache::thrift::protocol::T_LIST) { { this->path_in_schema.clear(); - uint32_t _size158; - ::apache::thrift::protocol::TType _etype161; - xfer += iprot->readListBegin(_etype161, _size158); - this->path_in_schema.resize(_size158); - uint32_t _i162; - for (_i162 = 0; _i162 < _size158; ++_i162) + uint32_t _size162; + ::apache::thrift::protocol::TType _etype165; + xfer += iprot->readListBegin(_etype165, _size162); + this->path_in_schema.resize(_size162); + uint32_t _i166; + for (_i166 = 0; _i166 < _size162; ++_i166) { - xfer += iprot->readString(this->path_in_schema[_i162]); + xfer += iprot->readString(this->path_in_schema[_i166]); } xfer += iprot->readListEnd(); } @@ -5389,9 +5488,9 @@ uint32_t ColumnMetaData::read(::apache::thrift::protocol::TProtocol* iprot) { break; case 4: if (ftype == ::apache::thrift::protocol::T_I32) { - int32_t ecast163; - xfer += iprot->readI32(ecast163); - this->codec = static_cast(ecast163); + int32_t ecast167; + xfer += iprot->readI32(ecast167); + this->codec = static_cast(ecast167); isset_codec = true; } else { xfer += iprot->skip(ftype); @@ -5425,14 +5524,14 @@ uint32_t ColumnMetaData::read(::apache::thrift::protocol::TProtocol* iprot) { if (ftype == ::apache::thrift::protocol::T_LIST) { { this->key_value_metadata.clear(); - uint32_t _size164; - ::apache::thrift::protocol::TType _etype167; - xfer += iprot->readListBegin(_etype167, _size164); - this->key_value_metadata.resize(_size164); - uint32_t _i168; - for (_i168 = 0; _i168 < _size164; ++_i168) + uint32_t _size168; + ::apache::thrift::protocol::TType _etype171; + xfer += iprot->readListBegin(_etype171, _size168); + this->key_value_metadata.resize(_size168); + uint32_t _i172; + for 
(_i172 = 0; _i172 < _size168; ++_i172) { - xfer += this->key_value_metadata[_i168].read(iprot); + xfer += this->key_value_metadata[_i172].read(iprot); } xfer += iprot->readListEnd(); } @@ -5477,14 +5576,14 @@ uint32_t ColumnMetaData::read(::apache::thrift::protocol::TProtocol* iprot) { if (ftype == ::apache::thrift::protocol::T_LIST) { { this->encoding_stats.clear(); - uint32_t _size169; - ::apache::thrift::protocol::TType _etype172; - xfer += iprot->readListBegin(_etype172, _size169); - this->encoding_stats.resize(_size169); - uint32_t _i173; - for (_i173 = 0; _i173 < _size169; ++_i173) + uint32_t _size173; + ::apache::thrift::protocol::TType _etype176; + xfer += iprot->readListBegin(_etype176, _size173); + this->encoding_stats.resize(_size173); + uint32_t _i177; + for (_i177 = 0; _i177 < _size173; ++_i177) { - xfer += this->encoding_stats[_i173].read(iprot); + xfer += this->encoding_stats[_i177].read(iprot); } xfer += iprot->readListEnd(); } @@ -5541,10 +5640,10 @@ uint32_t ColumnMetaData::write(::apache::thrift::protocol::TProtocol* oprot) con xfer += oprot->writeFieldBegin("encodings", ::apache::thrift::protocol::T_LIST, 2); { xfer += oprot->writeListBegin(::apache::thrift::protocol::T_I32, static_cast(this->encodings.size())); - std::vector ::const_iterator _iter174; - for (_iter174 = this->encodings.begin(); _iter174 != this->encodings.end(); ++_iter174) + std::vector ::const_iterator _iter178; + for (_iter178 = this->encodings.begin(); _iter178 != this->encodings.end(); ++_iter178) { - xfer += oprot->writeI32(static_cast((*_iter174))); + xfer += oprot->writeI32(static_cast((*_iter178))); } xfer += oprot->writeListEnd(); } @@ -5553,10 +5652,10 @@ uint32_t ColumnMetaData::write(::apache::thrift::protocol::TProtocol* oprot) con xfer += oprot->writeFieldBegin("path_in_schema", ::apache::thrift::protocol::T_LIST, 3); { xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast(this->path_in_schema.size())); - std::vector ::const_iterator 
_iter175; - for (_iter175 = this->path_in_schema.begin(); _iter175 != this->path_in_schema.end(); ++_iter175) + std::vector ::const_iterator _iter179; + for (_iter179 = this->path_in_schema.begin(); _iter179 != this->path_in_schema.end(); ++_iter179) { - xfer += oprot->writeString((*_iter175)); + xfer += oprot->writeString((*_iter179)); } xfer += oprot->writeListEnd(); } @@ -5582,10 +5681,10 @@ uint32_t ColumnMetaData::write(::apache::thrift::protocol::TProtocol* oprot) con xfer += oprot->writeFieldBegin("key_value_metadata", ::apache::thrift::protocol::T_LIST, 8); { xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast(this->key_value_metadata.size())); - std::vector ::const_iterator _iter176; - for (_iter176 = this->key_value_metadata.begin(); _iter176 != this->key_value_metadata.end(); ++_iter176) + std::vector ::const_iterator _iter180; + for (_iter180 = this->key_value_metadata.begin(); _iter180 != this->key_value_metadata.end(); ++_iter180) { - xfer += (*_iter176).write(oprot); + xfer += (*_iter180).write(oprot); } xfer += oprot->writeListEnd(); } @@ -5614,10 +5713,10 @@ uint32_t ColumnMetaData::write(::apache::thrift::protocol::TProtocol* oprot) con xfer += oprot->writeFieldBegin("encoding_stats", ::apache::thrift::protocol::T_LIST, 13); { xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast(this->encoding_stats.size())); - std::vector ::const_iterator _iter177; - for (_iter177 = this->encoding_stats.begin(); _iter177 != this->encoding_stats.end(); ++_iter177) + std::vector ::const_iterator _iter181; + for (_iter181 = this->encoding_stats.begin(); _iter181 != this->encoding_stats.end(); ++_iter181) { - xfer += (*_iter177).write(oprot); + xfer += (*_iter181).write(oprot); } xfer += oprot->writeListEnd(); } @@ -5652,74 +5751,74 @@ void swap(ColumnMetaData &a, ColumnMetaData &b) { swap(a.__isset, b.__isset); } -ColumnMetaData::ColumnMetaData(const ColumnMetaData& other178) { - type = other178.type; - 
encodings = other178.encodings; - path_in_schema = other178.path_in_schema; - codec = other178.codec; - num_values = other178.num_values; - total_uncompressed_size = other178.total_uncompressed_size; - total_compressed_size = other178.total_compressed_size; - key_value_metadata = other178.key_value_metadata; - data_page_offset = other178.data_page_offset; - index_page_offset = other178.index_page_offset; - dictionary_page_offset = other178.dictionary_page_offset; - statistics = other178.statistics; - encoding_stats = other178.encoding_stats; - bloom_filter_offset = other178.bloom_filter_offset; - __isset = other178.__isset; -} -ColumnMetaData::ColumnMetaData(ColumnMetaData&& other179) noexcept { - type = other179.type; - encodings = std::move(other179.encodings); - path_in_schema = std::move(other179.path_in_schema); - codec = other179.codec; - num_values = other179.num_values; - total_uncompressed_size = other179.total_uncompressed_size; - total_compressed_size = other179.total_compressed_size; - key_value_metadata = std::move(other179.key_value_metadata); - data_page_offset = other179.data_page_offset; - index_page_offset = other179.index_page_offset; - dictionary_page_offset = other179.dictionary_page_offset; - statistics = std::move(other179.statistics); - encoding_stats = std::move(other179.encoding_stats); - bloom_filter_offset = other179.bloom_filter_offset; - __isset = other179.__isset; -} -ColumnMetaData& ColumnMetaData::operator=(const ColumnMetaData& other180) { - type = other180.type; - encodings = other180.encodings; - path_in_schema = other180.path_in_schema; - codec = other180.codec; - num_values = other180.num_values; - total_uncompressed_size = other180.total_uncompressed_size; - total_compressed_size = other180.total_compressed_size; - key_value_metadata = other180.key_value_metadata; - data_page_offset = other180.data_page_offset; - index_page_offset = other180.index_page_offset; - dictionary_page_offset = other180.dictionary_page_offset; - 
statistics = other180.statistics; - encoding_stats = other180.encoding_stats; - bloom_filter_offset = other180.bloom_filter_offset; - __isset = other180.__isset; +ColumnMetaData::ColumnMetaData(const ColumnMetaData& other182) { + type = other182.type; + encodings = other182.encodings; + path_in_schema = other182.path_in_schema; + codec = other182.codec; + num_values = other182.num_values; + total_uncompressed_size = other182.total_uncompressed_size; + total_compressed_size = other182.total_compressed_size; + key_value_metadata = other182.key_value_metadata; + data_page_offset = other182.data_page_offset; + index_page_offset = other182.index_page_offset; + dictionary_page_offset = other182.dictionary_page_offset; + statistics = other182.statistics; + encoding_stats = other182.encoding_stats; + bloom_filter_offset = other182.bloom_filter_offset; + __isset = other182.__isset; +} +ColumnMetaData::ColumnMetaData(ColumnMetaData&& other183) noexcept { + type = other183.type; + encodings = std::move(other183.encodings); + path_in_schema = std::move(other183.path_in_schema); + codec = other183.codec; + num_values = other183.num_values; + total_uncompressed_size = other183.total_uncompressed_size; + total_compressed_size = other183.total_compressed_size; + key_value_metadata = std::move(other183.key_value_metadata); + data_page_offset = other183.data_page_offset; + index_page_offset = other183.index_page_offset; + dictionary_page_offset = other183.dictionary_page_offset; + statistics = std::move(other183.statistics); + encoding_stats = std::move(other183.encoding_stats); + bloom_filter_offset = other183.bloom_filter_offset; + __isset = other183.__isset; +} +ColumnMetaData& ColumnMetaData::operator=(const ColumnMetaData& other184) { + type = other184.type; + encodings = other184.encodings; + path_in_schema = other184.path_in_schema; + codec = other184.codec; + num_values = other184.num_values; + total_uncompressed_size = other184.total_uncompressed_size; + 
total_compressed_size = other184.total_compressed_size; + key_value_metadata = other184.key_value_metadata; + data_page_offset = other184.data_page_offset; + index_page_offset = other184.index_page_offset; + dictionary_page_offset = other184.dictionary_page_offset; + statistics = other184.statistics; + encoding_stats = other184.encoding_stats; + bloom_filter_offset = other184.bloom_filter_offset; + __isset = other184.__isset; return *this; } -ColumnMetaData& ColumnMetaData::operator=(ColumnMetaData&& other181) noexcept { - type = other181.type; - encodings = std::move(other181.encodings); - path_in_schema = std::move(other181.path_in_schema); - codec = other181.codec; - num_values = other181.num_values; - total_uncompressed_size = other181.total_uncompressed_size; - total_compressed_size = other181.total_compressed_size; - key_value_metadata = std::move(other181.key_value_metadata); - data_page_offset = other181.data_page_offset; - index_page_offset = other181.index_page_offset; - dictionary_page_offset = other181.dictionary_page_offset; - statistics = std::move(other181.statistics); - encoding_stats = std::move(other181.encoding_stats); - bloom_filter_offset = other181.bloom_filter_offset; - __isset = other181.__isset; +ColumnMetaData& ColumnMetaData::operator=(ColumnMetaData&& other185) noexcept { + type = other185.type; + encodings = std::move(other185.encodings); + path_in_schema = std::move(other185.path_in_schema); + codec = other185.codec; + num_values = other185.num_values; + total_uncompressed_size = other185.total_uncompressed_size; + total_compressed_size = other185.total_compressed_size; + key_value_metadata = std::move(other185.key_value_metadata); + data_page_offset = other185.data_page_offset; + index_page_offset = other185.index_page_offset; + dictionary_page_offset = other185.dictionary_page_offset; + statistics = std::move(other185.statistics); + encoding_stats = std::move(other185.encoding_stats); + bloom_filter_offset = 
other185.bloom_filter_offset; + __isset = other185.__isset; return *this; } void ColumnMetaData::printTo(std::ostream& out) const { @@ -5797,18 +5896,18 @@ void swap(EncryptionWithFooterKey &a, EncryptionWithFooterKey &b) { (void) b; } -EncryptionWithFooterKey::EncryptionWithFooterKey(const EncryptionWithFooterKey& other182) noexcept { - (void) other182; +EncryptionWithFooterKey::EncryptionWithFooterKey(const EncryptionWithFooterKey& other186) noexcept { + (void) other186; } -EncryptionWithFooterKey::EncryptionWithFooterKey(EncryptionWithFooterKey&& other183) noexcept { - (void) other183; +EncryptionWithFooterKey::EncryptionWithFooterKey(EncryptionWithFooterKey&& other187) noexcept { + (void) other187; } -EncryptionWithFooterKey& EncryptionWithFooterKey::operator=(const EncryptionWithFooterKey& other184) noexcept { - (void) other184; +EncryptionWithFooterKey& EncryptionWithFooterKey::operator=(const EncryptionWithFooterKey& other188) noexcept { + (void) other188; return *this; } -EncryptionWithFooterKey& EncryptionWithFooterKey::operator=(EncryptionWithFooterKey&& other185) noexcept { - (void) other185; +EncryptionWithFooterKey& EncryptionWithFooterKey::operator=(EncryptionWithFooterKey&& other189) noexcept { + (void) other189; return *this; } void EncryptionWithFooterKey::printTo(std::ostream& out) const { @@ -5863,14 +5962,14 @@ uint32_t EncryptionWithColumnKey::read(::apache::thrift::protocol::TProtocol* ip if (ftype == ::apache::thrift::protocol::T_LIST) { { this->path_in_schema.clear(); - uint32_t _size186; - ::apache::thrift::protocol::TType _etype189; - xfer += iprot->readListBegin(_etype189, _size186); - this->path_in_schema.resize(_size186); - uint32_t _i190; - for (_i190 = 0; _i190 < _size186; ++_i190) + uint32_t _size190; + ::apache::thrift::protocol::TType _etype193; + xfer += iprot->readListBegin(_etype193, _size190); + this->path_in_schema.resize(_size190); + uint32_t _i194; + for (_i194 = 0; _i194 < _size190; ++_i194) { - xfer += 
iprot->readString(this->path_in_schema[_i190]); + xfer += iprot->readString(this->path_in_schema[_i194]); } xfer += iprot->readListEnd(); } @@ -5909,10 +6008,10 @@ uint32_t EncryptionWithColumnKey::write(::apache::thrift::protocol::TProtocol* o xfer += oprot->writeFieldBegin("path_in_schema", ::apache::thrift::protocol::T_LIST, 1); { xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast(this->path_in_schema.size())); - std::vector ::const_iterator _iter191; - for (_iter191 = this->path_in_schema.begin(); _iter191 != this->path_in_schema.end(); ++_iter191) + std::vector ::const_iterator _iter195; + for (_iter195 = this->path_in_schema.begin(); _iter195 != this->path_in_schema.end(); ++_iter195) { - xfer += oprot->writeString((*_iter191)); + xfer += oprot->writeString((*_iter195)); } xfer += oprot->writeListEnd(); } @@ -5935,26 +6034,26 @@ void swap(EncryptionWithColumnKey &a, EncryptionWithColumnKey &b) { swap(a.__isset, b.__isset); } -EncryptionWithColumnKey::EncryptionWithColumnKey(const EncryptionWithColumnKey& other192) { - path_in_schema = other192.path_in_schema; - key_metadata = other192.key_metadata; - __isset = other192.__isset; +EncryptionWithColumnKey::EncryptionWithColumnKey(const EncryptionWithColumnKey& other196) { + path_in_schema = other196.path_in_schema; + key_metadata = other196.key_metadata; + __isset = other196.__isset; } -EncryptionWithColumnKey::EncryptionWithColumnKey(EncryptionWithColumnKey&& other193) noexcept { - path_in_schema = std::move(other193.path_in_schema); - key_metadata = std::move(other193.key_metadata); - __isset = other193.__isset; +EncryptionWithColumnKey::EncryptionWithColumnKey(EncryptionWithColumnKey&& other197) noexcept { + path_in_schema = std::move(other197.path_in_schema); + key_metadata = std::move(other197.key_metadata); + __isset = other197.__isset; } -EncryptionWithColumnKey& EncryptionWithColumnKey::operator=(const EncryptionWithColumnKey& other194) { - path_in_schema = 
other194.path_in_schema; - key_metadata = other194.key_metadata; - __isset = other194.__isset; +EncryptionWithColumnKey& EncryptionWithColumnKey::operator=(const EncryptionWithColumnKey& other198) { + path_in_schema = other198.path_in_schema; + key_metadata = other198.key_metadata; + __isset = other198.__isset; return *this; } -EncryptionWithColumnKey& EncryptionWithColumnKey::operator=(EncryptionWithColumnKey&& other195) noexcept { - path_in_schema = std::move(other195.path_in_schema); - key_metadata = std::move(other195.key_metadata); - __isset = other195.__isset; +EncryptionWithColumnKey& EncryptionWithColumnKey::operator=(EncryptionWithColumnKey&& other199) noexcept { + path_in_schema = std::move(other199.path_in_schema); + key_metadata = std::move(other199.key_metadata); + __isset = other199.__isset; return *this; } void EncryptionWithColumnKey::printTo(std::ostream& out) const { @@ -6062,26 +6161,26 @@ void swap(ColumnCryptoMetaData &a, ColumnCryptoMetaData &b) { swap(a.__isset, b.__isset); } -ColumnCryptoMetaData::ColumnCryptoMetaData(const ColumnCryptoMetaData& other196) { - ENCRYPTION_WITH_FOOTER_KEY = other196.ENCRYPTION_WITH_FOOTER_KEY; - ENCRYPTION_WITH_COLUMN_KEY = other196.ENCRYPTION_WITH_COLUMN_KEY; - __isset = other196.__isset; +ColumnCryptoMetaData::ColumnCryptoMetaData(const ColumnCryptoMetaData& other200) { + ENCRYPTION_WITH_FOOTER_KEY = other200.ENCRYPTION_WITH_FOOTER_KEY; + ENCRYPTION_WITH_COLUMN_KEY = other200.ENCRYPTION_WITH_COLUMN_KEY; + __isset = other200.__isset; } -ColumnCryptoMetaData::ColumnCryptoMetaData(ColumnCryptoMetaData&& other197) noexcept { - ENCRYPTION_WITH_FOOTER_KEY = std::move(other197.ENCRYPTION_WITH_FOOTER_KEY); - ENCRYPTION_WITH_COLUMN_KEY = std::move(other197.ENCRYPTION_WITH_COLUMN_KEY); - __isset = other197.__isset; +ColumnCryptoMetaData::ColumnCryptoMetaData(ColumnCryptoMetaData&& other201) noexcept { + ENCRYPTION_WITH_FOOTER_KEY = std::move(other201.ENCRYPTION_WITH_FOOTER_KEY); + ENCRYPTION_WITH_COLUMN_KEY = 
std::move(other201.ENCRYPTION_WITH_COLUMN_KEY); + __isset = other201.__isset; } -ColumnCryptoMetaData& ColumnCryptoMetaData::operator=(const ColumnCryptoMetaData& other198) { - ENCRYPTION_WITH_FOOTER_KEY = other198.ENCRYPTION_WITH_FOOTER_KEY; - ENCRYPTION_WITH_COLUMN_KEY = other198.ENCRYPTION_WITH_COLUMN_KEY; - __isset = other198.__isset; +ColumnCryptoMetaData& ColumnCryptoMetaData::operator=(const ColumnCryptoMetaData& other202) { + ENCRYPTION_WITH_FOOTER_KEY = other202.ENCRYPTION_WITH_FOOTER_KEY; + ENCRYPTION_WITH_COLUMN_KEY = other202.ENCRYPTION_WITH_COLUMN_KEY; + __isset = other202.__isset; return *this; } -ColumnCryptoMetaData& ColumnCryptoMetaData::operator=(ColumnCryptoMetaData&& other199) noexcept { - ENCRYPTION_WITH_FOOTER_KEY = std::move(other199.ENCRYPTION_WITH_FOOTER_KEY); - ENCRYPTION_WITH_COLUMN_KEY = std::move(other199.ENCRYPTION_WITH_COLUMN_KEY); - __isset = other199.__isset; +ColumnCryptoMetaData& ColumnCryptoMetaData::operator=(ColumnCryptoMetaData&& other203) noexcept { + ENCRYPTION_WITH_FOOTER_KEY = std::move(other203.ENCRYPTION_WITH_FOOTER_KEY); + ENCRYPTION_WITH_COLUMN_KEY = std::move(other203.ENCRYPTION_WITH_COLUMN_KEY); + __isset = other203.__isset; return *this; } void ColumnCryptoMetaData::printTo(std::ostream& out) const { @@ -6323,54 +6422,54 @@ void swap(ColumnChunk &a, ColumnChunk &b) { swap(a.__isset, b.__isset); } -ColumnChunk::ColumnChunk(const ColumnChunk& other200) { - file_path = other200.file_path; - file_offset = other200.file_offset; - meta_data = other200.meta_data; - offset_index_offset = other200.offset_index_offset; - offset_index_length = other200.offset_index_length; - column_index_offset = other200.column_index_offset; - column_index_length = other200.column_index_length; - crypto_metadata = other200.crypto_metadata; - encrypted_column_metadata = other200.encrypted_column_metadata; - __isset = other200.__isset; -} -ColumnChunk::ColumnChunk(ColumnChunk&& other201) noexcept { - file_path = std::move(other201.file_path); - 
file_offset = other201.file_offset; - meta_data = std::move(other201.meta_data); - offset_index_offset = other201.offset_index_offset; - offset_index_length = other201.offset_index_length; - column_index_offset = other201.column_index_offset; - column_index_length = other201.column_index_length; - crypto_metadata = std::move(other201.crypto_metadata); - encrypted_column_metadata = std::move(other201.encrypted_column_metadata); - __isset = other201.__isset; -} -ColumnChunk& ColumnChunk::operator=(const ColumnChunk& other202) { - file_path = other202.file_path; - file_offset = other202.file_offset; - meta_data = other202.meta_data; - offset_index_offset = other202.offset_index_offset; - offset_index_length = other202.offset_index_length; - column_index_offset = other202.column_index_offset; - column_index_length = other202.column_index_length; - crypto_metadata = other202.crypto_metadata; - encrypted_column_metadata = other202.encrypted_column_metadata; - __isset = other202.__isset; +ColumnChunk::ColumnChunk(const ColumnChunk& other204) { + file_path = other204.file_path; + file_offset = other204.file_offset; + meta_data = other204.meta_data; + offset_index_offset = other204.offset_index_offset; + offset_index_length = other204.offset_index_length; + column_index_offset = other204.column_index_offset; + column_index_length = other204.column_index_length; + crypto_metadata = other204.crypto_metadata; + encrypted_column_metadata = other204.encrypted_column_metadata; + __isset = other204.__isset; +} +ColumnChunk::ColumnChunk(ColumnChunk&& other205) noexcept { + file_path = std::move(other205.file_path); + file_offset = other205.file_offset; + meta_data = std::move(other205.meta_data); + offset_index_offset = other205.offset_index_offset; + offset_index_length = other205.offset_index_length; + column_index_offset = other205.column_index_offset; + column_index_length = other205.column_index_length; + crypto_metadata = std::move(other205.crypto_metadata); + 
encrypted_column_metadata = std::move(other205.encrypted_column_metadata); + __isset = other205.__isset; +} +ColumnChunk& ColumnChunk::operator=(const ColumnChunk& other206) { + file_path = other206.file_path; + file_offset = other206.file_offset; + meta_data = other206.meta_data; + offset_index_offset = other206.offset_index_offset; + offset_index_length = other206.offset_index_length; + column_index_offset = other206.column_index_offset; + column_index_length = other206.column_index_length; + crypto_metadata = other206.crypto_metadata; + encrypted_column_metadata = other206.encrypted_column_metadata; + __isset = other206.__isset; return *this; } -ColumnChunk& ColumnChunk::operator=(ColumnChunk&& other203) noexcept { - file_path = std::move(other203.file_path); - file_offset = other203.file_offset; - meta_data = std::move(other203.meta_data); - offset_index_offset = other203.offset_index_offset; - offset_index_length = other203.offset_index_length; - column_index_offset = other203.column_index_offset; - column_index_length = other203.column_index_length; - crypto_metadata = std::move(other203.crypto_metadata); - encrypted_column_metadata = std::move(other203.encrypted_column_metadata); - __isset = other203.__isset; +ColumnChunk& ColumnChunk::operator=(ColumnChunk&& other207) noexcept { + file_path = std::move(other207.file_path); + file_offset = other207.file_offset; + meta_data = std::move(other207.meta_data); + offset_index_offset = other207.offset_index_offset; + offset_index_length = other207.offset_index_length; + column_index_offset = other207.column_index_offset; + column_index_length = other207.column_index_length; + crypto_metadata = std::move(other207.crypto_metadata); + encrypted_column_metadata = std::move(other207.encrypted_column_metadata); + __isset = other207.__isset; return *this; } void ColumnChunk::printTo(std::ostream& out) const { @@ -6459,14 +6558,14 @@ uint32_t RowGroup::read(::apache::thrift::protocol::TProtocol* iprot) { if (ftype == 
::apache::thrift::protocol::T_LIST) { { this->columns.clear(); - uint32_t _size204; - ::apache::thrift::protocol::TType _etype207; - xfer += iprot->readListBegin(_etype207, _size204); - this->columns.resize(_size204); - uint32_t _i208; - for (_i208 = 0; _i208 < _size204; ++_i208) + uint32_t _size208; + ::apache::thrift::protocol::TType _etype211; + xfer += iprot->readListBegin(_etype211, _size208); + this->columns.resize(_size208); + uint32_t _i212; + for (_i212 = 0; _i212 < _size208; ++_i212) { - xfer += this->columns[_i208].read(iprot); + xfer += this->columns[_i212].read(iprot); } xfer += iprot->readListEnd(); } @@ -6495,14 +6594,14 @@ uint32_t RowGroup::read(::apache::thrift::protocol::TProtocol* iprot) { if (ftype == ::apache::thrift::protocol::T_LIST) { { this->sorting_columns.clear(); - uint32_t _size209; - ::apache::thrift::protocol::TType _etype212; - xfer += iprot->readListBegin(_etype212, _size209); - this->sorting_columns.resize(_size209); - uint32_t _i213; - for (_i213 = 0; _i213 < _size209; ++_i213) + uint32_t _size213; + ::apache::thrift::protocol::TType _etype216; + xfer += iprot->readListBegin(_etype216, _size213); + this->sorting_columns.resize(_size213); + uint32_t _i217; + for (_i217 = 0; _i217 < _size213; ++_i217) { - xfer += this->sorting_columns[_i213].read(iprot); + xfer += this->sorting_columns[_i217].read(iprot); } xfer += iprot->readListEnd(); } @@ -6561,10 +6660,10 @@ uint32_t RowGroup::write(::apache::thrift::protocol::TProtocol* oprot) const { xfer += oprot->writeFieldBegin("columns", ::apache::thrift::protocol::T_LIST, 1); { xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast(this->columns.size())); - std::vector ::const_iterator _iter214; - for (_iter214 = this->columns.begin(); _iter214 != this->columns.end(); ++_iter214) + std::vector ::const_iterator _iter218; + for (_iter218 = this->columns.begin(); _iter218 != this->columns.end(); ++_iter218) { - xfer += (*_iter214).write(oprot); + xfer += 
(*_iter218).write(oprot); } xfer += oprot->writeListEnd(); } @@ -6582,10 +6681,10 @@ uint32_t RowGroup::write(::apache::thrift::protocol::TProtocol* oprot) const { xfer += oprot->writeFieldBegin("sorting_columns", ::apache::thrift::protocol::T_LIST, 4); { xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast(this->sorting_columns.size())); - std::vector ::const_iterator _iter215; - for (_iter215 = this->sorting_columns.begin(); _iter215 != this->sorting_columns.end(); ++_iter215) + std::vector ::const_iterator _iter219; + for (_iter219 = this->sorting_columns.begin(); _iter219 != this->sorting_columns.end(); ++_iter219) { - xfer += (*_iter215).write(oprot); + xfer += (*_iter219).write(oprot); } xfer += oprot->writeListEnd(); } @@ -6623,46 +6722,46 @@ void swap(RowGroup &a, RowGroup &b) { swap(a.__isset, b.__isset); } -RowGroup::RowGroup(const RowGroup& other216) { - columns = other216.columns; - total_byte_size = other216.total_byte_size; - num_rows = other216.num_rows; - sorting_columns = other216.sorting_columns; - file_offset = other216.file_offset; - total_compressed_size = other216.total_compressed_size; - ordinal = other216.ordinal; - __isset = other216.__isset; -} -RowGroup::RowGroup(RowGroup&& other217) noexcept { - columns = std::move(other217.columns); - total_byte_size = other217.total_byte_size; - num_rows = other217.num_rows; - sorting_columns = std::move(other217.sorting_columns); - file_offset = other217.file_offset; - total_compressed_size = other217.total_compressed_size; - ordinal = other217.ordinal; - __isset = other217.__isset; -} -RowGroup& RowGroup::operator=(const RowGroup& other218) { - columns = other218.columns; - total_byte_size = other218.total_byte_size; - num_rows = other218.num_rows; - sorting_columns = other218.sorting_columns; - file_offset = other218.file_offset; - total_compressed_size = other218.total_compressed_size; - ordinal = other218.ordinal; - __isset = other218.__isset; +RowGroup::RowGroup(const 
RowGroup& other220) { + columns = other220.columns; + total_byte_size = other220.total_byte_size; + num_rows = other220.num_rows; + sorting_columns = other220.sorting_columns; + file_offset = other220.file_offset; + total_compressed_size = other220.total_compressed_size; + ordinal = other220.ordinal; + __isset = other220.__isset; +} +RowGroup::RowGroup(RowGroup&& other221) noexcept { + columns = std::move(other221.columns); + total_byte_size = other221.total_byte_size; + num_rows = other221.num_rows; + sorting_columns = std::move(other221.sorting_columns); + file_offset = other221.file_offset; + total_compressed_size = other221.total_compressed_size; + ordinal = other221.ordinal; + __isset = other221.__isset; +} +RowGroup& RowGroup::operator=(const RowGroup& other222) { + columns = other222.columns; + total_byte_size = other222.total_byte_size; + num_rows = other222.num_rows; + sorting_columns = other222.sorting_columns; + file_offset = other222.file_offset; + total_compressed_size = other222.total_compressed_size; + ordinal = other222.ordinal; + __isset = other222.__isset; return *this; } -RowGroup& RowGroup::operator=(RowGroup&& other219) noexcept { - columns = std::move(other219.columns); - total_byte_size = other219.total_byte_size; - num_rows = other219.num_rows; - sorting_columns = std::move(other219.sorting_columns); - file_offset = other219.file_offset; - total_compressed_size = other219.total_compressed_size; - ordinal = other219.ordinal; - __isset = other219.__isset; +RowGroup& RowGroup::operator=(RowGroup&& other223) noexcept { + columns = std::move(other223.columns); + total_byte_size = other223.total_byte_size; + num_rows = other223.num_rows; + sorting_columns = std::move(other223.sorting_columns); + file_offset = other223.file_offset; + total_compressed_size = other223.total_compressed_size; + ordinal = other223.ordinal; + __isset = other223.__isset; return *this; } void RowGroup::printTo(std::ostream& out) const { @@ -6733,18 +6832,18 @@ void 
swap(TypeDefinedOrder &a, TypeDefinedOrder &b) { (void) b; } -TypeDefinedOrder::TypeDefinedOrder(const TypeDefinedOrder& other220) noexcept { - (void) other220; +TypeDefinedOrder::TypeDefinedOrder(const TypeDefinedOrder& other224) noexcept { + (void) other224; } -TypeDefinedOrder::TypeDefinedOrder(TypeDefinedOrder&& other221) noexcept { - (void) other221; +TypeDefinedOrder::TypeDefinedOrder(TypeDefinedOrder&& other225) noexcept { + (void) other225; } -TypeDefinedOrder& TypeDefinedOrder::operator=(const TypeDefinedOrder& other222) noexcept { - (void) other222; +TypeDefinedOrder& TypeDefinedOrder::operator=(const TypeDefinedOrder& other226) noexcept { + (void) other226; return *this; } -TypeDefinedOrder& TypeDefinedOrder::operator=(TypeDefinedOrder&& other223) noexcept { - (void) other223; +TypeDefinedOrder& TypeDefinedOrder::operator=(TypeDefinedOrder&& other227) noexcept { + (void) other227; return *this; } void TypeDefinedOrder::printTo(std::ostream& out) const { @@ -6831,22 +6930,22 @@ void swap(ColumnOrder &a, ColumnOrder &b) { swap(a.__isset, b.__isset); } -ColumnOrder::ColumnOrder(const ColumnOrder& other224) noexcept { - TYPE_ORDER = other224.TYPE_ORDER; - __isset = other224.__isset; +ColumnOrder::ColumnOrder(const ColumnOrder& other228) noexcept { + TYPE_ORDER = other228.TYPE_ORDER; + __isset = other228.__isset; } -ColumnOrder::ColumnOrder(ColumnOrder&& other225) noexcept { - TYPE_ORDER = std::move(other225.TYPE_ORDER); - __isset = other225.__isset; +ColumnOrder::ColumnOrder(ColumnOrder&& other229) noexcept { + TYPE_ORDER = std::move(other229.TYPE_ORDER); + __isset = other229.__isset; } -ColumnOrder& ColumnOrder::operator=(const ColumnOrder& other226) noexcept { - TYPE_ORDER = other226.TYPE_ORDER; - __isset = other226.__isset; +ColumnOrder& ColumnOrder::operator=(const ColumnOrder& other230) noexcept { + TYPE_ORDER = other230.TYPE_ORDER; + __isset = other230.__isset; return *this; } -ColumnOrder& ColumnOrder::operator=(ColumnOrder&& other227) noexcept { - 
TYPE_ORDER = std::move(other227.TYPE_ORDER); - __isset = other227.__isset; +ColumnOrder& ColumnOrder::operator=(ColumnOrder&& other231) noexcept { + TYPE_ORDER = std::move(other231.TYPE_ORDER); + __isset = other231.__isset; return *this; } void ColumnOrder::printTo(std::ostream& out) const { @@ -6974,26 +7073,26 @@ void swap(PageLocation &a, PageLocation &b) { swap(a.first_row_index, b.first_row_index); } -PageLocation::PageLocation(const PageLocation& other228) noexcept { - offset = other228.offset; - compressed_page_size = other228.compressed_page_size; - first_row_index = other228.first_row_index; +PageLocation::PageLocation(const PageLocation& other232) noexcept { + offset = other232.offset; + compressed_page_size = other232.compressed_page_size; + first_row_index = other232.first_row_index; } -PageLocation::PageLocation(PageLocation&& other229) noexcept { - offset = other229.offset; - compressed_page_size = other229.compressed_page_size; - first_row_index = other229.first_row_index; +PageLocation::PageLocation(PageLocation&& other233) noexcept { + offset = other233.offset; + compressed_page_size = other233.compressed_page_size; + first_row_index = other233.first_row_index; } -PageLocation& PageLocation::operator=(const PageLocation& other230) noexcept { - offset = other230.offset; - compressed_page_size = other230.compressed_page_size; - first_row_index = other230.first_row_index; +PageLocation& PageLocation::operator=(const PageLocation& other234) noexcept { + offset = other234.offset; + compressed_page_size = other234.compressed_page_size; + first_row_index = other234.first_row_index; return *this; } -PageLocation& PageLocation::operator=(PageLocation&& other231) noexcept { - offset = other231.offset; - compressed_page_size = other231.compressed_page_size; - first_row_index = other231.first_row_index; +PageLocation& PageLocation::operator=(PageLocation&& other235) noexcept { + offset = other235.offset; + compressed_page_size = other235.compressed_page_size; 
+ first_row_index = other235.first_row_index; return *this; } void PageLocation::printTo(std::ostream& out) const { @@ -7046,14 +7145,14 @@ uint32_t OffsetIndex::read(::apache::thrift::protocol::TProtocol* iprot) { if (ftype == ::apache::thrift::protocol::T_LIST) { { this->page_locations.clear(); - uint32_t _size232; - ::apache::thrift::protocol::TType _etype235; - xfer += iprot->readListBegin(_etype235, _size232); - this->page_locations.resize(_size232); - uint32_t _i236; - for (_i236 = 0; _i236 < _size232; ++_i236) + uint32_t _size236; + ::apache::thrift::protocol::TType _etype239; + xfer += iprot->readListBegin(_etype239, _size236); + this->page_locations.resize(_size236); + uint32_t _i240; + for (_i240 = 0; _i240 < _size236; ++_i240) { - xfer += this->page_locations[_i236].read(iprot); + xfer += this->page_locations[_i240].read(iprot); } xfer += iprot->readListEnd(); } @@ -7084,10 +7183,10 @@ uint32_t OffsetIndex::write(::apache::thrift::protocol::TProtocol* oprot) const xfer += oprot->writeFieldBegin("page_locations", ::apache::thrift::protocol::T_LIST, 1); { xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast(this->page_locations.size())); - std::vector ::const_iterator _iter237; - for (_iter237 = this->page_locations.begin(); _iter237 != this->page_locations.end(); ++_iter237) + std::vector ::const_iterator _iter241; + for (_iter241 = this->page_locations.begin(); _iter241 != this->page_locations.end(); ++_iter241) { - xfer += (*_iter237).write(oprot); + xfer += (*_iter241).write(oprot); } xfer += oprot->writeListEnd(); } @@ -7103,18 +7202,18 @@ void swap(OffsetIndex &a, OffsetIndex &b) { swap(a.page_locations, b.page_locations); } -OffsetIndex::OffsetIndex(const OffsetIndex& other238) { - page_locations = other238.page_locations; +OffsetIndex::OffsetIndex(const OffsetIndex& other242) { + page_locations = other242.page_locations; } -OffsetIndex::OffsetIndex(OffsetIndex&& other239) noexcept { - page_locations = 
std::move(other239.page_locations); +OffsetIndex::OffsetIndex(OffsetIndex&& other243) noexcept { + page_locations = std::move(other243.page_locations); } -OffsetIndex& OffsetIndex::operator=(const OffsetIndex& other240) { - page_locations = other240.page_locations; +OffsetIndex& OffsetIndex::operator=(const OffsetIndex& other244) { + page_locations = other244.page_locations; return *this; } -OffsetIndex& OffsetIndex::operator=(OffsetIndex&& other241) noexcept { - page_locations = std::move(other241.page_locations); +OffsetIndex& OffsetIndex::operator=(OffsetIndex&& other245) noexcept { + page_locations = std::move(other245.page_locations); return *this; } void OffsetIndex::printTo(std::ostream& out) const { @@ -7185,14 +7284,14 @@ uint32_t ColumnIndex::read(::apache::thrift::protocol::TProtocol* iprot) { if (ftype == ::apache::thrift::protocol::T_LIST) { { this->null_pages.clear(); - uint32_t _size242; - ::apache::thrift::protocol::TType _etype245; - xfer += iprot->readListBegin(_etype245, _size242); - this->null_pages.resize(_size242); - uint32_t _i246; - for (_i246 = 0; _i246 < _size242; ++_i246) + uint32_t _size246; + ::apache::thrift::protocol::TType _etype249; + xfer += iprot->readListBegin(_etype249, _size246); + this->null_pages.resize(_size246); + uint32_t _i250; + for (_i250 = 0; _i250 < _size246; ++_i250) { - xfer += iprot->readBool(this->null_pages[_i246]); + xfer += iprot->readBool(this->null_pages[_i250]); } xfer += iprot->readListEnd(); } @@ -7205,14 +7304,14 @@ uint32_t ColumnIndex::read(::apache::thrift::protocol::TProtocol* iprot) { if (ftype == ::apache::thrift::protocol::T_LIST) { { this->min_values.clear(); - uint32_t _size247; - ::apache::thrift::protocol::TType _etype250; - xfer += iprot->readListBegin(_etype250, _size247); - this->min_values.resize(_size247); - uint32_t _i251; - for (_i251 = 0; _i251 < _size247; ++_i251) + uint32_t _size251; + ::apache::thrift::protocol::TType _etype254; + xfer += iprot->readListBegin(_etype254, _size251); + 
this->min_values.resize(_size251); + uint32_t _i255; + for (_i255 = 0; _i255 < _size251; ++_i255) { - xfer += iprot->readBinary(this->min_values[_i251]); + xfer += iprot->readBinary(this->min_values[_i255]); } xfer += iprot->readListEnd(); } @@ -7225,14 +7324,14 @@ uint32_t ColumnIndex::read(::apache::thrift::protocol::TProtocol* iprot) { if (ftype == ::apache::thrift::protocol::T_LIST) { { this->max_values.clear(); - uint32_t _size252; - ::apache::thrift::protocol::TType _etype255; - xfer += iprot->readListBegin(_etype255, _size252); - this->max_values.resize(_size252); - uint32_t _i256; - for (_i256 = 0; _i256 < _size252; ++_i256) + uint32_t _size256; + ::apache::thrift::protocol::TType _etype259; + xfer += iprot->readListBegin(_etype259, _size256); + this->max_values.resize(_size256); + uint32_t _i260; + for (_i260 = 0; _i260 < _size256; ++_i260) { - xfer += iprot->readBinary(this->max_values[_i256]); + xfer += iprot->readBinary(this->max_values[_i260]); } xfer += iprot->readListEnd(); } @@ -7243,9 +7342,9 @@ uint32_t ColumnIndex::read(::apache::thrift::protocol::TProtocol* iprot) { break; case 4: if (ftype == ::apache::thrift::protocol::T_I32) { - int32_t ecast257; - xfer += iprot->readI32(ecast257); - this->boundary_order = static_cast(ecast257); + int32_t ecast261; + xfer += iprot->readI32(ecast261); + this->boundary_order = static_cast(ecast261); isset_boundary_order = true; } else { xfer += iprot->skip(ftype); @@ -7255,14 +7354,14 @@ uint32_t ColumnIndex::read(::apache::thrift::protocol::TProtocol* iprot) { if (ftype == ::apache::thrift::protocol::T_LIST) { { this->null_counts.clear(); - uint32_t _size258; - ::apache::thrift::protocol::TType _etype261; - xfer += iprot->readListBegin(_etype261, _size258); - this->null_counts.resize(_size258); - uint32_t _i262; - for (_i262 = 0; _i262 < _size258; ++_i262) + uint32_t _size262; + ::apache::thrift::protocol::TType _etype265; + xfer += iprot->readListBegin(_etype265, _size262); + 
this->null_counts.resize(_size262); + uint32_t _i266; + for (_i266 = 0; _i266 < _size262; ++_i266) { - xfer += iprot->readI64(this->null_counts[_i262]); + xfer += iprot->readI64(this->null_counts[_i266]); } xfer += iprot->readListEnd(); } @@ -7299,10 +7398,10 @@ uint32_t ColumnIndex::write(::apache::thrift::protocol::TProtocol* oprot) const xfer += oprot->writeFieldBegin("null_pages", ::apache::thrift::protocol::T_LIST, 1); { xfer += oprot->writeListBegin(::apache::thrift::protocol::T_BOOL, static_cast(this->null_pages.size())); - std::vector ::const_iterator _iter263; - for (_iter263 = this->null_pages.begin(); _iter263 != this->null_pages.end(); ++_iter263) + std::vector ::const_iterator _iter267; + for (_iter267 = this->null_pages.begin(); _iter267 != this->null_pages.end(); ++_iter267) { - xfer += oprot->writeBool((*_iter263)); + xfer += oprot->writeBool((*_iter267)); } xfer += oprot->writeListEnd(); } @@ -7311,10 +7410,10 @@ uint32_t ColumnIndex::write(::apache::thrift::protocol::TProtocol* oprot) const xfer += oprot->writeFieldBegin("min_values", ::apache::thrift::protocol::T_LIST, 2); { xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast(this->min_values.size())); - std::vector ::const_iterator _iter264; - for (_iter264 = this->min_values.begin(); _iter264 != this->min_values.end(); ++_iter264) + std::vector ::const_iterator _iter268; + for (_iter268 = this->min_values.begin(); _iter268 != this->min_values.end(); ++_iter268) { - xfer += oprot->writeBinary((*_iter264)); + xfer += oprot->writeBinary((*_iter268)); } xfer += oprot->writeListEnd(); } @@ -7323,10 +7422,10 @@ uint32_t ColumnIndex::write(::apache::thrift::protocol::TProtocol* oprot) const xfer += oprot->writeFieldBegin("max_values", ::apache::thrift::protocol::T_LIST, 3); { xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast(this->max_values.size())); - std::vector ::const_iterator _iter265; - for (_iter265 = this->max_values.begin(); _iter265 
!= this->max_values.end(); ++_iter265) + std::vector ::const_iterator _iter269; + for (_iter269 = this->max_values.begin(); _iter269 != this->max_values.end(); ++_iter269) { - xfer += oprot->writeBinary((*_iter265)); + xfer += oprot->writeBinary((*_iter269)); } xfer += oprot->writeListEnd(); } @@ -7340,10 +7439,10 @@ uint32_t ColumnIndex::write(::apache::thrift::protocol::TProtocol* oprot) const xfer += oprot->writeFieldBegin("null_counts", ::apache::thrift::protocol::T_LIST, 5); { xfer += oprot->writeListBegin(::apache::thrift::protocol::T_I64, static_cast(this->null_counts.size())); - std::vector ::const_iterator _iter266; - for (_iter266 = this->null_counts.begin(); _iter266 != this->null_counts.end(); ++_iter266) + std::vector ::const_iterator _iter270; + for (_iter270 = this->null_counts.begin(); _iter270 != this->null_counts.end(); ++_iter270) { - xfer += oprot->writeI64((*_iter266)); + xfer += oprot->writeI64((*_iter270)); } xfer += oprot->writeListEnd(); } @@ -7364,38 +7463,38 @@ void swap(ColumnIndex &a, ColumnIndex &b) { swap(a.__isset, b.__isset); } -ColumnIndex::ColumnIndex(const ColumnIndex& other267) { - null_pages = other267.null_pages; - min_values = other267.min_values; - max_values = other267.max_values; - boundary_order = other267.boundary_order; - null_counts = other267.null_counts; - __isset = other267.__isset; -} -ColumnIndex::ColumnIndex(ColumnIndex&& other268) noexcept { - null_pages = std::move(other268.null_pages); - min_values = std::move(other268.min_values); - max_values = std::move(other268.max_values); - boundary_order = other268.boundary_order; - null_counts = std::move(other268.null_counts); - __isset = other268.__isset; -} -ColumnIndex& ColumnIndex::operator=(const ColumnIndex& other269) { - null_pages = other269.null_pages; - min_values = other269.min_values; - max_values = other269.max_values; - boundary_order = other269.boundary_order; - null_counts = other269.null_counts; - __isset = other269.__isset; 
+ColumnIndex::ColumnIndex(const ColumnIndex& other271) { + null_pages = other271.null_pages; + min_values = other271.min_values; + max_values = other271.max_values; + boundary_order = other271.boundary_order; + null_counts = other271.null_counts; + __isset = other271.__isset; +} +ColumnIndex::ColumnIndex(ColumnIndex&& other272) noexcept { + null_pages = std::move(other272.null_pages); + min_values = std::move(other272.min_values); + max_values = std::move(other272.max_values); + boundary_order = other272.boundary_order; + null_counts = std::move(other272.null_counts); + __isset = other272.__isset; +} +ColumnIndex& ColumnIndex::operator=(const ColumnIndex& other273) { + null_pages = other273.null_pages; + min_values = other273.min_values; + max_values = other273.max_values; + boundary_order = other273.boundary_order; + null_counts = other273.null_counts; + __isset = other273.__isset; return *this; } -ColumnIndex& ColumnIndex::operator=(ColumnIndex&& other270) noexcept { - null_pages = std::move(other270.null_pages); - min_values = std::move(other270.min_values); - max_values = std::move(other270.max_values); - boundary_order = other270.boundary_order; - null_counts = std::move(other270.null_counts); - __isset = other270.__isset; +ColumnIndex& ColumnIndex::operator=(ColumnIndex&& other274) noexcept { + null_pages = std::move(other274.null_pages); + min_values = std::move(other274.min_values); + max_values = std::move(other274.max_values); + boundary_order = other274.boundary_order; + null_counts = std::move(other274.null_counts); + __isset = other274.__isset; return *this; } void ColumnIndex::printTo(std::ostream& out) const { @@ -7525,30 +7624,30 @@ void swap(AesGcmV1 &a, AesGcmV1 &b) { swap(a.__isset, b.__isset); } -AesGcmV1::AesGcmV1(const AesGcmV1& other271) { - aad_prefix = other271.aad_prefix; - aad_file_unique = other271.aad_file_unique; - supply_aad_prefix = other271.supply_aad_prefix; - __isset = other271.__isset; +AesGcmV1::AesGcmV1(const AesGcmV1& 
other275) { + aad_prefix = other275.aad_prefix; + aad_file_unique = other275.aad_file_unique; + supply_aad_prefix = other275.supply_aad_prefix; + __isset = other275.__isset; } -AesGcmV1::AesGcmV1(AesGcmV1&& other272) noexcept { - aad_prefix = std::move(other272.aad_prefix); - aad_file_unique = std::move(other272.aad_file_unique); - supply_aad_prefix = other272.supply_aad_prefix; - __isset = other272.__isset; +AesGcmV1::AesGcmV1(AesGcmV1&& other276) noexcept { + aad_prefix = std::move(other276.aad_prefix); + aad_file_unique = std::move(other276.aad_file_unique); + supply_aad_prefix = other276.supply_aad_prefix; + __isset = other276.__isset; } -AesGcmV1& AesGcmV1::operator=(const AesGcmV1& other273) { - aad_prefix = other273.aad_prefix; - aad_file_unique = other273.aad_file_unique; - supply_aad_prefix = other273.supply_aad_prefix; - __isset = other273.__isset; +AesGcmV1& AesGcmV1::operator=(const AesGcmV1& other277) { + aad_prefix = other277.aad_prefix; + aad_file_unique = other277.aad_file_unique; + supply_aad_prefix = other277.supply_aad_prefix; + __isset = other277.__isset; return *this; } -AesGcmV1& AesGcmV1::operator=(AesGcmV1&& other274) noexcept { - aad_prefix = std::move(other274.aad_prefix); - aad_file_unique = std::move(other274.aad_file_unique); - supply_aad_prefix = other274.supply_aad_prefix; - __isset = other274.__isset; +AesGcmV1& AesGcmV1::operator=(AesGcmV1&& other278) noexcept { + aad_prefix = std::move(other278.aad_prefix); + aad_file_unique = std::move(other278.aad_file_unique); + supply_aad_prefix = other278.supply_aad_prefix; + __isset = other278.__isset; return *this; } void AesGcmV1::printTo(std::ostream& out) const { @@ -7676,30 +7775,30 @@ void swap(AesGcmCtrV1 &a, AesGcmCtrV1 &b) { swap(a.__isset, b.__isset); } -AesGcmCtrV1::AesGcmCtrV1(const AesGcmCtrV1& other275) { - aad_prefix = other275.aad_prefix; - aad_file_unique = other275.aad_file_unique; - supply_aad_prefix = other275.supply_aad_prefix; - __isset = other275.__isset; 
+AesGcmCtrV1::AesGcmCtrV1(const AesGcmCtrV1& other279) { + aad_prefix = other279.aad_prefix; + aad_file_unique = other279.aad_file_unique; + supply_aad_prefix = other279.supply_aad_prefix; + __isset = other279.__isset; } -AesGcmCtrV1::AesGcmCtrV1(AesGcmCtrV1&& other276) noexcept { - aad_prefix = std::move(other276.aad_prefix); - aad_file_unique = std::move(other276.aad_file_unique); - supply_aad_prefix = other276.supply_aad_prefix; - __isset = other276.__isset; +AesGcmCtrV1::AesGcmCtrV1(AesGcmCtrV1&& other280) noexcept { + aad_prefix = std::move(other280.aad_prefix); + aad_file_unique = std::move(other280.aad_file_unique); + supply_aad_prefix = other280.supply_aad_prefix; + __isset = other280.__isset; } -AesGcmCtrV1& AesGcmCtrV1::operator=(const AesGcmCtrV1& other277) { - aad_prefix = other277.aad_prefix; - aad_file_unique = other277.aad_file_unique; - supply_aad_prefix = other277.supply_aad_prefix; - __isset = other277.__isset; +AesGcmCtrV1& AesGcmCtrV1::operator=(const AesGcmCtrV1& other281) { + aad_prefix = other281.aad_prefix; + aad_file_unique = other281.aad_file_unique; + supply_aad_prefix = other281.supply_aad_prefix; + __isset = other281.__isset; return *this; } -AesGcmCtrV1& AesGcmCtrV1::operator=(AesGcmCtrV1&& other278) noexcept { - aad_prefix = std::move(other278.aad_prefix); - aad_file_unique = std::move(other278.aad_file_unique); - supply_aad_prefix = other278.supply_aad_prefix; - __isset = other278.__isset; +AesGcmCtrV1& AesGcmCtrV1::operator=(AesGcmCtrV1&& other282) noexcept { + aad_prefix = std::move(other282.aad_prefix); + aad_file_unique = std::move(other282.aad_file_unique); + supply_aad_prefix = other282.supply_aad_prefix; + __isset = other282.__isset; return *this; } void AesGcmCtrV1::printTo(std::ostream& out) const { @@ -7808,26 +7907,26 @@ void swap(EncryptionAlgorithm &a, EncryptionAlgorithm &b) { swap(a.__isset, b.__isset); } -EncryptionAlgorithm::EncryptionAlgorithm(const EncryptionAlgorithm& other279) { - AES_GCM_V1 = 
other279.AES_GCM_V1; - AES_GCM_CTR_V1 = other279.AES_GCM_CTR_V1; - __isset = other279.__isset; +EncryptionAlgorithm::EncryptionAlgorithm(const EncryptionAlgorithm& other283) { + AES_GCM_V1 = other283.AES_GCM_V1; + AES_GCM_CTR_V1 = other283.AES_GCM_CTR_V1; + __isset = other283.__isset; } -EncryptionAlgorithm::EncryptionAlgorithm(EncryptionAlgorithm&& other280) noexcept { - AES_GCM_V1 = std::move(other280.AES_GCM_V1); - AES_GCM_CTR_V1 = std::move(other280.AES_GCM_CTR_V1); - __isset = other280.__isset; +EncryptionAlgorithm::EncryptionAlgorithm(EncryptionAlgorithm&& other284) noexcept { + AES_GCM_V1 = std::move(other284.AES_GCM_V1); + AES_GCM_CTR_V1 = std::move(other284.AES_GCM_CTR_V1); + __isset = other284.__isset; } -EncryptionAlgorithm& EncryptionAlgorithm::operator=(const EncryptionAlgorithm& other281) { - AES_GCM_V1 = other281.AES_GCM_V1; - AES_GCM_CTR_V1 = other281.AES_GCM_CTR_V1; - __isset = other281.__isset; +EncryptionAlgorithm& EncryptionAlgorithm::operator=(const EncryptionAlgorithm& other285) { + AES_GCM_V1 = other285.AES_GCM_V1; + AES_GCM_CTR_V1 = other285.AES_GCM_CTR_V1; + __isset = other285.__isset; return *this; } -EncryptionAlgorithm& EncryptionAlgorithm::operator=(EncryptionAlgorithm&& other282) noexcept { - AES_GCM_V1 = std::move(other282.AES_GCM_V1); - AES_GCM_CTR_V1 = std::move(other282.AES_GCM_CTR_V1); - __isset = other282.__isset; +EncryptionAlgorithm& EncryptionAlgorithm::operator=(EncryptionAlgorithm&& other286) noexcept { + AES_GCM_V1 = std::move(other286.AES_GCM_V1); + AES_GCM_CTR_V1 = std::move(other286.AES_GCM_CTR_V1); + __isset = other286.__isset; return *this; } void EncryptionAlgorithm::printTo(std::ostream& out) const { @@ -7927,14 +8026,14 @@ uint32_t FileMetaData::read(::apache::thrift::protocol::TProtocol* iprot) { if (ftype == ::apache::thrift::protocol::T_LIST) { { this->schema.clear(); - uint32_t _size283; - ::apache::thrift::protocol::TType _etype286; - xfer += iprot->readListBegin(_etype286, _size283); - 
this->schema.resize(_size283); - uint32_t _i287; - for (_i287 = 0; _i287 < _size283; ++_i287) + uint32_t _size287; + ::apache::thrift::protocol::TType _etype290; + xfer += iprot->readListBegin(_etype290, _size287); + this->schema.resize(_size287); + uint32_t _i291; + for (_i291 = 0; _i291 < _size287; ++_i291) { - xfer += this->schema[_i287].read(iprot); + xfer += this->schema[_i291].read(iprot); } xfer += iprot->readListEnd(); } @@ -7955,14 +8054,14 @@ uint32_t FileMetaData::read(::apache::thrift::protocol::TProtocol* iprot) { if (ftype == ::apache::thrift::protocol::T_LIST) { { this->row_groups.clear(); - uint32_t _size288; - ::apache::thrift::protocol::TType _etype291; - xfer += iprot->readListBegin(_etype291, _size288); - this->row_groups.resize(_size288); - uint32_t _i292; - for (_i292 = 0; _i292 < _size288; ++_i292) + uint32_t _size292; + ::apache::thrift::protocol::TType _etype295; + xfer += iprot->readListBegin(_etype295, _size292); + this->row_groups.resize(_size292); + uint32_t _i296; + for (_i296 = 0; _i296 < _size292; ++_i296) { - xfer += this->row_groups[_i292].read(iprot); + xfer += this->row_groups[_i296].read(iprot); } xfer += iprot->readListEnd(); } @@ -7975,14 +8074,14 @@ uint32_t FileMetaData::read(::apache::thrift::protocol::TProtocol* iprot) { if (ftype == ::apache::thrift::protocol::T_LIST) { { this->key_value_metadata.clear(); - uint32_t _size293; - ::apache::thrift::protocol::TType _etype296; - xfer += iprot->readListBegin(_etype296, _size293); - this->key_value_metadata.resize(_size293); - uint32_t _i297; - for (_i297 = 0; _i297 < _size293; ++_i297) + uint32_t _size297; + ::apache::thrift::protocol::TType _etype300; + xfer += iprot->readListBegin(_etype300, _size297); + this->key_value_metadata.resize(_size297); + uint32_t _i301; + for (_i301 = 0; _i301 < _size297; ++_i301) { - xfer += this->key_value_metadata[_i297].read(iprot); + xfer += this->key_value_metadata[_i301].read(iprot); } xfer += iprot->readListEnd(); } @@ -8003,14 +8102,14 @@ 
uint32_t FileMetaData::read(::apache::thrift::protocol::TProtocol* iprot) { if (ftype == ::apache::thrift::protocol::T_LIST) { { this->column_orders.clear(); - uint32_t _size298; - ::apache::thrift::protocol::TType _etype301; - xfer += iprot->readListBegin(_etype301, _size298); - this->column_orders.resize(_size298); - uint32_t _i302; - for (_i302 = 0; _i302 < _size298; ++_i302) + uint32_t _size302; + ::apache::thrift::protocol::TType _etype305; + xfer += iprot->readListBegin(_etype305, _size302); + this->column_orders.resize(_size302); + uint32_t _i306; + for (_i306 = 0; _i306 < _size302; ++_i306) { - xfer += this->column_orders[_i302].read(iprot); + xfer += this->column_orders[_i306].read(iprot); } xfer += iprot->readListEnd(); } @@ -8067,10 +8166,10 @@ uint32_t FileMetaData::write(::apache::thrift::protocol::TProtocol* oprot) const xfer += oprot->writeFieldBegin("schema", ::apache::thrift::protocol::T_LIST, 2); { xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast(this->schema.size())); - std::vector ::const_iterator _iter303; - for (_iter303 = this->schema.begin(); _iter303 != this->schema.end(); ++_iter303) + std::vector ::const_iterator _iter307; + for (_iter307 = this->schema.begin(); _iter307 != this->schema.end(); ++_iter307) { - xfer += (*_iter303).write(oprot); + xfer += (*_iter307).write(oprot); } xfer += oprot->writeListEnd(); } @@ -8083,10 +8182,10 @@ uint32_t FileMetaData::write(::apache::thrift::protocol::TProtocol* oprot) const xfer += oprot->writeFieldBegin("row_groups", ::apache::thrift::protocol::T_LIST, 4); { xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast(this->row_groups.size())); - std::vector ::const_iterator _iter304; - for (_iter304 = this->row_groups.begin(); _iter304 != this->row_groups.end(); ++_iter304) + std::vector ::const_iterator _iter308; + for (_iter308 = this->row_groups.begin(); _iter308 != this->row_groups.end(); ++_iter308) { - xfer += (*_iter304).write(oprot); + 
xfer += (*_iter308).write(oprot); } xfer += oprot->writeListEnd(); } @@ -8096,10 +8195,10 @@ uint32_t FileMetaData::write(::apache::thrift::protocol::TProtocol* oprot) const xfer += oprot->writeFieldBegin("key_value_metadata", ::apache::thrift::protocol::T_LIST, 5); { xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast(this->key_value_metadata.size())); - std::vector ::const_iterator _iter305; - for (_iter305 = this->key_value_metadata.begin(); _iter305 != this->key_value_metadata.end(); ++_iter305) + std::vector ::const_iterator _iter309; + for (_iter309 = this->key_value_metadata.begin(); _iter309 != this->key_value_metadata.end(); ++_iter309) { - xfer += (*_iter305).write(oprot); + xfer += (*_iter309).write(oprot); } xfer += oprot->writeListEnd(); } @@ -8114,10 +8213,10 @@ uint32_t FileMetaData::write(::apache::thrift::protocol::TProtocol* oprot) const xfer += oprot->writeFieldBegin("column_orders", ::apache::thrift::protocol::T_LIST, 7); { xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast(this->column_orders.size())); - std::vector ::const_iterator _iter306; - for (_iter306 = this->column_orders.begin(); _iter306 != this->column_orders.end(); ++_iter306) + std::vector ::const_iterator _iter310; + for (_iter310 = this->column_orders.begin(); _iter310 != this->column_orders.end(); ++_iter310) { - xfer += (*_iter306).write(oprot); + xfer += (*_iter310).write(oprot); } xfer += oprot->writeListEnd(); } @@ -8152,54 +8251,54 @@ void swap(FileMetaData &a, FileMetaData &b) { swap(a.__isset, b.__isset); } -FileMetaData::FileMetaData(const FileMetaData& other307) { - version = other307.version; - schema = other307.schema; - num_rows = other307.num_rows; - row_groups = other307.row_groups; - key_value_metadata = other307.key_value_metadata; - created_by = other307.created_by; - column_orders = other307.column_orders; - encryption_algorithm = other307.encryption_algorithm; - footer_signing_key_metadata = 
other307.footer_signing_key_metadata; - __isset = other307.__isset; -} -FileMetaData::FileMetaData(FileMetaData&& other308) noexcept { - version = other308.version; - schema = std::move(other308.schema); - num_rows = other308.num_rows; - row_groups = std::move(other308.row_groups); - key_value_metadata = std::move(other308.key_value_metadata); - created_by = std::move(other308.created_by); - column_orders = std::move(other308.column_orders); - encryption_algorithm = std::move(other308.encryption_algorithm); - footer_signing_key_metadata = std::move(other308.footer_signing_key_metadata); - __isset = other308.__isset; -} -FileMetaData& FileMetaData::operator=(const FileMetaData& other309) { - version = other309.version; - schema = other309.schema; - num_rows = other309.num_rows; - row_groups = other309.row_groups; - key_value_metadata = other309.key_value_metadata; - created_by = other309.created_by; - column_orders = other309.column_orders; - encryption_algorithm = other309.encryption_algorithm; - footer_signing_key_metadata = other309.footer_signing_key_metadata; - __isset = other309.__isset; +FileMetaData::FileMetaData(const FileMetaData& other311) { + version = other311.version; + schema = other311.schema; + num_rows = other311.num_rows; + row_groups = other311.row_groups; + key_value_metadata = other311.key_value_metadata; + created_by = other311.created_by; + column_orders = other311.column_orders; + encryption_algorithm = other311.encryption_algorithm; + footer_signing_key_metadata = other311.footer_signing_key_metadata; + __isset = other311.__isset; +} +FileMetaData::FileMetaData(FileMetaData&& other312) noexcept { + version = other312.version; + schema = std::move(other312.schema); + num_rows = other312.num_rows; + row_groups = std::move(other312.row_groups); + key_value_metadata = std::move(other312.key_value_metadata); + created_by = std::move(other312.created_by); + column_orders = std::move(other312.column_orders); + encryption_algorithm = 
std::move(other312.encryption_algorithm); + footer_signing_key_metadata = std::move(other312.footer_signing_key_metadata); + __isset = other312.__isset; +} +FileMetaData& FileMetaData::operator=(const FileMetaData& other313) { + version = other313.version; + schema = other313.schema; + num_rows = other313.num_rows; + row_groups = other313.row_groups; + key_value_metadata = other313.key_value_metadata; + created_by = other313.created_by; + column_orders = other313.column_orders; + encryption_algorithm = other313.encryption_algorithm; + footer_signing_key_metadata = other313.footer_signing_key_metadata; + __isset = other313.__isset; return *this; } -FileMetaData& FileMetaData::operator=(FileMetaData&& other310) noexcept { - version = other310.version; - schema = std::move(other310.schema); - num_rows = other310.num_rows; - row_groups = std::move(other310.row_groups); - key_value_metadata = std::move(other310.key_value_metadata); - created_by = std::move(other310.created_by); - column_orders = std::move(other310.column_orders); - encryption_algorithm = std::move(other310.encryption_algorithm); - footer_signing_key_metadata = std::move(other310.footer_signing_key_metadata); - __isset = other310.__isset; +FileMetaData& FileMetaData::operator=(FileMetaData&& other314) noexcept { + version = other314.version; + schema = std::move(other314.schema); + num_rows = other314.num_rows; + row_groups = std::move(other314.row_groups); + key_value_metadata = std::move(other314.key_value_metadata); + created_by = std::move(other314.created_by); + column_orders = std::move(other314.column_orders); + encryption_algorithm = std::move(other314.encryption_algorithm); + footer_signing_key_metadata = std::move(other314.footer_signing_key_metadata); + __isset = other314.__isset; return *this; } void FileMetaData::printTo(std::ostream& out) const { @@ -8315,26 +8414,26 @@ void swap(FileCryptoMetaData &a, FileCryptoMetaData &b) { swap(a.__isset, b.__isset); } 
-FileCryptoMetaData::FileCryptoMetaData(const FileCryptoMetaData& other311) { - encryption_algorithm = other311.encryption_algorithm; - key_metadata = other311.key_metadata; - __isset = other311.__isset; +FileCryptoMetaData::FileCryptoMetaData(const FileCryptoMetaData& other315) { + encryption_algorithm = other315.encryption_algorithm; + key_metadata = other315.key_metadata; + __isset = other315.__isset; } -FileCryptoMetaData::FileCryptoMetaData(FileCryptoMetaData&& other312) noexcept { - encryption_algorithm = std::move(other312.encryption_algorithm); - key_metadata = std::move(other312.key_metadata); - __isset = other312.__isset; +FileCryptoMetaData::FileCryptoMetaData(FileCryptoMetaData&& other316) noexcept { + encryption_algorithm = std::move(other316.encryption_algorithm); + key_metadata = std::move(other316.key_metadata); + __isset = other316.__isset; } -FileCryptoMetaData& FileCryptoMetaData::operator=(const FileCryptoMetaData& other313) { - encryption_algorithm = other313.encryption_algorithm; - key_metadata = other313.key_metadata; - __isset = other313.__isset; +FileCryptoMetaData& FileCryptoMetaData::operator=(const FileCryptoMetaData& other317) { + encryption_algorithm = other317.encryption_algorithm; + key_metadata = other317.key_metadata; + __isset = other317.__isset; return *this; } -FileCryptoMetaData& FileCryptoMetaData::operator=(FileCryptoMetaData&& other314) noexcept { - encryption_algorithm = std::move(other314.encryption_algorithm); - key_metadata = std::move(other314.key_metadata); - __isset = other314.__isset; +FileCryptoMetaData& FileCryptoMetaData::operator=(FileCryptoMetaData&& other318) noexcept { + encryption_algorithm = std::move(other318.encryption_algorithm); + key_metadata = std::move(other318.key_metadata); + __isset = other318.__isset; return *this; } void FileCryptoMetaData::printTo(std::ostream& out) const { diff --git a/cpp/src/generated/parquet_types.h b/cpp/src/generated/parquet_types.h index 9f468b5051db3..199b4ae747667 
100644 --- a/cpp/src/generated/parquet_types.h +++ b/cpp/src/generated/parquet_types.h @@ -359,6 +359,8 @@ class EnumType; class DateType; +class Float16Type; + class NullType; class DecimalType; @@ -770,6 +772,39 @@ void swap(DateType &a, DateType &b); std::ostream& operator<<(std::ostream& out, const DateType& obj); +class Float16Type : public virtual ::apache::thrift::TBase { + public: + + Float16Type(const Float16Type&) noexcept; + Float16Type(Float16Type&&) noexcept; + Float16Type& operator=(const Float16Type&) noexcept; + Float16Type& operator=(Float16Type&&) noexcept; + Float16Type() noexcept { + } + + virtual ~Float16Type() noexcept; + + bool operator == (const Float16Type & /* rhs */) const + { + return true; + } + bool operator != (const Float16Type &rhs) const { + return !(*this == rhs); + } + + bool operator < (const Float16Type & ) const; + + uint32_t read(::apache::thrift::protocol::TProtocol* iprot) override; + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const override; + + virtual void printTo(std::ostream& out) const; +}; + +void swap(Float16Type &a, Float16Type &b); + +std::ostream& operator<<(std::ostream& out, const Float16Type& obj); + + /** * Logical type to annotate a column that is always null. 
* @@ -1253,7 +1288,7 @@ void swap(BsonType &a, BsonType &b); std::ostream& operator<<(std::ostream& out, const BsonType& obj); typedef struct _LogicalType__isset { - _LogicalType__isset() : STRING(false), MAP(false), LIST(false), ENUM(false), DECIMAL(false), DATE(false), TIME(false), TIMESTAMP(false), INTEGER(false), UNKNOWN(false), JSON(false), BSON(false), UUID(false) {} + _LogicalType__isset() : STRING(false), MAP(false), LIST(false), ENUM(false), DECIMAL(false), DATE(false), TIME(false), TIMESTAMP(false), INTEGER(false), UNKNOWN(false), JSON(false), BSON(false), UUID(false), FLOAT16(false) {} bool STRING :1; bool MAP :1; bool LIST :1; @@ -1267,6 +1302,7 @@ typedef struct _LogicalType__isset { bool JSON :1; bool BSON :1; bool UUID :1; + bool FLOAT16 :1; } _LogicalType__isset; /** @@ -1300,6 +1336,7 @@ class LogicalType : public virtual ::apache::thrift::TBase { JsonType JSON; BsonType BSON; UUIDType UUID; + Float16Type FLOAT16; _LogicalType__isset __isset; @@ -1329,6 +1366,8 @@ class LogicalType : public virtual ::apache::thrift::TBase { void __set_UUID(const UUIDType& val); + void __set_FLOAT16(const Float16Type& val); + bool operator == (const LogicalType & rhs) const { if (__isset.STRING != rhs.__isset.STRING) @@ -1383,6 +1422,10 @@ class LogicalType : public virtual ::apache::thrift::TBase { return false; else if (__isset.UUID && !(UUID == rhs.UUID)) return false; + if (__isset.FLOAT16 != rhs.__isset.FLOAT16) + return false; + else if (__isset.FLOAT16 && !(FLOAT16 == rhs.FLOAT16)) + return false; return true; } bool operator != (const LogicalType &rhs) const { diff --git a/cpp/src/parquet/CMakeLists.txt b/cpp/src/parquet/CMakeLists.txt index 0d04ec3e306e5..04028431ba157 100644 --- a/cpp/src/parquet/CMakeLists.txt +++ b/cpp/src/parquet/CMakeLists.txt @@ -46,13 +46,15 @@ function(ADD_PARQUET_TEST REL_TEST_NAME) if(ARROW_TEST_LINKAGE STREQUAL "static") add_test_case(${REL_TEST_NAME} STATIC_LINK_LIBS - ${PARQUET_STATIC_TEST_LINK_LIBS} + parquet_static + 
${PARQUET_TEST_LINK_LIBS} ${TEST_ARGUMENTS} ${ARG_UNPARSED_ARGUMENTS}) else() add_test_case(${REL_TEST_NAME} STATIC_LINK_LIBS - ${PARQUET_SHARED_TEST_LINK_LIBS} + parquet_shared + ${PARQUET_TEST_LINK_LIBS} ${TEST_ARGUMENTS} ${ARG_UNPARSED_ARGUMENTS}) endif() @@ -118,28 +120,20 @@ endfunction() if(ARROW_BUILD_STATIC) set(PARQUET_STATIC_LINK_LIBS arrow_static ${ARROW_STATIC_LINK_LIBS}) set(PARQUET_STATIC_INSTALL_INTERFACE_LIBS Arrow::arrow_static) - set(ARROW_LIBRARIES_FOR_STATIC_TESTS arrow_testing_static arrow_static - ${ARROW_STATIC_LINK_LIBS}) else() set(PARQUET_STATIC_INSTALL_INTERFACE_LIBS) - set(ARROW_LIBRARIES_FOR_STATIC_TESTS arrow_testing_shared arrow_shared) endif() -set(PARQUET_MIN_TEST_LIBS ${ARROW_GTEST_GMOCK} ${ARROW_GTEST_GTEST} - ${ARROW_GTEST_GTEST_MAIN} Boost::headers) - +set(PARQUET_TEST_LINK_LIBS ${ARROW_TEST_LINK_LIBS} thrift::thrift Boost::headers) if(APPLE) - list(APPEND PARQUET_MIN_TEST_LIBS ${CMAKE_DL_LIBS}) + list(APPEND PARQUET_TEST_LINK_LIBS ${CMAKE_DL_LIBS}) elseif(NOT MSVC) - list(APPEND PARQUET_MIN_TEST_LIBS pthread ${CMAKE_DL_LIBS}) + if(ARROW_ENABLE_THREADING) + list(APPEND PARQUET_TEST_LINK_LIBS Threads::Threads) + endif() + list(APPEND PARQUET_TEST_LINK_LIBS ${CMAKE_DL_LIBS}) endif() -set(PARQUET_SHARED_TEST_LINK_LIBS arrow_testing_shared ${PARQUET_MIN_TEST_LIBS} - parquet_shared thrift::thrift) - -set(PARQUET_STATIC_TEST_LINK_LIBS ${PARQUET_MIN_TEST_LIBS} parquet_static thrift::thrift - ${ARROW_LIBRARIES_FOR_STATIC_TESTS}) - # # Generated Thrift sources set(PARQUET_THRIFT_SOURCE_DIR "${ARROW_SOURCE_DIR}/src/generated/") @@ -302,15 +296,16 @@ if(WIN32 AND NOT (ARROW_TEST_LINKAGE STREQUAL "static")) "${PARQUET_THRIFT_SOURCE_DIR}/parquet_constants.cpp" "${PARQUET_THRIFT_SOURCE_DIR}/parquet_types.cpp") target_link_libraries(parquet_test_support thrift::thrift) - set(PARQUET_SHARED_TEST_LINK_LIBS ${PARQUET_SHARED_TEST_LINK_LIBS} parquet_test_support) - set(PARQUET_LIBRARIES ${PARQUET_LIBRARIES} parquet_test_support) + list(PREPEND 
PARQUET_TEST_LINK_LIBS parquet_test_support) + list(APPEND PARQUET_LIBRARIES parquet_test_support) endif() if(NOT ARROW_BUILD_SHARED) - set(PARQUET_BENCHMARK_LINK_OPTION STATIC_LINK_LIBS benchmark::benchmark_main - ${PARQUET_STATIC_TEST_LINK_LIBS}) + set(PARQUET_BENCHMARK_LINK_OPTION STATIC_LINK_LIBS parquet_static + ${PARQUET_TEST_LINK_LIBS} benchmark::benchmark_main) else() - set(PARQUET_BENCHMARK_LINK_OPTION EXTRA_LINK_LIBS ${PARQUET_SHARED_TEST_LINK_LIBS}) + set(PARQUET_BENCHMARK_LINK_OPTION EXTRA_LINK_LIBS parquet_shared + ${PARQUET_TEST_LINK_LIBS}) endif() if(ARROW_BUILD_STATIC AND WIN32) diff --git a/cpp/src/parquet/arrow/arrow_reader_writer_test.cc b/cpp/src/parquet/arrow/arrow_reader_writer_test.cc index 315405ef1e569..a314ecbf747e7 100644 --- a/cpp/src/parquet/arrow/arrow_reader_writer_test.cc +++ b/cpp/src/parquet/arrow/arrow_reader_writer_test.cc @@ -143,6 +143,8 @@ std::shared_ptr get_logical_type(const DataType& type) { return LogicalType::Date(); case ArrowId::DATE64: return LogicalType::Date(); + case ArrowId::HALF_FLOAT: + return LogicalType::Float16(); case ArrowId::TIMESTAMP: { const auto& ts_type = static_cast(type); const bool adjusted_to_utc = !(ts_type.timezone().empty()); @@ -220,6 +222,7 @@ ParquetType::type get_physical_type(const DataType& type) { case ArrowId::FIXED_SIZE_BINARY: case ArrowId::DECIMAL128: case ArrowId::DECIMAL256: + case ArrowId::HALF_FLOAT: return ParquetType::FIXED_LEN_BYTE_ARRAY; case ArrowId::DATE32: return ParquetType::INT32; @@ -525,6 +528,9 @@ static std::shared_ptr MakeSimpleSchema(const DataType& type, byte_width = static_cast(values_type).byte_width(); break; + case ::arrow::Type::HALF_FLOAT: + byte_width = sizeof(::arrow::HalfFloatType::c_type); + break; case ::arrow::Type::DECIMAL128: case ::arrow::Type::DECIMAL256: { const auto& decimal_type = static_cast(values_type); @@ -537,6 +543,9 @@ static std::shared_ptr MakeSimpleSchema(const DataType& type, case ::arrow::Type::FIXED_SIZE_BINARY: byte_width = 
static_cast(type).byte_width(); break; + case ::arrow::Type::HALF_FLOAT: + byte_width = sizeof(::arrow::HalfFloatType::c_type); + break; case ::arrow::Type::DECIMAL128: case ::arrow::Type::DECIMAL256: { const auto& decimal_type = static_cast(type); @@ -840,12 +849,12 @@ typedef ::testing::Types< ::arrow::BooleanType, ::arrow::UInt8Type, ::arrow::Int8Type, ::arrow::UInt16Type, ::arrow::Int16Type, ::arrow::Int32Type, ::arrow::UInt64Type, ::arrow::Int64Type, ::arrow::Date32Type, ::arrow::FloatType, ::arrow::DoubleType, ::arrow::StringType, - ::arrow::BinaryType, ::arrow::FixedSizeBinaryType, DecimalWithPrecisionAndScale<1>, - DecimalWithPrecisionAndScale<5>, DecimalWithPrecisionAndScale<10>, - DecimalWithPrecisionAndScale<19>, DecimalWithPrecisionAndScale<23>, - DecimalWithPrecisionAndScale<27>, DecimalWithPrecisionAndScale<38>, - Decimal256WithPrecisionAndScale<39>, Decimal256WithPrecisionAndScale<56>, - Decimal256WithPrecisionAndScale<76>> + ::arrow::BinaryType, ::arrow::FixedSizeBinaryType, ::arrow::HalfFloatType, + DecimalWithPrecisionAndScale<1>, DecimalWithPrecisionAndScale<5>, + DecimalWithPrecisionAndScale<10>, DecimalWithPrecisionAndScale<19>, + DecimalWithPrecisionAndScale<23>, DecimalWithPrecisionAndScale<27>, + DecimalWithPrecisionAndScale<38>, Decimal256WithPrecisionAndScale<39>, + Decimal256WithPrecisionAndScale<56>, Decimal256WithPrecisionAndScale<76>> TestTypes; TYPED_TEST_SUITE(TestParquetIO, TestTypes); @@ -916,9 +925,15 @@ TYPED_TEST(TestParquetIO, SingleColumnOptionalReadWrite) { } TYPED_TEST(TestParquetIO, SingleColumnOptionalDictionaryWrite) { - // Skip tests for BOOL as we don't create dictionaries for it. 
- if (TypeParam::type_id == ::arrow::Type::BOOL) { - return; + switch (TypeParam::type_id) { + case ::arrow::Type::BOOL: + GTEST_SKIP() << "dictionaries not created for BOOL"; + break; + case ::arrow::Type::HALF_FLOAT: + GTEST_SKIP() << "dictionary_encode not supported for HALF_FLOAT"; + break; + default: + break; } std::shared_ptr values; @@ -3374,6 +3389,8 @@ TEST(ArrowReadWrite, NestedRequiredOuterOptional) { for (const auto& inner_type : types) { if (inner_type->id() == ::arrow::Type::NA) continue; + if (inner_type->id() == ::arrow::Type::BINARY_VIEW) continue; + if (inner_type->id() == ::arrow::Type::STRING_VIEW) continue; auto writer_props = WriterProperties::Builder(); auto arrow_writer_props = ArrowWriterProperties::Builder(); @@ -3389,7 +3406,6 @@ TEST(ArrowReadWrite, NestedRequiredOuterOptional) { arrow_writer_props.coerce_timestamps(unit); } } - ASSERT_NO_FATAL_FAILURE(DoNestedRequiredRoundtrip(inner_type, writer_props.build(), arrow_writer_props.build())); @@ -5205,6 +5221,33 @@ TEST(TestArrowReadWrite, FuzzReader) { } } +// Test writing table with a closed writer, should not segfault (GH-37969). 
+TEST(TestArrowReadWrite, OperationsOnClosedWriter) { + // A sample table, type and structure does not matter in this test case + auto schema = ::arrow::schema({::arrow::field("letter", ::arrow::utf8())}); + auto table = ::arrow::Table::Make( + schema, {::arrow::ArrayFromJSON(::arrow::utf8(), R"(["a", "b", "c"])")}); + + auto sink = CreateOutputStream(); + ASSERT_OK_AND_ASSIGN(auto writer, parquet::arrow::FileWriter::Open( + *schema, ::arrow::default_memory_pool(), sink, + parquet::default_writer_properties(), + parquet::default_arrow_writer_properties())); + + // Should be ok + ASSERT_OK(writer->WriteTable(*table, 1)); + + // Operations on closed writer are invalid + ASSERT_OK(writer->Close()); + + ASSERT_RAISES(Invalid, writer->NewRowGroup(1)); + ASSERT_RAISES(Invalid, writer->WriteColumnChunk(table->column(0), 0, 1)); + ASSERT_RAISES(Invalid, writer->NewBufferedRowGroup()); + ASSERT_OK_AND_ASSIGN(auto record_batch, table->CombineChunksToBatch()); + ASSERT_RAISES(Invalid, writer->WriteRecordBatch(*record_batch)); + ASSERT_RAISES(Invalid, writer->WriteTable(*table, 1)); +} + namespace { struct ColumnIndexObject { diff --git a/cpp/src/parquet/arrow/arrow_schema_test.cc b/cpp/src/parquet/arrow/arrow_schema_test.cc index f11101eb24298..5443214f930d7 100644 --- a/cpp/src/parquet/arrow/arrow_schema_test.cc +++ b/cpp/src/parquet/arrow/arrow_schema_test.cc @@ -236,6 +236,8 @@ TEST_F(TestConvertParquetSchema, ParquetAnnotatedFields) { ::arrow::fixed_size_binary(12)}, {"uuid", LogicalType::UUID(), ParquetType::FIXED_LEN_BYTE_ARRAY, 16, ::arrow::fixed_size_binary(16)}, + {"float16", LogicalType::Float16(), ParquetType::FIXED_LEN_BYTE_ARRAY, 2, + ::arrow::float16()}, {"none", LogicalType::None(), ParquetType::BOOLEAN, -1, ::arrow::boolean()}, {"none", LogicalType::None(), ParquetType::INT32, -1, ::arrow::int32()}, {"none", LogicalType::None(), ParquetType::INT64, -1, ::arrow::int64()}, @@ -851,6 +853,8 @@ TEST_F(TestConvertArrowSchema, ArrowFields) { 
ParquetType::FIXED_LEN_BYTE_ARRAY, 7}, {"decimal(32, 8)", ::arrow::decimal(32, 8), LogicalType::Decimal(32, 8), ParquetType::FIXED_LEN_BYTE_ARRAY, 14}, + {"float16", ::arrow::float16(), LogicalType::Float16(), + ParquetType::FIXED_LEN_BYTE_ARRAY, 2}, {"time32", ::arrow::time32(::arrow::TimeUnit::MILLI), LogicalType::Time(true, LogicalType::TimeUnit::MILLIS), ParquetType::INT32, -1}, {"time64(microsecond)", ::arrow::time64(::arrow::TimeUnit::MICRO), @@ -913,7 +917,8 @@ TEST_F(TestConvertArrowSchema, ArrowNonconvertibleFields) { }; std::vector cases = { - {"float16", ::arrow::float16()}, + {"run_end_encoded", + ::arrow::run_end_encoded(::arrow::int32(), ::arrow::list(::arrow::int8()))}, }; for (const FieldConstructionArguments& c : cases) { diff --git a/cpp/src/parquet/arrow/reader_internal.cc b/cpp/src/parquet/arrow/reader_internal.cc index 5146aa12c2c36..e5aef5a45b5f3 100644 --- a/cpp/src/parquet/arrow/reader_internal.cc +++ b/cpp/src/parquet/arrow/reader_internal.cc @@ -42,6 +42,7 @@ #include "arrow/util/bit_util.h" #include "arrow/util/checked_cast.h" #include "arrow/util/endian.h" +#include "arrow/util/float16.h" #include "arrow/util/int_util_overflow.h" #include "arrow/util/logging.h" #include "arrow/util/ubsan.h" @@ -82,6 +83,7 @@ using ::arrow::bit_util::FromBigEndian; using ::arrow::internal::checked_cast; using ::arrow::internal::checked_pointer_cast; using ::arrow::internal::SafeLeftShift; +using ::arrow::util::Float16; using ::arrow::util::SafeLoadAs; using parquet::internal::BinaryRecordReader; @@ -713,6 +715,17 @@ Status TransferDecimal(RecordReader* reader, MemoryPool* pool, return Status::OK(); } +Status TransferHalfFloat(RecordReader* reader, MemoryPool* pool, + const std::shared_ptr& field, Datum* out) { + static const auto binary_type = ::arrow::fixed_size_binary(2); + // Read as a FixedSizeBinaryArray - then, view as a HalfFloatArray + std::shared_ptr chunked_array; + RETURN_NOT_OK( + TransferBinary(reader, pool, field->WithType(binary_type), 
&chunked_array)); + ARROW_ASSIGN_OR_RAISE(*out, chunked_array->View(field->type())); + return Status::OK(); +} + } // namespace #define TRANSFER_INT32(ENUM, ArrowType) \ @@ -772,6 +785,18 @@ Status TransferColumnData(RecordReader* reader, const std::shared_ptr& va RETURN_NOT_OK(TransferBinary(reader, pool, value_field, &chunked_result)); result = chunked_result; } break; + case ::arrow::Type::HALF_FLOAT: { + const auto& type = *value_field->type(); + if (descr->physical_type() != ::parquet::Type::FIXED_LEN_BYTE_ARRAY) { + return Status::Invalid("Physical type for ", type.ToString(), + " must be fixed length binary"); + } + if (descr->type_length() != type.byte_width()) { + return Status::Invalid("Fixed length binary type for ", type.ToString(), + " must have a byte width of ", type.byte_width()); + } + RETURN_NOT_OK(TransferHalfFloat(reader, pool, value_field, &result)); + } break; case ::arrow::Type::DECIMAL128: { switch (descr->physical_type()) { case ::parquet::Type::INT32: { diff --git a/cpp/src/parquet/arrow/schema.cc b/cpp/src/parquet/arrow/schema.cc index 3323b7ff8b608..f5484f131eb07 100644 --- a/cpp/src/parquet/arrow/schema.cc +++ b/cpp/src/parquet/arrow/schema.cc @@ -397,6 +397,11 @@ Status FieldToNode(const std::string& name, const std::shared_ptr& field, case ArrowTypeId::DURATION: type = ParquetType::INT64; break; + case ArrowTypeId::HALF_FLOAT: + type = ParquetType::FIXED_LEN_BYTE_ARRAY; + logical_type = LogicalType::Float16(); + length = sizeof(uint16_t); + break; case ArrowTypeId::STRUCT: { auto struct_type = std::static_pointer_cast<::arrow::StructType>(field->type()); return StructToNode(struct_type, name, field->nullable(), field_id, properties, diff --git a/cpp/src/parquet/arrow/schema_internal.cc b/cpp/src/parquet/arrow/schema_internal.cc index da0427cb31000..bb75cce084097 100644 --- a/cpp/src/parquet/arrow/schema_internal.cc +++ b/cpp/src/parquet/arrow/schema_internal.cc @@ -130,6 +130,8 @@ Result> FromFLBA(const LogicalType& logical_type, 
switch (logical_type.type()) { case LogicalType::Type::DECIMAL: return MakeArrowDecimal(logical_type); + case LogicalType::Type::FLOAT16: + return ::arrow::float16(); case LogicalType::Type::NONE: case LogicalType::Type::INTERVAL: case LogicalType::Type::UUID: diff --git a/cpp/src/parquet/arrow/test_util.h b/cpp/src/parquet/arrow/test_util.h index 16c03130c9672..b2be1b3c5354d 100644 --- a/cpp/src/parquet/arrow/test_util.h +++ b/cpp/src/parquet/arrow/test_util.h @@ -33,7 +33,9 @@ #include "arrow/type_fwd.h" #include "arrow/type_traits.h" #include "arrow/util/decimal.h" +#include "arrow/util/float16.h" #include "parquet/column_reader.h" +#include "parquet/test_util.h" namespace parquet { @@ -70,7 +72,14 @@ ::arrow::enable_if_floating_point NonNullArray( size_t size, std::shared_ptr* out) { using c_type = typename ArrowType::c_type; std::vector values; - ::arrow::random_real(size, 0, static_cast(0), static_cast(1), &values); + if constexpr (::arrow::is_half_float_type::value) { + values.resize(size); + test::random_float16_numbers(static_cast(size), 0, ::arrow::util::Float16(0.0f), + ::arrow::util::Float16(1.0f), values.data()); + } else { + ::arrow::random_real(size, 0, static_cast(0), static_cast(1), + &values); + } ::arrow::NumericBuilder builder; RETURN_NOT_OK(builder.AppendValues(values.data(), values.size())); return builder.Finish(out); @@ -201,8 +210,14 @@ ::arrow::enable_if_floating_point NullableArray( size_t size, size_t num_nulls, uint32_t seed, std::shared_ptr* out) { using c_type = typename ArrowType::c_type; std::vector values; - ::arrow::random_real(size, seed, static_cast(-1e10), static_cast(1e10), - &values); + if constexpr (::arrow::is_half_float_type::value) { + values.resize(size); + test::random_float16_numbers(static_cast(size), 0, ::arrow::util::Float16(-1e4f), + ::arrow::util::Float16(1e4f), values.data()); + } else { + ::arrow::random_real(size, seed, static_cast(-1e10), + static_cast(1e10), &values); + } std::vector valid_bytes(size, 1); for 
(size_t i = 0; i < num_nulls; i++) { diff --git a/cpp/src/parquet/arrow/writer.cc b/cpp/src/parquet/arrow/writer.cc index 0c67e8d6bb3d4..300a6d8e054cc 100644 --- a/cpp/src/parquet/arrow/writer.cc +++ b/cpp/src/parquet/arrow/writer.cc @@ -306,6 +306,7 @@ class FileWriterImpl : public FileWriter { } Status NewRowGroup(int64_t chunk_size) override { + RETURN_NOT_OK(CheckClosed()); if (row_group_writer_ != nullptr) { PARQUET_CATCH_NOT_OK(row_group_writer_->Close()); } @@ -325,6 +326,13 @@ class FileWriterImpl : public FileWriter { return Status::OK(); } + Status CheckClosed() const { + if (closed_) { + return Status::Invalid("Operation on closed file"); + } + return Status::OK(); + } + Status WriteColumnChunk(const Array& data) override { // A bit awkward here since cannot instantiate ChunkedArray from const Array& auto chunk = ::arrow::MakeArray(data.data()); @@ -334,6 +342,7 @@ class FileWriterImpl : public FileWriter { Status WriteColumnChunk(const std::shared_ptr& data, int64_t offset, int64_t size) override { + RETURN_NOT_OK(CheckClosed()); if (arrow_properties_->engine_version() == ArrowWriterProperties::V2 || arrow_properties_->engine_version() == ArrowWriterProperties::V1) { if (row_group_writer_->buffered()) { @@ -356,6 +365,7 @@ class FileWriterImpl : public FileWriter { std::shared_ptr<::arrow::Schema> schema() const override { return schema_; } Status WriteTable(const Table& table, int64_t chunk_size) override { + RETURN_NOT_OK(CheckClosed()); RETURN_NOT_OK(table.Validate()); if (chunk_size <= 0 && table.num_rows() > 0) { @@ -392,6 +402,7 @@ class FileWriterImpl : public FileWriter { } Status NewBufferedRowGroup() override { + RETURN_NOT_OK(CheckClosed()); if (row_group_writer_ != nullptr) { PARQUET_CATCH_NOT_OK(row_group_writer_->Close()); } @@ -400,6 +411,7 @@ class FileWriterImpl : public FileWriter { } Status WriteRecordBatch(const RecordBatch& batch) override { + RETURN_NOT_OK(CheckClosed()); if (batch.num_rows() == 0) { return Status::OK(); } diff 
--git a/cpp/src/parquet/column_writer.cc b/cpp/src/parquet/column_writer.cc index 72e984d7736fa..a7e7b2f93e174 100644 --- a/cpp/src/parquet/column_writer.cc +++ b/cpp/src/parquet/column_writer.cc @@ -39,6 +39,7 @@ #include "arrow/util/compression.h" #include "arrow/util/crc32.h" #include "arrow/util/endian.h" +#include "arrow/util/float16.h" #include "arrow/util/logging.h" #include "arrow/util/rle_encoding.h" #include "arrow/util/type_traits.h" @@ -65,6 +66,7 @@ using arrow::Status; using arrow::bit_util::BitWriter; using arrow::internal::checked_cast; using arrow::internal::checked_pointer_cast; +using arrow::util::Float16; using arrow::util::RleEncoder; namespace bit_util = arrow::bit_util; @@ -134,6 +136,8 @@ struct ValueBufferSlicer { NOT_IMPLEMENTED_VISIT(Dictionary); NOT_IMPLEMENTED_VISIT(RunEndEncoded); NOT_IMPLEMENTED_VISIT(Extension); + NOT_IMPLEMENTED_VISIT(BinaryView); + NOT_IMPLEMENTED_VISIT(StringView); #undef NOT_IMPLEMENTED_VISIT @@ -2293,6 +2297,33 @@ struct SerializeFunctor< int64_t* scratch; }; +// ---------------------------------------------------------------------- +// Write Arrow to Float16 + +// Requires a custom serializer because Float16s in Parquet are stored as a 2-byte +// (little-endian) FLBA, whereas in Arrow they're a native `uint16_t`. +template <> +struct SerializeFunctor<::parquet::FLBAType, ::arrow::HalfFloatType> { + Status Serialize(const ::arrow::HalfFloatArray& array, ArrowWriteContext*, FLBA* out) { + const uint16_t* values = array.raw_values(); + if (array.null_count() == 0) { + for (int64_t i = 0; i < array.length(); ++i) { + out[i] = ToFLBA(&values[i]); + } + } else { + for (int64_t i = 0; i < array.length(); ++i) { + out[i] = array.IsValid(i) ? 
ToFLBA(&values[i]) : FLBA{}; + } + } + return Status::OK(); + } + + private: + FLBA ToFLBA(const uint16_t* value_ptr) const { + return FLBA{reinterpret_cast(value_ptr)}; + } +}; + template <> Status TypedColumnWriterImpl::WriteArrowDense( const int16_t* def_levels, const int16_t* rep_levels, int64_t num_levels, @@ -2301,6 +2332,7 @@ Status TypedColumnWriterImpl::WriteArrowDense( WRITE_SERIALIZE_CASE(FIXED_SIZE_BINARY, FixedSizeBinaryType, FLBAType) WRITE_SERIALIZE_CASE(DECIMAL128, Decimal128Type, FLBAType) WRITE_SERIALIZE_CASE(DECIMAL256, Decimal256Type, FLBAType) + WRITE_SERIALIZE_CASE(HALF_FLOAT, HalfFloatType, FLBAType) default: break; } diff --git a/cpp/src/parquet/encoding.cc b/cpp/src/parquet/encoding.cc index 5221f2588c0d3..1bb487c20d3e2 100644 --- a/cpp/src/parquet/encoding.cc +++ b/cpp/src/parquet/encoding.cc @@ -37,7 +37,7 @@ #include "arrow/util/bit_util.h" #include "arrow/util/bitmap_ops.h" #include "arrow/util/bitmap_writer.h" -#include "arrow/util/byte_stream_split.h" +#include "arrow/util/byte_stream_split_internal.h" #include "arrow/util/checked_cast.h" #include "arrow/util/hashing.h" #include "arrow/util/int_util_overflow.h" @@ -850,8 +850,8 @@ std::shared_ptr ByteStreamSplitEncoder::FlushValues() { AllocateBuffer(this->memory_pool(), EstimatedDataEncodedSize()); uint8_t* output_buffer_raw = output_buffer->mutable_data(); const uint8_t* raw_values = sink_.data(); - ::arrow::util::internal::ByteStreamSplitEncode( - raw_values, static_cast(num_values_in_buffer_), output_buffer_raw); + ::arrow::util::internal::ByteStreamSplitEncode(raw_values, num_values_in_buffer_, + output_buffer_raw); sink_.Reset(); num_values_in_buffer_ = 0; return std::move(output_buffer); diff --git a/cpp/src/parquet/encoding_benchmark.cc b/cpp/src/parquet/encoding_benchmark.cc index 717c716330563..b5b6cc8d93e03 100644 --- a/cpp/src/parquet/encoding_benchmark.cc +++ b/cpp/src/parquet/encoding_benchmark.cc @@ -24,7 +24,7 @@ #include "arrow/testing/random.h" #include 
"arrow/testing/util.h" #include "arrow/type.h" -#include "arrow/util/byte_stream_split.h" +#include "arrow/util/byte_stream_split_internal.h" #include "arrow/visit_data_inline.h" #include "parquet/encoding.h" diff --git a/cpp/src/parquet/encryption/openssl_internal.cc b/cpp/src/parquet/encryption/openssl_internal.cc index 05f2773532353..61ec81b490802 100644 --- a/cpp/src/parquet/encryption/openssl_internal.cc +++ b/cpp/src/parquet/encryption/openssl_internal.cc @@ -24,11 +24,14 @@ namespace parquet::encryption::openssl { void EnsureInitialized() { +// OpenSSL 1.1 doesn't provide OPENSSL_INIT_ENGINE_ALL_BUILTIN. +#ifdef OPENSSL_INIT_ENGINE_ALL_BUILTIN // Initialize ciphers and random engines if (!OPENSSL_init_crypto(OPENSSL_INIT_ENGINE_ALL_BUILTIN | OPENSSL_INIT_ADD_ALL_CIPHERS, NULL)) { throw ParquetException("OpenSSL initialization failed"); } +#endif } } // namespace parquet::encryption::openssl diff --git a/cpp/src/parquet/encryption/read_configurations_test.cc b/cpp/src/parquet/encryption/read_configurations_test.cc index 695696db293fb..94fb6362269e2 100644 --- a/cpp/src/parquet/encryption/read_configurations_test.cc +++ b/cpp/src/parquet/encryption/read_configurations_test.cc @@ -203,17 +203,15 @@ class TestDecryptionConfiguration // Check that the decryption result is as expected. void CheckResults(const std::string& file_name, unsigned decryption_config_num, - unsigned encryption_config_num, bool file_has_page_index) { + unsigned encryption_config_num) { // Encryption_configuration number five contains aad_prefix and // disable_aad_prefix_storage. // An exception is expected to be thrown if the file is not decrypted with aad_prefix. 
if (encryption_config_num == 5) { if (decryption_config_num == 1 || decryption_config_num == 3) { EXPECT_THROW(DecryptFile(file_name, decryption_config_num - 1), ParquetException); - if (file_has_page_index) { - EXPECT_THROW(DecryptPageIndex(file_name, decryption_config_num - 1), - ParquetException); - } + EXPECT_THROW(DecryptPageIndex(file_name, decryption_config_num - 1), + ParquetException); return; } } @@ -222,10 +220,8 @@ class TestDecryptionConfiguration if (decryption_config_num == 2) { if (encryption_config_num != 5 && encryption_config_num != 4) { EXPECT_THROW(DecryptFile(file_name, decryption_config_num - 1), ParquetException); - if (file_has_page_index) { - EXPECT_THROW(DecryptPageIndex(file_name, decryption_config_num - 1), - ParquetException); - } + EXPECT_THROW(DecryptPageIndex(file_name, decryption_config_num - 1), + ParquetException); return; } } @@ -235,9 +231,7 @@ class TestDecryptionConfiguration return; } EXPECT_NO_THROW(DecryptFile(file_name, decryption_config_num - 1)); - if (file_has_page_index) { - EXPECT_NO_THROW(DecryptPageIndex(file_name, decryption_config_num - 1)); - } + EXPECT_NO_THROW(DecryptPageIndex(file_name, decryption_config_num - 1)); } // Returns true if file exists. Otherwise returns false. @@ -269,8 +263,7 @@ TEST_P(TestDecryptionConfiguration, TestDecryption) { // parquet file. for (unsigned index = 0; index < vector_of_decryption_configurations_.size(); ++index) { unsigned decryption_config_num = index + 1; - CheckResults(file_name, decryption_config_num, encryption_config_num, - /*file_has_page_index=*/true); + CheckResults(file_name, decryption_config_num, encryption_config_num); } // Delete temporary test file. ASSERT_EQ(std::remove(file_name.c_str()), 0); @@ -288,8 +281,7 @@ TEST_P(TestDecryptionConfiguration, TestDecryption) { // parquet file. 
for (unsigned index = 0; index < vector_of_decryption_configurations_.size(); ++index) { unsigned decryption_config_num = index + 1; - CheckResults(file_name, decryption_config_num, encryption_config_num, - /*file_has_page_index=*/false); + CheckResults(file_name, decryption_config_num, encryption_config_num); } } diff --git a/cpp/src/parquet/file_writer.cc b/cpp/src/parquet/file_writer.cc index 9a92d4525d23d..5502e1f94a9d0 100644 --- a/cpp/src/parquet/file_writer.cc +++ b/cpp/src/parquet/file_writer.cc @@ -656,7 +656,11 @@ void ParquetFileWriter::AddKeyValueMetadata( } const std::shared_ptr& ParquetFileWriter::properties() const { - return contents_->properties(); + if (contents_) { + return contents_->properties(); + } else { + throw ParquetException("Cannot get properties from closed file"); + } } } // namespace parquet diff --git a/cpp/src/parquet/page_index_test.cc b/cpp/src/parquet/page_index_test.cc index 5bfe38522af7b..4db49b4267415 100644 --- a/cpp/src/parquet/page_index_test.cc +++ b/cpp/src/parquet/page_index_test.cc @@ -21,6 +21,7 @@ #include #include "arrow/io/file.h" +#include "arrow/util/float16.h" #include "parquet/file_reader.h" #include "parquet/metadata.h" #include "parquet/schema.h" @@ -579,6 +580,27 @@ TEST(PageIndex, WriteFLBAColumnIndex) { /*has_null_counts=*/false); } +TEST(PageIndex, WriteFloat16ColumnIndex) { + using ::arrow::util::Float16; + auto encode = [](auto value) { + auto bytes = Float16(value).ToLittleEndian(); + return std::string(reinterpret_cast(bytes.data()), bytes.size()); + }; + + // Float16 (FLBA) values in the ascending order and without null count. 
+ std::vector page_stats(4); + page_stats.at(0).set_min(encode(-1.3)).set_max(encode(+3.6)); + page_stats.at(1).set_min(encode(-0.2)).set_max(encode(+4.5)); + page_stats.at(2).set_min(encode(+1.1)).set_max(encode(+5.4)); + page_stats.at(3).set_min(encode(+2.0)).set_max(encode(+6.3)); + + auto node = schema::PrimitiveNode::Make( + "c1", Repetition::OPTIONAL, LogicalType::Float16(), Type::FIXED_LEN_BYTE_ARRAY, + /*length=*/2); + TestWriteTypedColumnIndex(std::move(node), page_stats, BoundaryOrder::Ascending, + /*has_null_counts=*/false); +} + TEST(PageIndex, WriteColumnIndexWithAllNullPages) { // All values are null. std::vector page_stats(3); diff --git a/cpp/src/parquet/parquet.thrift b/cpp/src/parquet/parquet.thrift index 88e44c96cc24c..d802166be66e8 100644 --- a/cpp/src/parquet/parquet.thrift +++ b/cpp/src/parquet/parquet.thrift @@ -234,6 +234,7 @@ struct MapType {} // see LogicalTypes.md struct ListType {} // see LogicalTypes.md struct EnumType {} // allowed for BINARY, must be encoded with UTF-8 struct DateType {} // allowed for INT32 +struct Float16Type{} // allowed for FIXED[2], must encode raw FLOAT16 bytes /** * Logical type to annotate a column that is always null. 
@@ -344,6 +345,7 @@ union LogicalType { 12: JsonType JSON // use ConvertedType JSON 13: BsonType BSON // use ConvertedType BSON 14: UUIDType UUID // no compatible ConvertedType + 15: Float16Type FLOAT16 // no compatible ConvertedType } /** diff --git a/cpp/src/parquet/schema_test.cc b/cpp/src/parquet/schema_test.cc index 603d9ed8e2124..a1b5557497d9c 100644 --- a/cpp/src/parquet/schema_test.cc +++ b/cpp/src/parquet/schema_test.cc @@ -1147,6 +1147,9 @@ TEST(TestLogicalTypeConstruction, NewTypeIncompatibility) { auto check_is_UUID = [](const std::shared_ptr& logical_type) { return logical_type->is_UUID(); }; + auto check_is_float16 = [](const std::shared_ptr& logical_type) { + return logical_type->is_float16(); + }; auto check_is_null = [](const std::shared_ptr& logical_type) { return logical_type->is_null(); }; @@ -1159,6 +1162,7 @@ TEST(TestLogicalTypeConstruction, NewTypeIncompatibility) { std::vector cases = { {LogicalType::UUID(), check_is_UUID}, + {LogicalType::Float16(), check_is_float16}, {LogicalType::Null(), check_is_null}, {LogicalType::Time(false, LogicalType::TimeUnit::MILLIS), check_is_time}, {LogicalType::Time(false, LogicalType::TimeUnit::MICROS), check_is_time}, @@ -1242,6 +1246,7 @@ TEST(TestLogicalTypeOperation, LogicalTypeProperties) { {JSONLogicalType::Make(), false, true, true}, {BSONLogicalType::Make(), false, true, true}, {UUIDLogicalType::Make(), false, true, true}, + {Float16LogicalType::Make(), false, true, true}, {NoLogicalType::Make(), false, false, true}, }; @@ -1351,7 +1356,8 @@ TEST(TestLogicalTypeOperation, LogicalTypeApplicability) { int physical_length; }; - std::vector inapplicable_types = {{Type::FIXED_LEN_BYTE_ARRAY, 8}, + std::vector inapplicable_types = {{Type::FIXED_LEN_BYTE_ARRAY, 1}, + {Type::FIXED_LEN_BYTE_ARRAY, 8}, {Type::FIXED_LEN_BYTE_ARRAY, 20}, {Type::BOOLEAN, -1}, {Type::INT32, -1}, @@ -1374,6 +1380,12 @@ TEST(TestLogicalTypeOperation, LogicalTypeApplicability) { for (const InapplicableType& t : inapplicable_types) { 
ASSERT_FALSE(logical_type->is_applicable(t.physical_type, t.physical_length)); } + + logical_type = LogicalType::Float16(); + ASSERT_TRUE(logical_type->is_applicable(Type::FIXED_LEN_BYTE_ARRAY, 2)); + for (const InapplicableType& t : inapplicable_types) { + ASSERT_FALSE(logical_type->is_applicable(t.physical_type, t.physical_length)); + } } TEST(TestLogicalTypeOperation, DecimalLogicalTypeApplicability) { @@ -1531,6 +1543,7 @@ TEST(TestLogicalTypeOperation, LogicalTypeRepresentation) { {LogicalType::JSON(), "JSON", R"({"Type": "JSON"})"}, {LogicalType::BSON(), "BSON", R"({"Type": "BSON"})"}, {LogicalType::UUID(), "UUID", R"({"Type": "UUID"})"}, + {LogicalType::Float16(), "Float16", R"({"Type": "Float16"})"}, {LogicalType::None(), "None", R"({"Type": "None"})"}, }; @@ -1580,6 +1593,7 @@ TEST(TestLogicalTypeOperation, LogicalTypeSortOrder) { {LogicalType::JSON(), SortOrder::UNSIGNED}, {LogicalType::BSON(), SortOrder::UNSIGNED}, {LogicalType::UUID(), SortOrder::UNSIGNED}, + {LogicalType::Float16(), SortOrder::SIGNED}, {LogicalType::None(), SortOrder::UNKNOWN}}; for (const ExpectedSortOrder& c : cases) { @@ -1712,6 +1726,15 @@ TEST(TestSchemaNodeCreation, FactoryExceptions) { ASSERT_ANY_THROW(PrimitiveNode::Make("uuid", Repetition::REQUIRED, UUIDLogicalType::Make(), Type::FIXED_LEN_BYTE_ARRAY, 64)); + + // Incompatible primitive type ... + ASSERT_ANY_THROW(PrimitiveNode::Make("float16", Repetition::REQUIRED, + Float16LogicalType::Make(), Type::BYTE_ARRAY, 2)); + // Incompatible primitive length ... + ASSERT_ANY_THROW(PrimitiveNode::Make("float16", Repetition::REQUIRED, + Float16LogicalType::Make(), + Type::FIXED_LEN_BYTE_ARRAY, 3)); + // Non-positive length argument for fixed length binary ... 
ASSERT_ANY_THROW(PrimitiveNode::Make("negative_length", Repetition::REQUIRED, NoLogicalType::Make(), Type::FIXED_LEN_BYTE_ARRAY, @@ -1902,6 +1925,9 @@ TEST_F(TestSchemaElementConstruction, SimpleCases) { [this]() { return element_->logicalType.__isset.BSON; }}, {"uuid", LogicalType::UUID(), Type::FIXED_LEN_BYTE_ARRAY, 16, false, ConvertedType::NA, true, [this]() { return element_->logicalType.__isset.UUID; }}, + {"float16", LogicalType::Float16(), Type::FIXED_LEN_BYTE_ARRAY, 2, false, + ConvertedType::NA, true, + [this]() { return element_->logicalType.__isset.FLOAT16; }}, {"none", LogicalType::None(), Type::INT64, -1, false, ConvertedType::NA, false, check_nothing}}; @@ -2238,6 +2264,7 @@ TEST(TestLogicalTypeSerialization, Roundtrips) { {LogicalType::JSON(), Type::BYTE_ARRAY, -1}, {LogicalType::BSON(), Type::BYTE_ARRAY, -1}, {LogicalType::UUID(), Type::FIXED_LEN_BYTE_ARRAY, 16}, + {LogicalType::Float16(), Type::FIXED_LEN_BYTE_ARRAY, 2}, {LogicalType::None(), Type::BOOLEAN, -1}}; for (const AnnotatedPrimitiveNodeFactoryArguments& c : cases) { diff --git a/cpp/src/parquet/statistics.cc b/cpp/src/parquet/statistics.cc index ccfb69c487d40..37b245e0dd6c2 100644 --- a/cpp/src/parquet/statistics.cc +++ b/cpp/src/parquet/statistics.cc @@ -30,6 +30,7 @@ #include "arrow/type_traits.h" #include "arrow/util/bit_run_reader.h" #include "arrow/util/checked_cast.h" +#include "arrow/util/float16.h" #include "arrow/util/logging.h" #include "arrow/util/ubsan.h" #include "arrow/visit_data_inline.h" @@ -41,6 +42,7 @@ using arrow::default_memory_pool; using arrow::MemoryPool; using arrow::internal::checked_cast; +using arrow::util::Float16; using arrow::util::SafeCopy; using arrow::util::SafeLoad; @@ -53,6 +55,23 @@ namespace { constexpr int value_length(int value_length, const ByteArray& value) { return value.len; } constexpr int value_length(int type_length, const FLBA& value) { return type_length; } +// Static "constants" for normalizing float16 min/max values. 
These need to be expressed +// as pointers because `Float16LogicalType` represents an FLBA. +struct Float16Constants { + static constexpr const uint8_t* lowest() { return lowest_.data(); } + static constexpr const uint8_t* max() { return max_.data(); } + static constexpr const uint8_t* positive_zero() { return positive_zero_.data(); } + static constexpr const uint8_t* negative_zero() { return negative_zero_.data(); } + + private: + using Bytes = std::array; + static constexpr Bytes lowest_ = + std::numeric_limits::lowest().ToLittleEndian(); + static constexpr Bytes max_ = std::numeric_limits::max().ToLittleEndian(); + static constexpr Bytes positive_zero_ = (+Float16::FromBits(0)).ToLittleEndian(); + static constexpr Bytes negative_zero_ = (-Float16::FromBits(0)).ToLittleEndian(); +}; + template struct CompareHelper { using T = typename DType::c_type; @@ -277,11 +296,43 @@ template struct CompareHelper : public BinaryLikeCompareHelperBase {}; +template <> +struct CompareHelper { + using T = FLBA; + + static T DefaultMin() { return T{Float16Constants::max()}; } + static T DefaultMax() { return T{Float16Constants::lowest()}; } + + static T Coalesce(T val, T fallback) { + return (val.ptr == nullptr || Float16::FromLittleEndian(val.ptr).is_nan()) ? fallback + : val; + } + + static inline bool Compare(int type_length, const T& a, const T& b) { + const auto lhs = Float16::FromLittleEndian(a.ptr); + const auto rhs = Float16::FromLittleEndian(b.ptr); + // NaN is handled here (same behavior as native float compare) + return lhs < rhs; + } + + static T Min(int type_length, const T& a, const T& b) { + if (a.ptr == nullptr) return b; + if (b.ptr == nullptr) return a; + return Compare(type_length, a, b) ? a : b; + } + + static T Max(int type_length, const T& a, const T& b) { + if (a.ptr == nullptr) return b; + if (b.ptr == nullptr) return a; + return Compare(type_length, a, b) ? 
b : a; + } +}; + using ::std::optional; template ::arrow::enable_if_t::value, optional>> -CleanStatistic(std::pair min_max) { +CleanStatistic(std::pair min_max, LogicalType::Type::type) { return min_max; } @@ -292,7 +343,7 @@ CleanStatistic(std::pair min_max) { // - If max is -0.0f, replace with 0.0f template ::arrow::enable_if_t::value, optional>> -CleanStatistic(std::pair min_max) { +CleanStatistic(std::pair min_max, LogicalType::Type::type) { T min = min_max.first; T max = min_max.second; @@ -318,25 +369,67 @@ CleanStatistic(std::pair min_max) { return {{min, max}}; } -optional> CleanStatistic(std::pair min_max) { +optional> CleanFloat16Statistic(std::pair min_max) { + FLBA min_flba = min_max.first; + FLBA max_flba = min_max.second; + Float16 min = Float16::FromLittleEndian(min_flba.ptr); + Float16 max = Float16::FromLittleEndian(max_flba.ptr); + + if (min.is_nan() || max.is_nan()) { + return ::std::nullopt; + } + + if (min == std::numeric_limits::max() && + max == std::numeric_limits::lowest()) { + return ::std::nullopt; + } + + if (min.is_zero() && !min.signbit()) { + min_flba = FLBA{Float16Constants::negative_zero()}; + } + if (max.is_zero() && max.signbit()) { + max_flba = FLBA{Float16Constants::positive_zero()}; + } + + return {{min_flba, max_flba}}; +} + +optional> CleanStatistic(std::pair min_max, + LogicalType::Type::type logical_type) { if (min_max.first.ptr == nullptr || min_max.second.ptr == nullptr) { return ::std::nullopt; } + if (logical_type == LogicalType::Type::FLOAT16) { + return CleanFloat16Statistic(std::move(min_max)); + } return min_max; } optional> CleanStatistic( - std::pair min_max) { + std::pair min_max, LogicalType::Type::type) { if (min_max.first.ptr == nullptr || min_max.second.ptr == nullptr) { return ::std::nullopt; } return min_max; } +template +struct RebindLogical { + using DType = T; + using c_type = typename DType::c_type; +}; + +template <> +struct RebindLogical { + using DType = FLBAType; + using c_type = DType::c_type; +}; 
+ template -class TypedComparatorImpl : virtual public TypedComparator { +class TypedComparatorImpl + : virtual public TypedComparator::DType> { public: - using T = typename DType::c_type; + using T = typename RebindLogical::c_type; using Helper = CompareHelper; explicit TypedComparatorImpl(int type_length = -1) : type_length_(type_length) {} @@ -384,7 +477,9 @@ class TypedComparatorImpl : virtual public TypedComparator { return {min, max}; } - std::pair GetMinMax(const ::arrow::Array& values) override; + std::pair GetMinMax(const ::arrow::Array& values) override { + ParquetException::NYI(values.type()->ToString()); + } private: int type_length_; @@ -412,12 +507,6 @@ TypedComparatorImpl::GetMinMax(const int32_t* va return {SafeCopy(min), SafeCopy(max)}; } -template -std::pair -TypedComparatorImpl::GetMinMax(const ::arrow::Array& values) { - ParquetException::NYI(values.type()->ToString()); -} - template std::pair GetMinMaxBinaryHelper( const TypedComparatorImpl& comparator, @@ -458,6 +547,16 @@ std::pair TypedComparatorImpl::GetMi return GetMinMaxBinaryHelper(*this, values); } +LogicalType::Type::type LogicalTypeId(const ColumnDescriptor* descr) { + if (const auto& logical_type = descr->logical_type()) { + return logical_type->type(); + } + return LogicalType::Type::NONE; +} +LogicalType::Type::type LogicalTypeId(const Statistics& stats) { + return LogicalTypeId(stats.descr()); +} + template class TypedStatisticsImpl : public TypedStatistics { public: @@ -468,9 +567,9 @@ class TypedStatisticsImpl : public TypedStatistics { : descr_(descr), pool_(pool), min_buffer_(AllocateBuffer(pool_, 0)), - max_buffer_(AllocateBuffer(pool_, 0)) { - auto comp = Comparator::Make(descr); - comparator_ = std::static_pointer_cast>(comp); + max_buffer_(AllocateBuffer(pool_, 0)), + logical_type_(LogicalTypeId(descr_)) { + comparator_ = MakeComparator(descr); TypedStatisticsImpl::Reset(); } @@ -527,9 +626,27 @@ class TypedStatisticsImpl : public TypedStatistics { void 
IncrementNumValues(int64_t n) override { num_values_ += n; } + static bool IsMeaningfulLogicalType(LogicalType::Type::type type) { + switch (type) { + case LogicalType::Type::FLOAT16: + return true; + default: + return false; + } + } + bool Equals(const Statistics& raw_other) const override { if (physical_type() != raw_other.physical_type()) return false; + const auto other_logical_type = LogicalTypeId(raw_other); + // Only compare against logical types that influence the interpretation of the + // physical type + if (IsMeaningfulLogicalType(logical_type_)) { + if (logical_type_ != other_logical_type) return false; + } else if (IsMeaningfulLogicalType(other_logical_type)) { + return false; + } + const auto& other = checked_cast(raw_other); if (has_min_max_ != other.has_min_max_) return false; @@ -655,6 +772,7 @@ class TypedStatisticsImpl : public TypedStatistics { EncodedStatistics statistics_; std::shared_ptr> comparator_; std::shared_ptr min_buffer_, max_buffer_; + LogicalType::Type::type logical_type_ = LogicalType::Type::NONE; void PlainEncode(const T& src, std::string* dst) const; void PlainDecode(const std::string& src, T* dst) const; @@ -686,7 +804,7 @@ class TypedStatisticsImpl : public TypedStatistics { void SetMinMaxPair(std::pair min_max) { // CleanStatistic can return a nullopt in case of erroneous values, e.g. 
NaN - auto maybe_min_max = CleanStatistic(min_max); + auto maybe_min_max = CleanStatistic(min_max, logical_type_); if (!maybe_min_max) return; auto min = maybe_min_max.value().first; @@ -795,12 +913,8 @@ void TypedStatisticsImpl::PlainDecode(const std::string& src, dst->ptr = reinterpret_cast(src.c_str()); } -} // namespace - -// ---------------------------------------------------------------------- -// Public factory functions - -std::shared_ptr Comparator::Make(Type::type physical_type, +std::shared_ptr DoMakeComparator(Type::type physical_type, + LogicalType::Type::type logical_type, SortOrder::type sort_order, int type_length) { if (SortOrder::SIGNED == sort_order) { @@ -820,6 +934,10 @@ std::shared_ptr Comparator::Make(Type::type physical_type, case Type::BYTE_ARRAY: return std::make_shared>(); case Type::FIXED_LEN_BYTE_ARRAY: + if (logical_type == LogicalType::Type::FLOAT16) { + return std::make_shared>( + type_length); + } return std::make_shared>(type_length); default: ParquetException::NYI("Signed Compare not implemented"); @@ -845,8 +963,21 @@ std::shared_ptr Comparator::Make(Type::type physical_type, return nullptr; } +} // namespace + +// ---------------------------------------------------------------------- +// Public factory functions + +std::shared_ptr Comparator::Make(Type::type physical_type, + SortOrder::type sort_order, + int type_length) { + return DoMakeComparator(physical_type, LogicalType::Type::NONE, sort_order, + type_length); +} + std::shared_ptr Comparator::Make(const ColumnDescriptor* descr) { - return Make(descr->physical_type(), descr->sort_order(), descr->type_length()); + return DoMakeComparator(descr->physical_type(), LogicalTypeId(descr), + descr->sort_order(), descr->type_length()); } std::shared_ptr Statistics::Make(const ColumnDescriptor* descr, diff --git a/cpp/src/parquet/statistics_test.cc b/cpp/src/parquet/statistics_test.cc index 637832945ec57..cb2e6455abfa9 100644 --- a/cpp/src/parquet/statistics_test.cc +++ 
b/cpp/src/parquet/statistics_test.cc @@ -34,6 +34,7 @@ #include "arrow/type_traits.h" #include "arrow/util/bit_util.h" #include "arrow/util/bitmap_ops.h" +#include "arrow/util/float16.h" #include "arrow/util/ubsan.h" #include "parquet/column_reader.h" @@ -49,6 +50,7 @@ using arrow::default_memory_pool; using arrow::MemoryPool; +using arrow::util::Float16; using arrow::util::SafeCopy; namespace bit_util = arrow::bit_util; @@ -875,9 +877,22 @@ TEST(CorrectStatistics, Basics) { // Test SortOrder class static const int NUM_VALUES = 10; -template +template +struct RebindLogical { + using ParquetType = T; + using CType = typename T::c_type; +}; + +template <> +struct RebindLogical { + using ParquetType = FLBAType; + using CType = ParquetType::c_type; +}; + +template class TestStatisticsSortOrder : public ::testing::Test { public: + using TestType = typename RebindLogical::ParquetType; using c_type = typename TestType::c_type; void SetUp() override { @@ -955,7 +970,7 @@ class TestStatisticsSortOrder : public ::testing::Test { }; using CompareTestTypes = ::testing::Types; + ByteArrayType, FLBAType, Float16LogicalType>; // TYPE::INT32 template <> @@ -1102,6 +1117,39 @@ void TestStatisticsSortOrder::SetValues() { .set_max(std::string(reinterpret_cast(&vals[8][0]), FLBA_LENGTH)); } +template <> +void TestStatisticsSortOrder::AddNodes(std::string name) { + auto node = + schema::PrimitiveNode::Make(name, Repetition::REQUIRED, LogicalType::Float16(), + Type::FIXED_LEN_BYTE_ARRAY, sizeof(uint16_t)); + fields_.push_back(std::move(node)); +} + +template <> +void TestStatisticsSortOrder::SetValues() { + constexpr int kValueLen = 2; + constexpr int kNumBytes = NUM_VALUES * kValueLen; + + const Float16 f16_vals[NUM_VALUES] = { + Float16::FromFloat(+2.0f), Float16::FromFloat(-4.0f), Float16::FromFloat(+4.0f), + Float16::FromFloat(-2.0f), Float16::FromFloat(-1.0f), Float16::FromFloat(+3.0f), + Float16::FromFloat(+1.0f), Float16::FromFloat(-5.0f), Float16::FromFloat(+0.0f), + 
Float16::FromFloat(-3.0f), + }; + + values_buf_.resize(kNumBytes); + uint8_t* ptr = values_buf_.data(); + for (int i = 0; i < NUM_VALUES; ++i) { + f16_vals[i].ToLittleEndian(ptr); + values_[i].ptr = ptr; + ptr += kValueLen; + } + + stats_[0] + .set_min(std::string(reinterpret_cast(values_[7].ptr), kValueLen)) + .set_max(std::string(reinterpret_cast(values_[2].ptr), kValueLen)); +} + TYPED_TEST_SUITE(TestStatisticsSortOrder, CompareTestTypes); TYPED_TEST(TestStatisticsSortOrder, MinMax) { @@ -1167,12 +1215,20 @@ TEST_F(TestStatisticsSortOrderFLBA, UnknownSortOrder) { ASSERT_FALSE(cc_metadata->is_stats_set()); } +template +static std::string EncodeValue(const T& val) { + return std::string(reinterpret_cast(&val), sizeof(val)); +} +static std::string EncodeValue(const FLBA& val, int length = sizeof(uint16_t)) { + return std::string(reinterpret_cast(val.ptr), length); +} + template void AssertMinMaxAre(Stats stats, const Array& values, T expected_min, T expected_max) { stats->Update(values.data(), values.size(), 0); ASSERT_TRUE(stats->HasMinMax()); - EXPECT_EQ(stats->min(), expected_min); - EXPECT_EQ(stats->max(), expected_max); + EXPECT_EQ(stats->EncodeMin(), EncodeValue(expected_min)); + EXPECT_EQ(stats->EncodeMax(), EncodeValue(expected_max)); } template @@ -1184,8 +1240,8 @@ void AssertMinMaxAre(Stats stats, const Array& values, const uint8_t* valid_bitm stats->UpdateSpaced(values.data(), valid_bitmap, 0, non_null_count + null_count, non_null_count, null_count); ASSERT_TRUE(stats->HasMinMax()); - EXPECT_EQ(stats->min(), expected_min); - EXPECT_EQ(stats->max(), expected_max); + EXPECT_EQ(stats->EncodeMin(), EncodeValue(expected_min)); + EXPECT_EQ(stats->EncodeMax(), EncodeValue(expected_max)); } template @@ -1268,50 +1324,225 @@ void CheckExtrema() { TEST(TestStatistic, Int32Extrema) { CheckExtrema(); } TEST(TestStatistic, Int64Extrema) { CheckExtrema(); } -// PARQUET-1225: Float NaN values may lead to incorrect min-max -template -void CheckNaNs() { - using T = 
typename ParquetType::c_type; +template +class TestFloatStatistics : public ::testing::Test { + public: + using ParquetType = typename RebindLogical::ParquetType; + using c_type = typename ParquetType::c_type; + + void Init(); + void SetUp() override { + this->Init(); + ASSERT_NE(EncodeValue(negative_zero_), EncodeValue(positive_zero_)); + } + + bool signbit(c_type val); + void CheckEq(const c_type& l, const c_type& r); + NodePtr MakeNode(const std::string& name, Repetition::type rep); + + template + void CheckMinMaxZeroesSign(Stats stats, const Values& values) { + stats->Update(values.data(), values.size(), /*null_count=*/0); + ASSERT_TRUE(stats->HasMinMax()); + + this->CheckEq(stats->min(), positive_zero_); + ASSERT_TRUE(this->signbit(stats->min())); + ASSERT_EQ(stats->EncodeMin(), EncodeValue(negative_zero_)); + + this->CheckEq(stats->max(), positive_zero_); + ASSERT_FALSE(this->signbit(stats->max())); + ASSERT_EQ(stats->EncodeMax(), EncodeValue(positive_zero_)); + } + + // ARROW-5562: Ensure that -0.0f and 0.0f values are properly handled like in + // parquet-mr + void TestNegativeZeroes() { + NodePtr node = this->MakeNode("f", Repetition::OPTIONAL); + ColumnDescriptor descr(node, 1, 1); + + { + std::array values{negative_zero_, positive_zero_}; + auto stats = MakeStatistics(&descr); + CheckMinMaxZeroesSign(stats, values); + } + + { + std::array values{positive_zero_, negative_zero_}; + auto stats = MakeStatistics(&descr); + CheckMinMaxZeroesSign(stats, values); + } + + { + std::array values{negative_zero_, negative_zero_}; + auto stats = MakeStatistics(&descr); + CheckMinMaxZeroesSign(stats, values); + } + + { + std::array values{positive_zero_, positive_zero_}; + auto stats = MakeStatistics(&descr); + CheckMinMaxZeroesSign(stats, values); + } + } + + // PARQUET-1225: Float NaN values may lead to incorrect min-max + template + void CheckNaNs(ColumnDescriptor* descr, const Values& all_nans, const Values& some_nans, + const Values& other_nans, c_type min, c_type 
max, uint8_t valid_bitmap, + uint8_t valid_bitmap_no_nans) { + auto some_nan_stats = MakeStatistics(descr); + // Ingesting only nans should not yield valid min max + AssertUnsetMinMax(some_nan_stats, all_nans); + // Ingesting a mix of NaNs and non-NaNs should yield a valid min max. + AssertMinMaxAre(some_nan_stats, some_nans, min, max); + // Ingesting only nans after a valid min/max, should have no effect + AssertMinMaxAre(some_nan_stats, all_nans, min, max); + + some_nan_stats = MakeStatistics(descr); + AssertUnsetMinMax(some_nan_stats, all_nans, &valid_bitmap); + // NaNs should not pollute min max when excluded via null bitmap. + AssertMinMaxAre(some_nan_stats, some_nans, &valid_bitmap_no_nans, min, max); + // Ingesting NaNs with a null bitmap should not change the result. + AssertMinMaxAre(some_nan_stats, some_nans, &valid_bitmap, min, max); + + // An array that doesn't start with NaN + auto other_stats = MakeStatistics(descr); + AssertMinMaxAre(other_stats, other_nans, min, max); + } + + void TestNaNs(); + + protected: + std::vector data_buf_; + c_type positive_zero_; + c_type negative_zero_; +}; + +template +void TestFloatStatistics::Init() { + positive_zero_ = c_type{}; + negative_zero_ = -positive_zero_; +} +template <> +void TestFloatStatistics::Init() { + data_buf_.resize(4); + (+Float16(0)).ToLittleEndian(&data_buf_[0]); + positive_zero_ = FLBA{&data_buf_[0]}; + (-Float16(0)).ToLittleEndian(&data_buf_[2]); + negative_zero_ = FLBA{&data_buf_[2]}; +} + +template +NodePtr TestFloatStatistics::MakeNode(const std::string& name, Repetition::type rep) { + return PrimitiveNode::Make(name, rep, ParquetType::type_num); +} +template <> +NodePtr TestFloatStatistics::MakeNode(const std::string& name, + Repetition::type rep) { + return PrimitiveNode::Make(name, rep, LogicalType::Float16(), + Type::FIXED_LEN_BYTE_ARRAY, 2); +} + +template +void TestFloatStatistics::CheckEq(const c_type& l, const c_type& r) { + ASSERT_EQ(l, r); +} +template <> +void 
TestFloatStatistics::CheckEq(const c_type& a, const c_type& b) { + auto l = Float16::FromLittleEndian(a.ptr); + auto r = Float16::FromLittleEndian(b.ptr); + ASSERT_EQ(l, r); +} +template +bool TestFloatStatistics::signbit(c_type val) { + return std::signbit(val); +} +template <> +bool TestFloatStatistics::signbit(c_type val) { + return Float16::FromLittleEndian(val.ptr).signbit(); +} + +template +void TestFloatStatistics::TestNaNs() { constexpr int kNumValues = 8; - NodePtr node = PrimitiveNode::Make("f", Repetition::OPTIONAL, ParquetType::type_num); + NodePtr node = this->MakeNode("f", Repetition::OPTIONAL); ColumnDescriptor descr(node, 1, 1); - constexpr T nan = std::numeric_limits::quiet_NaN(); - constexpr T min = -4.0f; - constexpr T max = 3.0f; + constexpr c_type nan = std::numeric_limits::quiet_NaN(); + constexpr c_type min = -4.0f; + constexpr c_type max = 3.0f; + + std::array all_nans{nan, nan, nan, nan, nan, nan, nan, nan}; + std::array some_nans{nan, max, -3.0f, -1.0f, nan, 2.0f, min, nan}; + std::array other_nans{1.5f, max, -3.0f, -1.0f, nan, 2.0f, min, nan}; - std::array all_nans{nan, nan, nan, nan, nan, nan, nan, nan}; - std::array some_nans{nan, max, -3.0f, -1.0f, nan, 2.0f, min, nan}; uint8_t valid_bitmap = 0x7F; // 0b01111111 // NaNs excluded uint8_t valid_bitmap_no_nans = 0x6E; // 0b01101110 - // Test values - auto some_nan_stats = MakeStatistics(&descr); - // Ingesting only nans should not yield valid min max - AssertUnsetMinMax(some_nan_stats, all_nans); - // Ingesting a mix of NaNs and non-NaNs should not yield valid min max. 
- AssertMinMaxAre(some_nan_stats, some_nans, min, max); - // Ingesting only nans after a valid min/max, should have not effect - AssertMinMaxAre(some_nan_stats, all_nans, min, max); + this->CheckNaNs(&descr, all_nans, some_nans, other_nans, min, max, valid_bitmap, + valid_bitmap_no_nans); +} - some_nan_stats = MakeStatistics(&descr); - AssertUnsetMinMax(some_nan_stats, all_nans, &valid_bitmap); - // NaNs should not pollute min max when excluded via null bitmap. - AssertMinMaxAre(some_nan_stats, some_nans, &valid_bitmap_no_nans, min, max); - // Ingesting NaNs with a null bitmap should not change the result. - AssertMinMaxAre(some_nan_stats, some_nans, &valid_bitmap, min, max); +struct BufferedFloat16 { + explicit BufferedFloat16(Float16 f16) : f16(f16) { + this->f16.ToLittleEndian(bytes_.data()); + } + explicit BufferedFloat16(float f) : BufferedFloat16(Float16::FromFloat(f)) {} + const uint8_t* bytes() const { return bytes_.data(); } + + Float16 f16; + std::array bytes_; +}; + +template <> +void TestFloatStatistics::TestNaNs() { + constexpr int kNumValues = 8; + + NodePtr node = this->MakeNode("f", Repetition::OPTIONAL); + ColumnDescriptor descr(node, 1, 1); + + using F16 = BufferedFloat16; + const auto nan_f16 = F16(std::numeric_limits::quiet_NaN()); + const auto min_f16 = F16(-4.0f); + const auto max_f16 = F16(+3.0f); + + const auto min = FLBA{min_f16.bytes()}; + const auto max = FLBA{max_f16.bytes()}; + + std::array all_nans_f16 = {nan_f16, nan_f16, nan_f16, nan_f16, + nan_f16, nan_f16, nan_f16, nan_f16}; + std::array some_nans_f16 = { + nan_f16, max_f16, F16(-3.0f), F16(-1.0f), nan_f16, F16(+2.0f), min_f16, nan_f16}; + std::array other_nans_f16 = some_nans_f16; + other_nans_f16[0] = F16(+1.5f); // +1.5 + + auto prepare_values = [](const auto& values) -> std::vector { + std::vector out(values.size()); + std::transform(values.begin(), values.end(), out.begin(), + [](const F16& f16) { return FLBA{f16.bytes()}; }); + return out; + }; + + auto all_nans = 
prepare_values(all_nans_f16); + auto some_nans = prepare_values(some_nans_f16); + auto other_nans = prepare_values(other_nans_f16); + + uint8_t valid_bitmap = 0x7F; // 0b01111111 + // NaNs excluded + uint8_t valid_bitmap_no_nans = 0x6E; // 0b01101110 - // An array that doesn't start with NaN - std::array other_nans{1.5f, max, -3.0f, -1.0f, nan, 2.0f, min, nan}; - auto other_stats = MakeStatistics(&descr); - AssertMinMaxAre(other_stats, other_nans, min, max); + this->CheckNaNs(&descr, all_nans, some_nans, other_nans, min, max, valid_bitmap, + valid_bitmap_no_nans); } -TEST(TestStatistic, NaNFloatValues) { CheckNaNs(); } +using FloatingPointTypes = ::testing::Types; + +TYPED_TEST_SUITE(TestFloatStatistics, FloatingPointTypes); -TEST(TestStatistic, NaNDoubleValues) { CheckNaNs(); } +TYPED_TEST(TestFloatStatistics, NegativeZeros) { this->TestNegativeZeroes(); } +TYPED_TEST(TestFloatStatistics, NaNs) { this->TestNaNs(); } // ARROW-7376 TEST(TestStatisticsSortOrderFloatNaN, NaNAndNullsInfiniteLoop) { @@ -1327,58 +1558,6 @@ TEST(TestStatisticsSortOrderFloatNaN, NaNAndNullsInfiniteLoop) { AssertUnsetMinMax(stats, nans_but_last, &all_but_last_valid); } -template -void AssertMinMaxZeroesSign(Stats stats, const Array& values) { - stats->Update(values.data(), values.size(), 0); - ASSERT_TRUE(stats->HasMinMax()); - - T zero{}; - ASSERT_EQ(stats->min(), zero); - ASSERT_TRUE(std::signbit(stats->min())); - - ASSERT_EQ(stats->max(), zero); - ASSERT_FALSE(std::signbit(stats->max())); -} - -// ARROW-5562: Ensure that -0.0f and 0.0f values are properly handled like in -// parquet-mr -template -void CheckNegativeZeroStats() { - using T = typename ParquetType::c_type; - - NodePtr node = PrimitiveNode::Make("f", Repetition::OPTIONAL, ParquetType::type_num); - ColumnDescriptor descr(node, 1, 1); - T zero{}; - - { - std::array values{-zero, zero}; - auto stats = MakeStatistics(&descr); - AssertMinMaxZeroesSign(stats, values); - } - - { - std::array values{zero, -zero}; - auto stats = 
MakeStatistics(&descr); - AssertMinMaxZeroesSign(stats, values); - } - - { - std::array values{-zero, -zero}; - auto stats = MakeStatistics(&descr); - AssertMinMaxZeroesSign(stats, values); - } - - { - std::array values{zero, zero}; - auto stats = MakeStatistics(&descr); - AssertMinMaxZeroesSign(stats, values); - } -} - -TEST(TestStatistics, FloatNegativeZero) { CheckNegativeZeroStats(); } - -TEST(TestStatistics, DoubleNegativeZero) { CheckNegativeZeroStats(); } - // Test statistics for binary column with UNSIGNED sort order TEST(TestStatisticsSortOrderMinMax, Unsigned) { std::string dir_string(test::get_data_dir()); diff --git a/cpp/src/parquet/test_util.cc b/cpp/src/parquet/test_util.cc index b65945cc7329f..a6fa8afc0f5b3 100644 --- a/cpp/src/parquet/test_util.cc +++ b/cpp/src/parquet/test_util.cc @@ -101,6 +101,16 @@ void random_Int96_numbers(int n, uint32_t seed, int32_t min_value, int32_t max_v } } +void random_float16_numbers(int n, uint32_t seed, ::arrow::util::Float16 min_value, + ::arrow::util::Float16 max_value, uint16_t* out) { + std::vector values(n); + random_numbers(n, seed, static_cast(min_value), static_cast(max_value), + values.data()); + for (int i = 0; i < n; ++i) { + out[i] = ::arrow::util::Float16(values[i]).bits(); + } +} + void random_fixed_byte_array(int n, uint32_t seed, uint8_t* buf, int len, FLBA* out) { std::default_random_engine gen(seed); std::uniform_int_distribution d(0, 255); diff --git a/cpp/src/parquet/test_util.h b/cpp/src/parquet/test_util.h index c8578609e9b1d..59728cf53f699 100644 --- a/cpp/src/parquet/test_util.h +++ b/cpp/src/parquet/test_util.h @@ -33,6 +33,7 @@ #include "arrow/io/memory.h" #include "arrow/testing/util.h" +#include "arrow/util/float16.h" #include "parquet/column_page.h" #include "parquet/column_reader.h" @@ -148,6 +149,9 @@ inline void random_numbers(int n, uint32_t seed, double min_value, double max_va void random_Int96_numbers(int n, uint32_t seed, int32_t min_value, int32_t max_value, Int96* out); +void 
random_float16_numbers(int n, uint32_t seed, ::arrow::util::Float16 min_value, + ::arrow::util::Float16 max_value, uint16_t* out); + void random_fixed_byte_array(int n, uint32_t seed, uint8_t* buf, int len, FLBA* out); void random_byte_array(int n, uint32_t seed, uint8_t* buf, ByteArray* out, int min_size, diff --git a/cpp/src/parquet/types.cc b/cpp/src/parquet/types.cc index 3127b60e5d1ae..04a0fc2e0117b 100644 --- a/cpp/src/parquet/types.cc +++ b/cpp/src/parquet/types.cc @@ -441,6 +441,8 @@ std::shared_ptr LogicalType::FromThrift( return BSONLogicalType::Make(); } else if (type.__isset.UUID) { return UUIDLogicalType::Make(); + } else if (type.__isset.FLOAT16) { + return Float16LogicalType::Make(); } else { throw ParquetException("Metadata contains Thrift LogicalType that is not recognized"); } @@ -494,6 +496,10 @@ std::shared_ptr LogicalType::BSON() { return BSONLogicalType: std::shared_ptr LogicalType::UUID() { return UUIDLogicalType::Make(); } +std::shared_ptr LogicalType::Float16() { + return Float16LogicalType::Make(); +} + std::shared_ptr LogicalType::None() { return NoLogicalType::Make(); } /* @@ -575,6 +581,7 @@ class LogicalType::Impl { class JSON; class BSON; class UUID; + class Float16; class No; class Undefined; @@ -644,6 +651,9 @@ bool LogicalType::is_null() const { return impl_->type() == LogicalType::Type::N bool LogicalType::is_JSON() const { return impl_->type() == LogicalType::Type::JSON; } bool LogicalType::is_BSON() const { return impl_->type() == LogicalType::Type::BSON; } bool LogicalType::is_UUID() const { return impl_->type() == LogicalType::Type::UUID; } +bool LogicalType::is_float16() const { + return impl_->type() == LogicalType::Type::FLOAT16; +} bool LogicalType::is_none() const { return impl_->type() == LogicalType::Type::NONE; } bool LogicalType::is_valid() const { return impl_->type() != LogicalType::Type::UNDEFINED; @@ -1557,6 +1567,22 @@ class LogicalType::Impl::UUID final : public LogicalType::Impl::Incompatible, 
GENERATE_MAKE(UUID) +class LogicalType::Impl::Float16 final : public LogicalType::Impl::Incompatible, + public LogicalType::Impl::TypeLengthApplicable { + public: + friend class Float16LogicalType; + + OVERRIDE_TOSTRING(Float16) + OVERRIDE_TOTHRIFT(Float16Type, FLOAT16) + + private: + Float16() + : LogicalType::Impl(LogicalType::Type::FLOAT16, SortOrder::SIGNED), + LogicalType::Impl::TypeLengthApplicable(parquet::Type::FIXED_LEN_BYTE_ARRAY, 2) {} +}; + +GENERATE_MAKE(Float16) + class LogicalType::Impl::No final : public LogicalType::Impl::SimpleCompatible, public LogicalType::Impl::UniversalApplicable { public: diff --git a/cpp/src/parquet/types.h b/cpp/src/parquet/types.h index 0315376a883e9..76dd0efc7cb4a 100644 --- a/cpp/src/parquet/types.h +++ b/cpp/src/parquet/types.h @@ -157,6 +157,7 @@ class PARQUET_EXPORT LogicalType { JSON, BSON, UUID, + FLOAT16, NONE // Not a real logical type; should always be last element }; }; @@ -210,6 +211,7 @@ class PARQUET_EXPORT LogicalType { static std::shared_ptr JSON(); static std::shared_ptr BSON(); static std::shared_ptr UUID(); + static std::shared_ptr Float16(); /// \brief Create a placeholder for when no logical type is specified static std::shared_ptr None(); @@ -263,6 +265,7 @@ class PARQUET_EXPORT LogicalType { bool is_JSON() const; bool is_BSON() const; bool is_UUID() const; + bool is_float16() const; bool is_none() const; /// \brief Return true if this logical type is of a known type. bool is_valid() const; @@ -433,6 +436,16 @@ class PARQUET_EXPORT UUIDLogicalType : public LogicalType { UUIDLogicalType() = default; }; +/// \brief Allowed for physical type FIXED_LEN_BYTE_ARRAY with length 2, +/// must encode raw FLOAT16 bytes. +class PARQUET_EXPORT Float16LogicalType : public LogicalType { + public: + static std::shared_ptr Make(); + + private: + Float16LogicalType() = default; +}; + /// \brief Allowed for any physical type. 
class PARQUET_EXPORT NoLogicalType : public LogicalType { public: diff --git a/cpp/submodules/parquet-testing b/cpp/submodules/parquet-testing index b2e7cc7551591..e45cd23f784aa 160000 --- a/cpp/submodules/parquet-testing +++ b/cpp/submodules/parquet-testing @@ -1 +1 @@ -Subproject commit b2e7cc755159196e3a068c8594f7acbaecfdaaac +Subproject commit e45cd23f784aab3d6bf0701f8f4e621469ed3be7 diff --git a/cpp/vcpkg.json b/cpp/vcpkg.json index 71855dafdea35..c0bf5dce50e32 100644 --- a/cpp/vcpkg.json +++ b/cpp/vcpkg.json @@ -1,6 +1,6 @@ { "name": "arrow", - "version-string": "14.0.0-SNAPSHOT", + "version-string": "15.0.0-SNAPSHOT", "dependencies": [ "abseil", { diff --git a/csharp/Directory.Build.props b/csharp/Directory.Build.props index b6c7c75c237e5..ae6edda0e2f0e 100644 --- a/csharp/Directory.Build.props +++ b/csharp/Directory.Build.props @@ -29,7 +29,7 @@ Apache Arrow library Copyright 2016-2019 The Apache Software Foundation The Apache Software Foundation - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT diff --git a/csharp/src/Apache.Arrow.Flight.AspNetCore/Apache.Arrow.Flight.AspNetCore.csproj b/csharp/src/Apache.Arrow.Flight.AspNetCore/Apache.Arrow.Flight.AspNetCore.csproj index 47d7e9746dea1..04cf06b1ec440 100644 --- a/csharp/src/Apache.Arrow.Flight.AspNetCore/Apache.Arrow.Flight.AspNetCore.csproj +++ b/csharp/src/Apache.Arrow.Flight.AspNetCore/Apache.Arrow.Flight.AspNetCore.csproj @@ -5,7 +5,7 @@ - + diff --git a/csharp/src/Apache.Arrow.Flight/Apache.Arrow.Flight.csproj b/csharp/src/Apache.Arrow.Flight/Apache.Arrow.Flight.csproj index d771aebd96796..67b37e49c7dc5 100644 --- a/csharp/src/Apache.Arrow.Flight/Apache.Arrow.Flight.csproj +++ b/csharp/src/Apache.Arrow.Flight/Apache.Arrow.Flight.csproj @@ -5,8 +5,8 @@ - - + + diff --git a/csharp/src/Apache.Arrow/Arrays/Decimal128Array.cs b/csharp/src/Apache.Arrow/Arrays/Decimal128Array.cs index 128e9e5f0818e..01724e2acda3e 100644 --- a/csharp/src/Apache.Arrow/Arrays/Decimal128Array.cs +++ 
b/csharp/src/Apache.Arrow/Arrays/Decimal128Array.cs @@ -15,6 +15,9 @@ using System; using System.Collections.Generic; +#if !NETSTANDARD1_3 +using System.Data.SqlTypes; +#endif using System.Diagnostics; using System.Numerics; using Apache.Arrow.Arrays; @@ -61,6 +64,62 @@ public Builder AppendRange(IEnumerable values) return Instance; } + public Builder Append(string value) + { + if (value == null) + { + AppendNull(); + } + else + { + Span bytes = stackalloc byte[DataType.ByteWidth]; + DecimalUtility.GetBytes(value, DataType.Precision, DataType.Scale, ByteWidth, bytes); + Append(bytes); + } + + return Instance; + } + + public Builder AppendRange(IEnumerable values) + { + if (values == null) + { + throw new ArgumentNullException(nameof(values)); + } + + foreach (string s in values) + { + Append(s); + } + + return Instance; + } + +#if !NETSTANDARD1_3 + public Builder Append(SqlDecimal value) + { + Span bytes = stackalloc byte[DataType.ByteWidth]; + DecimalUtility.GetBytes(value, DataType.Precision, DataType.Scale, bytes); + + return Append(bytes); + } + + public Builder AppendRange(IEnumerable values) + { + if (values == null) + { + throw new ArgumentNullException(nameof(values)); + } + + foreach (SqlDecimal d in values) + { + Append(d); + } + + return Instance; + } +#endif + public Builder Set(int index, decimal value) { Span bytes = stackalloc byte[DataType.ByteWidth]; @@ -91,5 +150,26 @@ public Decimal128Array(ArrayData data) } return DecimalUtility.GetDecimal(ValueBuffer, index, Scale, ByteWidth); } + + public string GetString(int index) + { + if (IsNull(index)) + { + return null; + } + return DecimalUtility.GetString(ValueBuffer, index, Precision, Scale, ByteWidth); + } + +#if !NETSTANDARD1_3 + public SqlDecimal? 
GetSqlDecimal(int index) + { + if (IsNull(index)) + { + return null; + } + + return DecimalUtility.GetSqlDecimal128(ValueBuffer, index, Precision, Scale); + } +#endif } } diff --git a/csharp/src/Apache.Arrow/Arrays/Decimal256Array.cs b/csharp/src/Apache.Arrow/Arrays/Decimal256Array.cs index fb4cd6be396e3..f314c2d6ebc9e 100644 --- a/csharp/src/Apache.Arrow/Arrays/Decimal256Array.cs +++ b/csharp/src/Apache.Arrow/Arrays/Decimal256Array.cs @@ -15,8 +15,10 @@ using System; using System.Collections.Generic; +#if !NETSTANDARD1_3 +using System.Data.SqlTypes; +#endif using System.Diagnostics; -using System.Numerics; using Apache.Arrow.Arrays; using Apache.Arrow.Types; @@ -61,6 +63,68 @@ public Builder AppendRange(IEnumerable values) return Instance; } + public Builder Append(string value) + { + if (value == null) + { + AppendNull(); + } + else + { + Span bytes = stackalloc byte[DataType.ByteWidth]; + DecimalUtility.GetBytes(value, DataType.Precision, DataType.Scale, ByteWidth, bytes); + Append(bytes); + } + + return Instance; + } + + public Builder AppendRange(IEnumerable values) + { + if (values == null) + { + throw new ArgumentNullException(nameof(values)); + } + + foreach (string s in values) + { + Append(s); + } + + return Instance; + } + +#if !NETSTANDARD1_3 + public Builder Append(SqlDecimal value) + { + Span bytes = stackalloc byte[DataType.ByteWidth]; + DecimalUtility.GetBytes(value, DataType.Precision, DataType.Scale, bytes); + if (!value.IsPositive) + { + var span = bytes.CastTo(); + span[2] = -1; + span[3] = -1; + } + + return Append(bytes); + } + + public Builder AppendRange(IEnumerable values) + { + if (values == null) + { + throw new ArgumentNullException(nameof(values)); + } + + foreach (SqlDecimal d in values) + { + Append(d); + } + + return Instance; + } +#endif + public Builder Set(int index, decimal value) { Span bytes = stackalloc byte[DataType.ByteWidth]; @@ -92,5 +156,37 @@ public Decimal256Array(ArrayData data) return 
DecimalUtility.GetDecimal(ValueBuffer, index, Scale, ByteWidth); } + + public string GetString(int index) + { + if (IsNull(index)) + { + return null; + } + return DecimalUtility.GetString(ValueBuffer, index, Precision, Scale, ByteWidth); + } + +#if !NETSTANDARD1_3 + public bool TryGetSqlDecimal(int index, out SqlDecimal? value) + { + if (IsNull(index)) + { + value = null; + return true; + } + + const int longWidth = 4; + var span = ValueBuffer.Span.CastTo().Slice(index * longWidth); + if ((span[2] == 0 && span[3] == 0) || + (span[2] == -1 && span[3] == -1)) + { + value = DecimalUtility.GetSqlDecimal128(ValueBuffer, 2 * index, Precision, Scale); + return true; + } + + value = null; + return false; + } +#endif } } diff --git a/csharp/src/Apache.Arrow/ChunkedArray.cs b/csharp/src/Apache.Arrow/ChunkedArray.cs index f5909f5adfe48..85b3560c229f2 100644 --- a/csharp/src/Apache.Arrow/ChunkedArray.cs +++ b/csharp/src/Apache.Arrow/ChunkedArray.cs @@ -92,6 +92,8 @@ public ChunkedArray Slice(long offset) return Slice(offset, Length - offset); } + public override string ToString() => $"{nameof(ChunkedArray)}: Length={Length}, DataType={DataType.Name}"; + private static IArrowArray[] Cast(IList arrays) { IArrowArray[] arrowArrays = new IArrowArray[arrays.Count]; diff --git a/csharp/src/Apache.Arrow/DecimalUtility.cs b/csharp/src/Apache.Arrow/DecimalUtility.cs index 4a29d068c6eff..bb3f0834fcec3 100644 --- a/csharp/src/Apache.Arrow/DecimalUtility.cs +++ b/csharp/src/Apache.Arrow/DecimalUtility.cs @@ -14,6 +14,9 @@ // limitations under the License. 
using System; +#if !NETSTANDARD1_3 +using System.Data.SqlTypes; +#endif using System.Numerics; namespace Apache.Arrow @@ -73,6 +76,139 @@ internal static decimal GetDecimal(in ArrowBuffer valueBuffer, int index, int sc } } +#if NETCOREAPP + internal unsafe static string GetString(in ArrowBuffer valueBuffer, int index, int precision, int scale, int byteWidth) + { + int startIndex = index * byteWidth; + ReadOnlySpan value = valueBuffer.Span.Slice(startIndex, byteWidth); + BigInteger integerValue = new BigInteger(value); + if (scale == 0) + { + return integerValue.ToString(); + } + + bool negative = integerValue.Sign < 0; + if (negative) + { + integerValue = -integerValue; + } + + int start = scale + 3; + Span result = stackalloc char[start + precision]; + if (!integerValue.TryFormat(result.Slice(start), out int charsWritten) || charsWritten > precision) + { + throw new OverflowException($"Value: {integerValue} cannot be formatted"); + } + + if (scale >= charsWritten) + { + int length = charsWritten; + result[++length] = '0'; + result[++length] = '.'; + while (scale > length - 2) + { + result[++length] = '0'; + } + start = charsWritten + 1; + charsWritten = length; + } + else + { + result.Slice(start, charsWritten - scale).CopyTo(result.Slice(--start)); + charsWritten++; + result[charsWritten + 1] = '.'; + } + + if (negative) + { + result[--start] = '-'; + charsWritten++; + } + + return new string(result.Slice(start, charsWritten)); + } +#else + internal unsafe static string GetString(in ArrowBuffer valueBuffer, int index, int precision, int scale, int byteWidth) + { + int startIndex = index * byteWidth; + ReadOnlySpan value = valueBuffer.Span.Slice(startIndex, byteWidth); + BigInteger integerValue = new BigInteger(value.ToArray()); + if (scale == 0) + { + return integerValue.ToString(); + } + + bool negative = integerValue.Sign < 0; + if (negative) + { + integerValue = -integerValue; + } + + string toString = integerValue.ToString(); + int charsWritten = 
toString.Length; + if (charsWritten > precision) + { + throw new OverflowException($"Value: {integerValue} cannot be formatted"); + } + + char[] result = new char[precision + 2]; + int pos = 0; + if (negative) + { + result[pos++] = '-'; + } + if (scale >= charsWritten) + { + result[pos++] = '0'; + result[pos++] = '.'; + int length = 0; + while (scale > charsWritten + length) + { + result[pos++] = '0'; + length++; + } + toString.CopyTo(0, result, pos, charsWritten); + pos += charsWritten; + } + else + { + int wholePartLength = charsWritten - scale; + toString.CopyTo(0, result, pos, wholePartLength); + pos += wholePartLength; + result[pos++] = '.'; + toString.CopyTo(wholePartLength, result, pos, scale); + pos += scale; + } + return new string(result, 0, pos); + } +#endif + +#if !NETSTANDARD1_3 + internal static SqlDecimal GetSqlDecimal128(in ArrowBuffer valueBuffer, int index, int precision, int scale) + { + const int byteWidth = 16; + const int intWidth = byteWidth / 4; + const int longWidth = byteWidth / 8; + + byte mostSignificantByte = valueBuffer.Span[(index + 1) * byteWidth - 1]; + bool isPositive = (mostSignificantByte & 0x80) == 0; + + if (isPositive) + { + ReadOnlySpan value = valueBuffer.Span.CastTo().Slice(index * intWidth, intWidth); + return new SqlDecimal((byte)precision, (byte)scale, true, value[0], value[1], value[2], value[3]); + } + else + { + ReadOnlySpan value = valueBuffer.Span.CastTo().Slice(index * longWidth, longWidth); + long data1 = -value[0]; + long data2 = (data1 == 0) ? 
-value[1] : ~value[1]; + + return new SqlDecimal((byte)precision, (byte)scale, false, (int)(data1 & 0xffffffff), (int)(data1 >> 32), (int)(data2 & 0xffffffff), (int)(data2 >> 32)); + } + } +#endif + private static decimal DivideByScale(BigInteger integerValue, int scale) { decimal result = (decimal)integerValue; // this cast is safe here @@ -169,5 +305,147 @@ internal static void GetBytes(decimal value, int precision, int scale, int byteW } } } + + internal static void GetBytes(string value, int precision, int scale, int byteWidth, Span bytes) + { + if (value == null || value.Length == 0) + { + throw new ArgumentException("numeric value may not be null or blank", nameof(value)); + } + + int start = 0; + if (value[0] == '-' || value[0] == '+') + { + start++; + } + while (value[start] == '0' && start < value.Length - 1) + { + start++; + } + + int pos = value.IndexOf('.'); + int neededPrecision = value.Length - start; + int neededScale; + if (pos == -1) + { + neededScale = 0; + } + else + { + neededPrecision--; + neededScale = value.Length - pos - 1; + } + + if (neededScale > scale) + { + throw new OverflowException($"Decimal scale cannot be greater than that in the Arrow vector: {value} has scale > {scale}"); + } + if (neededPrecision > precision) + { + throw new OverflowException($"Decimal precision cannot be greater than that in the Arrow vector: {value} has precision > {precision}"); + } + +#if NETCOREAPP + ReadOnlySpan src = value.AsSpan(); + Span buffer = stackalloc char[precision + start + 1]; + + int end; + if (pos == -1) + { + src.CopyTo(buffer); + end = src.Length; + } + else + { + src.Slice(0, pos).CopyTo(buffer); + src.Slice(pos + 1).CopyTo(buffer.Slice(pos)); + end = src.Length - 1; + } + + while (neededScale < scale) + { + buffer[end++] = '0'; + neededScale++; + } + + if (!BigInteger.TryParse(buffer.Slice(0, end), out BigInteger bigInt)) + { + throw new ArgumentException($"Unable to parse {value} as decimal"); + } + + if (!bigInt.TryWriteBytes(bytes, out 
int bytesWritten, false, !BitConverter.IsLittleEndian)) + { + throw new OverflowException("Could not extract bytes from integer value " + bigInt); + } +#else + char[] buffer = new char[precision + start + 1]; + + int end; + if (pos == -1) + { + value.CopyTo(0, buffer, 0, value.Length); + end = value.Length; + } + else + { + value.CopyTo(0, buffer, 0, pos); + value.CopyTo(pos + 1, buffer, pos, neededScale); + end = value.Length - 1; + } + + while (neededScale < scale) + { + buffer[end++] = '0'; + neededScale++; + } + + if (!BigInteger.TryParse(new string(buffer, 0, end), out BigInteger bigInt)) + { + throw new ArgumentException($"Unable to parse {value} as decimal"); + } + + byte[] tempBytes = bigInt.ToByteArray(); + try + { + tempBytes.CopyTo(bytes); + } + catch (ArgumentException) + { + throw new OverflowException("Could not extract bytes from integer value " + bigInt); + } + int bytesWritten = tempBytes.Length; +#endif + + if (bytes.Length > byteWidth) + { + throw new OverflowException($"Decimal size greater than {byteWidth} bytes: {bytes.Length}"); + } + + byte fill = bigInt.Sign == -1 ? (byte)255 : (byte)0; + for (int i = bytesWritten; i < byteWidth; i++) + { + bytes[i] = fill; + } + } + +#if !NETSTANDARD1_3 + internal static void GetBytes(SqlDecimal value, int precision, int scale, Span bytes) + { + if (value.Precision != precision || value.Scale != scale) + { + value = SqlDecimal.ConvertToPrecScale(value, precision, scale); + } + + // TODO: Consider groveling in the internals to avoid the probable allocation + Span span = bytes.CastTo(); + value.Data.AsSpan().CopyTo(span); + if (!value.IsPositive) + { + Span longSpan = bytes.CastTo(); + longSpan[0] = -longSpan[0]; + longSpan[1] = (longSpan[0] == 0) ? 
-longSpan[1] : ~longSpan[1]; + } + } +#endif } } diff --git a/csharp/src/Apache.Arrow/Field.cs b/csharp/src/Apache.Arrow/Field.cs index 562b9587bddb9..4fddd1bc4e2de 100644 --- a/csharp/src/Apache.Arrow/Field.cs +++ b/csharp/src/Apache.Arrow/Field.cs @@ -61,5 +61,7 @@ private Field(string name, IArrowType dataType, bool nullable, bool allowBlankNa DataType = dataType ?? NullType.Default; IsNullable = nullable; } + + public override string ToString() => $"{nameof(Field)}: Name={Name}, DataType={DataType.Name}, IsNullable={IsNullable}, Metadata count={Metadata?.Count ?? 0}"; } } diff --git a/csharp/src/Apache.Arrow/RecordBatch.cs b/csharp/src/Apache.Arrow/RecordBatch.cs index f87081d2987ac..566c77830265e 100644 --- a/csharp/src/Apache.Arrow/RecordBatch.cs +++ b/csharp/src/Apache.Arrow/RecordBatch.cs @@ -93,5 +93,7 @@ public RecordBatch Clone(MemoryAllocator allocator = default) IEnumerable arrays = _arrays.Select(array => ArrowArrayFactory.BuildArray(array.Data.Clone(allocator))); return new RecordBatch(Schema, arrays, Length); } + + public override string ToString() => $"{nameof(RecordBatch)}: {ColumnCount} columns by {Length} rows"; } } diff --git a/csharp/src/Apache.Arrow/Schema.cs b/csharp/src/Apache.Arrow/Schema.cs index 5d6b2b7bbdc4c..608b967630079 100644 --- a/csharp/src/Apache.Arrow/Schema.cs +++ b/csharp/src/Apache.Arrow/Schema.cs @@ -114,5 +114,7 @@ public Schema SetField(int fieldIndex, Field newField) return new Schema(fields, Metadata); } + + public override string ToString() => $"{nameof(Schema)}: Num fields={_fieldsList.Count}, Num metadata={Metadata?.Count ?? 
0}"; } } diff --git a/csharp/src/Apache.Arrow/Table.cs b/csharp/src/Apache.Arrow/Table.cs index 939ec23f54ff2..dd21cf1d0b39c 100644 --- a/csharp/src/Apache.Arrow/Table.cs +++ b/csharp/src/Apache.Arrow/Table.cs @@ -107,6 +107,8 @@ public Table SetColumn(int columnIndex, Column column) return new Table(newSchema, newColumns); } + public override string ToString() => $"{nameof(Table)}: {ColumnCount} columns by {RowCount} rows"; + // TODO: Flatten for Tables with Lists/Structs? } } diff --git a/csharp/test/Apache.Arrow.Benchmarks/Apache.Arrow.Benchmarks.csproj b/csharp/test/Apache.Arrow.Benchmarks/Apache.Arrow.Benchmarks.csproj index 5c2c1823fec99..df76fd4a7b45f 100644 --- a/csharp/test/Apache.Arrow.Benchmarks/Apache.Arrow.Benchmarks.csproj +++ b/csharp/test/Apache.Arrow.Benchmarks/Apache.Arrow.Benchmarks.csproj @@ -6,8 +6,8 @@ - - + + diff --git a/csharp/test/Apache.Arrow.Compression.Tests/Apache.Arrow.Compression.Tests.csproj b/csharp/test/Apache.Arrow.Compression.Tests/Apache.Arrow.Compression.Tests.csproj index e63c27d022c24..475d7ccc3ef28 100644 --- a/csharp/test/Apache.Arrow.Compression.Tests/Apache.Arrow.Compression.Tests.csproj +++ b/csharp/test/Apache.Arrow.Compression.Tests/Apache.Arrow.Compression.Tests.csproj @@ -7,8 +7,8 @@ - - + + diff --git a/csharp/test/Apache.Arrow.Flight.Sql.Tests/Apache.Arrow.Flight.Sql.Tests.csproj b/csharp/test/Apache.Arrow.Flight.Sql.Tests/Apache.Arrow.Flight.Sql.Tests.csproj index ed67fafbf8c72..656ee6a2470e4 100644 --- a/csharp/test/Apache.Arrow.Flight.Sql.Tests/Apache.Arrow.Flight.Sql.Tests.csproj +++ b/csharp/test/Apache.Arrow.Flight.Sql.Tests/Apache.Arrow.Flight.Sql.Tests.csproj @@ -6,8 +6,8 @@ - - + + diff --git a/csharp/test/Apache.Arrow.Flight.TestWeb/Apache.Arrow.Flight.TestWeb.csproj b/csharp/test/Apache.Arrow.Flight.TestWeb/Apache.Arrow.Flight.TestWeb.csproj index 81468deea6c09..3aaebb103f9da 100644 --- a/csharp/test/Apache.Arrow.Flight.TestWeb/Apache.Arrow.Flight.TestWeb.csproj +++ 
b/csharp/test/Apache.Arrow.Flight.TestWeb/Apache.Arrow.Flight.TestWeb.csproj @@ -5,7 +5,7 @@ - + diff --git a/csharp/test/Apache.Arrow.Flight.Tests/Apache.Arrow.Flight.Tests.csproj b/csharp/test/Apache.Arrow.Flight.Tests/Apache.Arrow.Flight.Tests.csproj index e4e3c053f943d..53fdd6d62dbcb 100644 --- a/csharp/test/Apache.Arrow.Flight.Tests/Apache.Arrow.Flight.Tests.csproj +++ b/csharp/test/Apache.Arrow.Flight.Tests/Apache.Arrow.Flight.Tests.csproj @@ -6,8 +6,8 @@ - - + + diff --git a/csharp/test/Apache.Arrow.Tests/Apache.Arrow.Tests.csproj b/csharp/test/Apache.Arrow.Tests/Apache.Arrow.Tests.csproj index 2835c60da414f..66becb84c5b66 100644 --- a/csharp/test/Apache.Arrow.Tests/Apache.Arrow.Tests.csproj +++ b/csharp/test/Apache.Arrow.Tests/Apache.Arrow.Tests.csproj @@ -14,8 +14,8 @@ - - + + all runtime; build; native; contentfiles; analyzers diff --git a/csharp/test/Apache.Arrow.Tests/ArrowStreamWriterTests.cs b/csharp/test/Apache.Arrow.Tests/ArrowStreamWriterTests.cs index 89595f99dc0e4..c4c0b6ec9ff21 100644 --- a/csharp/test/Apache.Arrow.Tests/ArrowStreamWriterTests.cs +++ b/csharp/test/Apache.Arrow.Tests/ArrowStreamWriterTests.cs @@ -541,6 +541,10 @@ public async Task WriteMultipleDictionaryArraysAsync() public void WriteMultipleDictionaryArrays() { List originalRecordBatches = CreateMultipleDictionaryArraysTestData(); + Assert.Equal("RecordBatch: 10 columns by 3 rows", originalRecordBatches[0].ToString()); + Assert.Equal("Schema: Num fields=10, Num metadata=0", originalRecordBatches[0].Schema.ToString()); + Assert.Equal("Field: Name=dictionaryField_int8, DataType=dictionary, IsNullable=False, Metadata count=0", + originalRecordBatches[0].Schema.FieldsLookup["dictionaryField_int8"].Single().ToString()); TestRoundTripRecordBatches(originalRecordBatches); } diff --git a/csharp/test/Apache.Arrow.Tests/Decimal128ArrayTests.cs b/csharp/test/Apache.Arrow.Tests/Decimal128ArrayTests.cs index 4c4e6537269a4..497c9d2f6c6af 100644 --- 
a/csharp/test/Apache.Arrow.Tests/Decimal128ArrayTests.cs +++ b/csharp/test/Apache.Arrow.Tests/Decimal128ArrayTests.cs @@ -14,7 +14,10 @@ // limitations under the License. using System; -using System.Collections.Generic; +#if !NETSTANDARD1_3 +using System.Data.SqlTypes; +using System.Linq; +#endif using Apache.Arrow.Types; using Xunit; @@ -22,6 +25,18 @@ namespace Apache.Arrow.Tests { public class Decimal128ArrayTests { +#if !NETSTANDARD1_3 + static SqlDecimal? Convert(decimal? value) + { + return value == null ? null : new SqlDecimal(value.Value); + } + + static decimal? Convert(SqlDecimal? value) + { + return value == null ? null : value.Value.Value; + } +#endif + public class Builder { public class AppendNull @@ -30,7 +45,7 @@ public class AppendNull public void AppendThenGetGivesNull() { // Arrange - var builder = new Decimal128Array.Builder(new Decimal128Type(8,2)); + var builder = new Decimal128Array.Builder(new Decimal128Type(8, 2)); // Act @@ -45,6 +60,12 @@ public void AppendThenGetGivesNull() Assert.Null(array.GetValue(0)); Assert.Null(array.GetValue(1)); Assert.Null(array.GetValue(2)); + +#if !NETSTANDARD1_3 + Assert.Null(array.GetSqlDecimal(0)); + Assert.Null(array.GetSqlDecimal(1)); + Assert.Null(array.GetSqlDecimal(2)); +#endif } } @@ -67,7 +88,7 @@ public void AppendDecimal(int count) testData[i] = null; continue; } - decimal rnd = i * (decimal)Math.Round(new Random().NextDouble(),10); + decimal rnd = i * (decimal)Math.Round(new Random().NextDouble(), 10); testData[i] = rnd; builder.Append(rnd); } @@ -78,6 +99,9 @@ public void AppendDecimal(int count) for (int i = 0; i < count; i++) { Assert.Equal(testData[i], array.GetValue(i)); +#if !NETSTANDARD1_3 + Assert.Equal(Convert(testData[i]), array.GetSqlDecimal(i)); +#endif } } @@ -95,6 +119,11 @@ public void AppendLargeDecimal() var array = builder.Build(); Assert.Equal(large, array.GetValue(0)); Assert.Equal(-large, array.GetValue(1)); + +#if !NETSTANDARD1_3 + Assert.Equal(Convert(large), 
array.GetSqlDecimal(0)); + Assert.Equal(Convert(-large), array.GetSqlDecimal(1)); +#endif } [Fact] @@ -115,6 +144,13 @@ public void AppendMaxAndMinDecimal() Assert.Equal(Decimal.MinValue, array.GetValue(1)); Assert.Equal(Decimal.MaxValue - 10, array.GetValue(2)); Assert.Equal(Decimal.MinValue + 10, array.GetValue(3)); + +#if !NETSTANDARD1_3 + Assert.Equal(Convert(Decimal.MaxValue), array.GetSqlDecimal(0)); + Assert.Equal(Convert(Decimal.MinValue), array.GetSqlDecimal(1)); + Assert.Equal(Convert(Decimal.MaxValue) - 10, array.GetSqlDecimal(2)); + Assert.Equal(Convert(Decimal.MinValue) + 10, array.GetSqlDecimal(3)); +#endif } [Fact] @@ -131,6 +167,11 @@ public void AppendFractionalDecimal() var array = builder.Build(); Assert.Equal(fraction, array.GetValue(0)); Assert.Equal(-fraction, array.GetValue(1)); + +#if !NETSTANDARD1_3 + Assert.Equal(Convert(fraction), array.GetSqlDecimal(0)); + Assert.Equal(Convert(-fraction), array.GetSqlDecimal(1)); +#endif } [Fact] @@ -138,7 +179,7 @@ public void AppendRangeDecimal() { // Arrange var builder = new Decimal128Array.Builder(new Decimal128Type(24, 8)); - var range = new decimal[] {2.123M, 1.5984M, -0.0000001M, 9878987987987987.1235407M}; + var range = new decimal[] { 2.123M, 1.5984M, -0.0000001M, 9878987987987987.1235407M }; // Act builder.AppendRange(range); @@ -146,12 +187,15 @@ public void AppendRangeDecimal() // Assert var array = builder.Build(); - for(int i = 0; i < range.Length; i ++) + for (int i = 0; i < range.Length; i++) { Assert.Equal(range[i], array.GetValue(i)); +#if !NETSTANDARD1_3 + Assert.Equal(Convert(range[i]), array.GetSqlDecimal(i)); +#endif } - - Assert.Null( array.GetValue(range.Length)); + + Assert.Null(array.GetValue(range.Length)); } [Fact] @@ -159,7 +203,7 @@ public void AppendClearAppendDecimal() { // Arrange var builder = new Decimal128Array.Builder(new Decimal128Type(24, 8)); - + // Act builder.Append(1); builder.Clear(); @@ -256,6 +300,174 @@ public void SwapNull() Assert.Equal(123.456M, 
array.GetValue(1)); } } + +#if !NETSTANDARD1_3 + public class SqlDecimals + { + [Theory] + [InlineData(200)] + public void AppendSqlDecimal(int count) + { + // Arrange + const int precision = 10; + var builder = new Decimal128Array.Builder(new Decimal128Type(14, precision)); + + // Act + SqlDecimal?[] testData = new SqlDecimal?[count]; + for (int i = 0; i < count; i++) + { + if (i == count - 2) + { + builder.AppendNull(); + testData[i] = null; + continue; + } + SqlDecimal rnd = i * (SqlDecimal)Math.Round(new Random().NextDouble(), 10); + builder.Append(rnd); + testData[i] = SqlDecimal.Round(rnd, precision); + } + + // Assert + var array = builder.Build(); + Assert.Equal(count, array.Length); + for (int i = 0; i < count; i++) + { + Assert.Equal(testData[i], array.GetSqlDecimal(i)); + Assert.Equal(Convert(testData[i]), array.GetValue(i)); + } + } + + [Fact] + public void AppendMaxAndMinSqlDecimal() + { + // Arrange + var builder = new Decimal128Array.Builder(new Decimal128Type(38, 0)); + + // Act + builder.Append(SqlDecimal.MaxValue); + builder.Append(SqlDecimal.MinValue); + builder.Append(SqlDecimal.MaxValue - 10); + builder.Append(SqlDecimal.MinValue + 10); + + // Assert + var array = builder.Build(); + Assert.Equal(SqlDecimal.MaxValue, array.GetSqlDecimal(0)); + Assert.Equal(SqlDecimal.MinValue, array.GetSqlDecimal(1)); + Assert.Equal(SqlDecimal.MaxValue - 10, array.GetSqlDecimal(2)); + Assert.Equal(SqlDecimal.MinValue + 10, array.GetSqlDecimal(3)); + } + + [Fact] + public void AppendRangeSqlDecimal() + { + // Arrange + var builder = new Decimal128Array.Builder(new Decimal128Type(24, 8)); + var range = new SqlDecimal[] { 2.123M, 1.5984M, -0.0000001M, 9878987987987987.1235407M }; + + // Act + builder.AppendRange(range); + builder.AppendNull(); + + // Assert + var array = builder.Build(); + for (int i = 0; i < range.Length; i++) + { + Assert.Equal(range[i], array.GetSqlDecimal(i)); + Assert.Equal(Convert(range[i]), array.GetValue(i)); + } + + 
Assert.Null(array.GetValue(range.Length)); + } + } + + public class Strings + { + [Theory] + [InlineData(200)] + public void AppendString(int count) + { + // Arrange + const int precision = 10; + var builder = new Decimal128Array.Builder(new Decimal128Type(14, precision)); + + // Act + string[] testData = new string[count]; + for (int i = 0; i < count; i++) + { + if (i == count - 2) + { + builder.AppendNull(); + testData[i] = null; + continue; + } + SqlDecimal rnd = i * (SqlDecimal)Math.Round(new Random().NextDouble(), 10); + builder.Append(rnd); + testData[i] = SqlDecimal.Round(rnd, precision).ToString(); + } + + // Assert + var array = builder.Build(); + Assert.Equal(count, array.Length); + for (int i = 0; i < count; i++) + { + if (testData[i] == null) + { + Assert.Null(array.GetString(i)); + Assert.Null(array.GetSqlDecimal(i)); + } + else + { + Assert.Equal(testData[i].TrimEnd('0'), array.GetString(i).TrimEnd('0')); + Assert.Equal(SqlDecimal.Parse(testData[i]), array.GetSqlDecimal(i)); + } + } + } + + [Fact] + public void AppendMaxAndMinSqlDecimal() + { + // Arrange + var builder = new Decimal128Array.Builder(new Decimal128Type(38, 0)); + + // Act + builder.Append(SqlDecimal.MaxValue.ToString()); + builder.Append(SqlDecimal.MinValue.ToString()); + string maxMinusTen = (SqlDecimal.MaxValue - 10).ToString(); + string minPlusTen = (SqlDecimal.MinValue + 10).ToString(); + builder.Append(maxMinusTen); + builder.Append(minPlusTen); + + // Assert + var array = builder.Build(); + Assert.Equal(SqlDecimal.MaxValue.ToString(), array.GetString(0)); + Assert.Equal(SqlDecimal.MinValue.ToString(), array.GetString(1)); + Assert.Equal(maxMinusTen, array.GetString(2)); + Assert.Equal(minPlusTen, array.GetString(3)); + } + + [Fact] + public void AppendRangeSqlDecimal() + { + // Arrange + var builder = new Decimal128Array.Builder(new Decimal128Type(24, 8)); + var range = new SqlDecimal[] { 2.123M, 1.5984M, -0.0000001M, 9878987987987987.1235407M }; + + // Act + 
builder.AppendRange(range.Select(d => d.ToString())); + builder.AppendNull(); + + // Assert + var array = builder.Build(); + for (int i = 0; i < range.Length; i++) + { + Assert.Equal(range[i], array.GetSqlDecimal(i)); + Assert.Equal(range[i].ToString(), array.GetString(i).TrimEnd('0')); + } + + Assert.Null(array.GetValue(range.Length)); + } + } +#endif } } } diff --git a/csharp/test/Apache.Arrow.Tests/Decimal256ArrayTests.cs b/csharp/test/Apache.Arrow.Tests/Decimal256ArrayTests.cs index e63c39d24eea8..3924c73a4e2f7 100644 --- a/csharp/test/Apache.Arrow.Tests/Decimal256ArrayTests.cs +++ b/csharp/test/Apache.Arrow.Tests/Decimal256ArrayTests.cs @@ -14,7 +14,10 @@ // limitations under the License. using System; -using System.Collections.Generic; +#if !NETSTANDARD1_3 +using System.Data.SqlTypes; +using System.Linq; +#endif using Apache.Arrow.Types; using Xunit; @@ -22,6 +25,25 @@ namespace Apache.Arrow.Tests { public class Decimal256ArrayTests { +#if !NETSTANDARD1_3 + static SqlDecimal? GetSqlDecimal(Decimal256Array array, int index) + { + SqlDecimal? result; + Assert.True(array.TryGetSqlDecimal(index, out result)); + return result; + } + + static SqlDecimal? Convert(decimal? value) + { + return value == null ? null : new SqlDecimal(value.Value); + } + + static decimal? Convert(SqlDecimal? value) + { + return value == null ? 
null : value.Value.Value; + } +#endif + public class Builder { public class AppendNull @@ -45,6 +67,12 @@ public void AppendThenGetGivesNull() Assert.Null(array.GetValue(0)); Assert.Null(array.GetValue(1)); Assert.Null(array.GetValue(2)); + +#if !NETSTANDARD1_3 + Assert.Null(GetSqlDecimal(array, 0)); + Assert.Null(GetSqlDecimal(array, 1)); + Assert.Null(GetSqlDecimal(array, 2)); +#endif } } @@ -78,6 +106,9 @@ public void AppendDecimal(int count) for (int i = 0; i < count; i++) { Assert.Equal(testData[i], array.GetValue(i)); +#if !NETSTANDARD1_3 + Assert.Equal(Convert(testData[i]), GetSqlDecimal(array, i)); +#endif } } @@ -95,6 +126,11 @@ public void AppendLargeDecimal() var array = builder.Build(); Assert.Equal(large, array.GetValue(0)); Assert.Equal(-large, array.GetValue(1)); + +#if !NETSTANDARD1_3 + Assert.Equal(Convert(large), GetSqlDecimal(array, 0)); + Assert.Equal(Convert(-large), GetSqlDecimal(array, 1)); +#endif } [Fact] @@ -115,6 +151,13 @@ public void AppendMaxAndMinDecimal() Assert.Equal(Decimal.MinValue, array.GetValue(1)); Assert.Equal(Decimal.MaxValue - 10, array.GetValue(2)); Assert.Equal(Decimal.MinValue + 10, array.GetValue(3)); + +#if !NETSTANDARD1_3 + Assert.Equal(Convert(Decimal.MaxValue), GetSqlDecimal(array, 0)); + Assert.Equal(Convert(Decimal.MinValue), GetSqlDecimal(array, 1)); + Assert.Equal(Convert(Decimal.MaxValue) - 10, GetSqlDecimal(array, 2)); + Assert.Equal(Convert(Decimal.MinValue) + 10, GetSqlDecimal(array, 3)); +#endif } [Fact] @@ -131,6 +174,11 @@ public void AppendFractionalDecimal() var array = builder.Build(); Assert.Equal(fraction, array.GetValue(0)); Assert.Equal(-fraction, array.GetValue(1)); + +#if !NETSTANDARD1_3 + Assert.Equal(Convert(fraction), GetSqlDecimal(array, 0)); + Assert.Equal(Convert(-fraction), GetSqlDecimal(array, 1)); +#endif } [Fact] @@ -149,8 +197,11 @@ public void AppendRangeDecimal() for(int i = 0; i < range.Length; i ++) { Assert.Equal(range[i], array.GetValue(i)); +#if !NETSTANDARD1_3 + 
Assert.Equal(Convert(range[i]), GetSqlDecimal(array, i)); +#endif } - + Assert.Null( array.GetValue(range.Length)); } @@ -256,6 +307,174 @@ public void SwapNull() Assert.Equal(123.456M, array.GetValue(1)); } } + +#if !NETSTANDARD1_3 + public class SqlDecimals + { + [Theory] + [InlineData(200)] + public void AppendSqlDecimal(int count) + { + // Arrange + const int precision = 10; + var builder = new Decimal256Array.Builder(new Decimal256Type(14, precision)); + + // Act + SqlDecimal?[] testData = new SqlDecimal?[count]; + for (int i = 0; i < count; i++) + { + if (i == count - 2) + { + builder.AppendNull(); + testData[i] = null; + continue; + } + SqlDecimal rnd = i * (SqlDecimal)Math.Round(new Random().NextDouble(), 10); + builder.Append(rnd); + testData[i] = SqlDecimal.Round(rnd, precision); + } + + // Assert + var array = builder.Build(); + Assert.Equal(count, array.Length); + for (int i = 0; i < count; i++) + { + Assert.Equal(testData[i], GetSqlDecimal(array, i)); + Assert.Equal(Convert(testData[i]), array.GetValue(i)); + } + } + + [Fact] + public void AppendMaxAndMinSqlDecimal() + { + // Arrange + var builder = new Decimal256Array.Builder(new Decimal256Type(38, 0)); + + // Act + builder.Append(SqlDecimal.MaxValue); + builder.Append(SqlDecimal.MinValue); + builder.Append(SqlDecimal.MaxValue - 10); + builder.Append(SqlDecimal.MinValue + 10); + + // Assert + var array = builder.Build(); + Assert.Equal(SqlDecimal.MaxValue, GetSqlDecimal(array, 0)); + Assert.Equal(SqlDecimal.MinValue, GetSqlDecimal(array, 1)); + Assert.Equal(SqlDecimal.MaxValue - 10, GetSqlDecimal(array, 2)); + Assert.Equal(SqlDecimal.MinValue + 10, GetSqlDecimal(array, 3)); + } + + [Fact] + public void AppendRangeSqlDecimal() + { + // Arrange + var builder = new Decimal256Array.Builder(new Decimal256Type(24, 8)); + var range = new SqlDecimal[] { 2.123M, 1.5984M, -0.0000001M, 9878987987987987.1235407M }; + + // Act + builder.AppendRange(range); + builder.AppendNull(); + + // Assert + var array = 
builder.Build(); + for (int i = 0; i < range.Length; i++) + { + Assert.Equal(range[i], GetSqlDecimal(array, i)); + Assert.Equal(Convert(range[i]), array.GetValue(i)); + } + + Assert.Null(array.GetValue(range.Length)); + } + } + + public class Strings + { + [Theory] + [InlineData(200)] + public void AppendString(int count) + { + // Arrange + const int precision = 10; + var builder = new Decimal256Array.Builder(new Decimal256Type(14, precision)); + + // Act + string[] testData = new string[count]; + for (int i = 0; i < count; i++) + { + if (i == count - 2) + { + builder.AppendNull(); + testData[i] = null; + continue; + } + SqlDecimal rnd = i * (SqlDecimal)Math.Round(new Random().NextDouble(), 10); + builder.Append(rnd); + testData[i] = SqlDecimal.Round(rnd, precision).ToString(); + } + + // Assert + var array = builder.Build(); + Assert.Equal(count, array.Length); + for (int i = 0; i < count; i++) + { + if (testData[i] == null) + { + Assert.Null(array.GetString(i)); + Assert.Null(GetSqlDecimal(array, i)); + } + else + { + Assert.Equal(testData[i].TrimEnd('0'), array.GetString(i).TrimEnd('0')); + Assert.Equal(SqlDecimal.Parse(testData[i]), GetSqlDecimal(array, i)); + } + } + } + + [Fact] + public void AppendMaxAndMinSqlDecimal() + { + // Arrange + var builder = new Decimal256Array.Builder(new Decimal256Type(38, 0)); + + // Act + builder.Append(SqlDecimal.MaxValue.ToString()); + builder.Append(SqlDecimal.MinValue.ToString()); + string maxMinusTen = (SqlDecimal.MaxValue - 10).ToString(); + string minPlusTen = (SqlDecimal.MinValue + 10).ToString(); + builder.Append(maxMinusTen); + builder.Append(minPlusTen); + + // Assert + var array = builder.Build(); + Assert.Equal(SqlDecimal.MaxValue.ToString(), array.GetString(0)); + Assert.Equal(SqlDecimal.MinValue.ToString(), array.GetString(1)); + Assert.Equal(maxMinusTen, array.GetString(2)); + Assert.Equal(minPlusTen, array.GetString(3)); + } + + [Fact] + public void AppendRangeSqlDecimal() + { + // Arrange + var builder = new 
Decimal256Array.Builder(new Decimal256Type(24, 8)); + var range = new SqlDecimal[] { 2.123M, 1.5984M, -0.0000001M, 9878987987987987.1235407M }; + + // Act + builder.AppendRange(range.Select(d => d.ToString())); + builder.AppendNull(); + + // Assert + var array = builder.Build(); + for (int i = 0; i < range.Length; i++) + { + Assert.Equal(range[i], GetSqlDecimal(array, i)); + Assert.Equal(range[i].ToString().TrimEnd('0'), array.GetString(i).TrimEnd('0')); + } + + Assert.Null(array.GetValue(range.Length)); + } + } +#endif } } } diff --git a/csharp/test/Apache.Arrow.Tests/DecimalUtilityTests.cs b/csharp/test/Apache.Arrow.Tests/DecimalUtilityTests.cs index 9c7e5b587cb9d..677e9b6cadfcf 100644 --- a/csharp/test/Apache.Arrow.Tests/DecimalUtilityTests.cs +++ b/csharp/test/Apache.Arrow.Tests/DecimalUtilityTests.cs @@ -14,6 +14,9 @@ // limitations under the License. using System; +#if !NETSTANDARD1_3 +using System.Data.SqlTypes; +#endif using Apache.Arrow.Types; using Xunit; @@ -31,13 +34,13 @@ public class Overflow [InlineData(100.123, 5, 2, true)] [InlineData(100.123, 5, 3, true)] [InlineData(100.123, 6, 3, false)] - public void HasExpectedResultOrThrows(decimal d, int precision , int scale, bool shouldThrow) + public void HasExpectedResultOrThrows(decimal d, int precision, int scale, bool shouldThrow) { var builder = new Decimal128Array.Builder(new Decimal128Type(precision, scale)); if (shouldThrow) { - Assert.Throws(() => builder.Append(d)); + Assert.Throws(() => builder.Append(d)); } else { @@ -55,7 +58,7 @@ public void Decimal256HasExpectedResultOrThrows(decimal d, int precision, int sc var builder = new Decimal256Array.Builder(new Decimal256Type(precision, scale)); builder.Append(d); Decimal256Array result = builder.Build(new TestMemoryAllocator()); ; - + if (shouldThrow) { Assert.Throws(() => result.GetValue(0)); @@ -66,5 +69,142 @@ public void Decimal256HasExpectedResultOrThrows(decimal d, int precision, int sc } } } + + public class SqlDecimals + { + +#if 
!NETSTANDARD1_3 + [Fact] + public void NegativeSqlDecimal() + { + const int precision = 38; + const int scale = 0; + const int bitWidth = 16; + + var negative = new SqlDecimal(precision, scale, false, 0, 0, 1, 0); + var bytes = new byte[16]; + DecimalUtility.GetBytes(negative.Value, precision, scale, bitWidth, bytes); + var sqlNegative = DecimalUtility.GetSqlDecimal128(new ArrowBuffer(bytes), 0, precision, scale); + Assert.Equal(negative, sqlNegative); + + DecimalUtility.GetBytes(sqlNegative, precision, scale, bytes); + var decimalNegative = DecimalUtility.GetDecimal(new ArrowBuffer(bytes), 0, scale, bitWidth); + Assert.Equal(negative.Value, decimalNegative); + } + + [Fact] + public void LargeScale() + { + string digits = "1.2345678901234567890123456789012345678"; + + var positive = SqlDecimal.Parse(digits); + Assert.Equal(38, positive.Precision); + Assert.Equal(37, positive.Scale); + + var bytes = new byte[16]; + DecimalUtility.GetBytes(positive, positive.Precision, positive.Scale, bytes); + var sqlPositive = DecimalUtility.GetSqlDecimal128(new ArrowBuffer(bytes), 0, positive.Precision, positive.Scale); + + Assert.Equal(positive, sqlPositive); + Assert.Equal(digits, sqlPositive.ToString()); + + digits = "-" + digits; + var negative = SqlDecimal.Parse(digits); + Assert.Equal(38, positive.Precision); + Assert.Equal(37, positive.Scale); + + DecimalUtility.GetBytes(negative, negative.Precision, negative.Scale, bytes); + var sqlNegative = DecimalUtility.GetSqlDecimal128(new ArrowBuffer(bytes), 0, negative.Precision, negative.Scale); + + Assert.Equal(negative, sqlNegative); + Assert.Equal(digits, sqlNegative.ToString()); + } +#endif + } + + public class Strings + { + [Theory] + [InlineData(100.12, 10, 2, "100.12")] + [InlineData(100.12, 8, 3, "100.120")] + [InlineData(100.12, 7, 4, "100.1200")] + [InlineData(.12, 6, 3, "0.120")] + [InlineData(.0012, 5, 4, "0.0012")] + [InlineData(-100.12, 10, 2, "-100.12")] + [InlineData(-100.12, 8, 3, "-100.120")] + 
[InlineData(-100.12, 7, 4, "-100.1200")] + [InlineData(-.12, 6, 3, "-0.120")] + [InlineData(-.0012, 5, 4, "-0.0012")] + [InlineData(7.89, 76, 38, "7.89000000000000000000000000000000000000")] + public void FromDecimal(decimal d, int precision, int scale, string result) + { + if (precision <= 38) + { + TestFromDecimal(d, precision, scale, 16, result); + } + TestFromDecimal(d, precision, scale, 32, result); + } + + private void TestFromDecimal(decimal d, int precision, int scale, int byteWidth, string result) + { + var bytes = new byte[byteWidth]; + DecimalUtility.GetBytes(d, precision, scale, byteWidth, bytes); + Assert.Equal(result, DecimalUtility.GetString(new ArrowBuffer(bytes), 0, precision, scale, byteWidth)); + } + + [Theory] + [InlineData("100.12", 10, 2, "100.12")] + [InlineData("100.12", 8, 3, "100.120")] + [InlineData("100.12", 7, 4, "100.1200")] + [InlineData(".12", 6, 3, "0.120")] + [InlineData(".0012", 5, 4, "0.0012")] + [InlineData("-100.12", 10, 2, "-100.12")] + [InlineData("-100.12", 8, 3, "-100.120")] + [InlineData("-100.12", 7, 4, "-100.1200")] + [InlineData("-.12", 6, 3, "-0.120")] + [InlineData("-.0012", 5, 4, "-0.0012")] + [InlineData("+.0012", 5, 4, "0.0012")] + [InlineData("99999999999999999999999999999999999999", 38, 0, "99999999999999999999999999999999999999")] + [InlineData("-99999999999999999999999999999999999999", 38, 0, "-99999999999999999999999999999999999999")] + public void FromString(string s, int precision, int scale, string result) + { + TestFromString(s, precision, scale, 16, result); + TestFromString(s, precision, scale, 32, result); + } + + [Fact] + public void ThroughDecimal256() + { + var seventysix = new string('9', 76); + TestFromString(seventysix, 76, 0, 32, seventysix); + TestFromString("0000" + seventysix, 76, 0, 32, seventysix); + + seventysix = "-" + seventysix; + TestFromString(seventysix, 76, 0, 32, seventysix); + + var seventyseven = new string('9', 77); + Assert.Throws(() => TestFromString(seventyseven, 76, 0, 32, 
seventyseven)); + } + + private void TestFromString(string s, int precision, int scale, int byteWidth, string result) + { + var bytes = new byte[byteWidth]; + DecimalUtility.GetBytes(s, precision, scale, byteWidth, bytes); + Assert.Equal(result, DecimalUtility.GetString(new ArrowBuffer(bytes), 0, precision, scale, byteWidth)); + } + + [Theory] + [InlineData("", 10, 2, 16, typeof(ArgumentException))] + [InlineData("", 10, 2, 32, typeof(ArgumentException))] + [InlineData(null, 10, 2, 32, typeof(ArgumentException))] + [InlineData("1.23", 10, 1, 16, typeof(OverflowException))] + [InlineData("12345678901234567890", 24, 1, 8, typeof(OverflowException))] + [InlineData("abc", 24, 1, 8, typeof(ArgumentException))] + public void ParseErrors(string s, int precision, int scale, int byteWidth, Type exceptionType) + { + byte[] bytes = new byte[byteWidth]; + Assert.Throws(exceptionType, () => DecimalUtility.GetBytes(s, precision, scale, byteWidth, bytes)); + } + } } } diff --git a/csharp/test/Apache.Arrow.Tests/TableTests.cs b/csharp/test/Apache.Arrow.Tests/TableTests.cs index 234dd63a79cd2..9e23fa99a769c 100644 --- a/csharp/test/Apache.Arrow.Tests/TableTests.cs +++ b/csharp/test/Apache.Arrow.Tests/TableTests.cs @@ -49,6 +49,8 @@ public void TestTableBasics() Table table = MakeTableWithOneColumnOfTwoIntArrays(10); Assert.Equal(20, table.RowCount); Assert.Equal(1, table.ColumnCount); + Assert.Equal("Table: 1 columns by 20 rows", table.ToString()); + Assert.Equal("ChunkedArray: Length=20, DataType=int32", table.Column(0).Data.ToString()); } [Fact] @@ -61,6 +63,7 @@ public void TestTableFromRecordBatches() Table table1 = Table.TableFromRecordBatches(recordBatch1.Schema, recordBatches); Assert.Equal(20, table1.RowCount); Assert.Equal(27, table1.ColumnCount); + Assert.Equal("ChunkedArray: Length=20, DataType=list", table1.Column(0).Data.ToString()); FixedSizeBinaryType type = new FixedSizeBinaryType(17); Field newField1 = new Field(type.Name, type, false); @@ -83,6 +86,9 @@ public 
void TestTableFromRecordBatches() public void TestTableAddRemoveAndSetColumn() { Table table = MakeTableWithOneColumnOfTwoIntArrays(10); + Assert.Equal("Table: 1 columns by 20 rows", table.ToString()); + Assert.Equal("Field: Name=f0, DataType=int32, IsNullable=True, Metadata count=0", table.Column(0).Field.ToString()); + Assert.Equal("ChunkedArray: Length=20, DataType=int32", table.Column(0).Data.ToString()); Array nonEqualLengthIntArray = ColumnTests.MakeIntArray(10); Field field1 = new Field.Builder().Name("f1").DataType(Int32Type.Default).Build(); diff --git a/dev/archery/archery/integration/datagen.py b/dev/archery/archery/integration/datagen.py index 7635cfd98feda..ff10c0bb03fb6 100644 --- a/dev/archery/archery/integration/datagen.py +++ b/dev/archery/archery/integration/datagen.py @@ -1520,8 +1520,7 @@ def generate_decimal128_case(): for i, precision in enumerate(range(3, 39)) ] - possible_batch_sizes = 7, 10 - batch_sizes = [possible_batch_sizes[i % 2] for i in range(len(fields))] + batch_sizes = [7, 10] # 'decimal' is the original name for the test, and it must match # provide "gold" files that test backwards compatibility, so they # can be appropriately skipped. 
@@ -1535,8 +1534,7 @@ def generate_decimal256_case(): for i, precision in enumerate(range(37, 70)) ] - possible_batch_sizes = 7, 10 - batch_sizes = [possible_batch_sizes[i % 2] for i in range(len(fields))] + batch_sizes = [7, 10] return _generate_file('decimal256', fields, batch_sizes) @@ -1856,12 +1854,13 @@ def _temp_path(): .skip_tester('Rust'), generate_binary_view_case() - .skip_tester('C++') .skip_tester('C#') .skip_tester('Go') .skip_tester('Java') .skip_tester('JS') - .skip_tester('Rust'), + .skip_tester('Rust') + .skip_format(SKIP_C_SCHEMA, 'C++') + .skip_format(SKIP_C_ARRAY, 'C++'), generate_extension_case() .skip_tester('C#') diff --git a/dev/archery/archery/integration/runner.py b/dev/archery/archery/integration/runner.py index 841633f94cdba..bab00e6d70d4a 100644 --- a/dev/archery/archery/integration/runner.py +++ b/dev/archery/archery/integration/runner.py @@ -615,7 +615,7 @@ def run_all_tests(with_cpp=True, with_java=True, with_js=True, Scenario( "app_metadata_flight_info_endpoint", description="Ensure support FlightInfo and Endpoint app_metadata", - skip_testers={"JS", "C#", "Rust", "Java"} + skip_testers={"JS", "C#", "Rust"} ), Scenario( "flight_sql", diff --git a/dev/archery/archery/integration/tester.py b/dev/archery/archery/integration/tester.py index eadb953a61b50..7de8f73c1398c 100644 --- a/dev/archery/archery/integration/tester.py +++ b/dev/archery/archery/integration/tester.py @@ -225,11 +225,12 @@ def __init__(self, debug=False, **args): self.args = args self.debug = debug - def run_shell_command(self, cmd): + def run_shell_command(self, cmd, **kwargs): cmd = ' '.join(cmd) if self.debug: log(cmd) - subprocess.check_call(cmd, shell=True) + kwargs.update(shell=True) + subprocess.check_call(cmd, **kwargs) def json_to_file(self, json_path, arrow_path): """ diff --git a/dev/archery/archery/integration/tester_js.py b/dev/archery/archery/integration/tester_js.py index 6544a1fc6cc3c..c7f363ba54687 100644 --- 
a/dev/archery/archery/integration/tester_js.py +++ b/dev/archery/archery/integration/tester_js.py @@ -22,11 +22,12 @@ from ..utils.source import ARROW_ROOT_DEFAULT -_EXE_PATH = os.path.join(ARROW_ROOT_DEFAULT, 'js/bin') -_VALIDATE = os.path.join(_EXE_PATH, 'integration.js') -_JSON_TO_ARROW = os.path.join(_EXE_PATH, 'json-to-arrow.js') -_STREAM_TO_FILE = os.path.join(_EXE_PATH, 'stream-to-file.js') -_FILE_TO_STREAM = os.path.join(_EXE_PATH, 'file-to-stream.js') +ARROW_JS_ROOT = os.path.join(ARROW_ROOT_DEFAULT, 'js') +_EXE_PATH = os.path.join(ARROW_JS_ROOT, 'bin') +_VALIDATE = os.path.join(_EXE_PATH, 'integration.ts') +_JSON_TO_ARROW = os.path.join(_EXE_PATH, 'json-to-arrow.ts') +_STREAM_TO_FILE = os.path.join(_EXE_PATH, 'stream-to-file.ts') +_FILE_TO_STREAM = os.path.join(_EXE_PATH, 'file-to-stream.ts') class JSTester(Tester): @@ -50,26 +51,25 @@ def _run(self, exe_cmd, arrow_path=None, json_path=None, if self.debug: log(' '.join(cmd)) - run_cmd(cmd) + run_cmd(cmd, cwd=ARROW_JS_ROOT) def validate(self, json_path, arrow_path, quirks=None): return self._run(_VALIDATE, arrow_path, json_path, 'VALIDATE') def json_to_file(self, json_path, arrow_path): - cmd = ['node', - '--no-warnings', _JSON_TO_ARROW, + cmd = [_JSON_TO_ARROW, '-a', arrow_path, '-j', json_path] - self.run_shell_command(cmd) + self.run_shell_command(cmd, cwd=ARROW_JS_ROOT) def stream_to_file(self, stream_path, file_path): - cmd = ['node', '--no-warnings', _STREAM_TO_FILE, + cmd = [_STREAM_TO_FILE, '<', stream_path, '>', file_path] - self.run_shell_command(cmd) + self.run_shell_command(cmd, cwd=ARROW_JS_ROOT) def file_to_stream(self, file_path, stream_path): - cmd = ['node', '--no-warnings', _FILE_TO_STREAM, + cmd = [_FILE_TO_STREAM, '<', file_path, '>', stream_path] - self.run_shell_command(cmd) + self.run_shell_command(cmd, cwd=ARROW_JS_ROOT) diff --git a/dev/archery/archery/integration/util.py b/dev/archery/archery/integration/util.py index afef7d5eb13b9..1b1eb95a1d296 100644 --- 
a/dev/archery/archery/integration/util.py +++ b/dev/archery/archery/integration/util.py @@ -127,12 +127,13 @@ def frombytes(o): return o -def run_cmd(cmd): +def run_cmd(cmd, **kwargs): if isinstance(cmd, str): cmd = cmd.split(' ') try: - output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) + kwargs.update(stderr=subprocess.STDOUT) + output = subprocess.check_output(cmd, **kwargs) except subprocess.CalledProcessError as e: # this avoids hiding the stdout / stderr of failed processes sio = io.StringIO() diff --git a/dev/release/binary-task.rb b/dev/release/binary-task.rb index e7b618ba3348d..519c8339a4dba 100644 --- a/dev/release/binary-task.rb +++ b/dev/release/binary-task.rb @@ -1089,6 +1089,7 @@ def available_apt_targets ["ubuntu", "focal", "main"], ["ubuntu", "jammy", "main"], ["ubuntu", "lunar", "main"], + ["ubuntu", "mantic", "main"], ] end diff --git a/dev/release/post-03-website.sh b/dev/release/post-03-website.sh index cb605aee83523..83dc157b346e5 100755 --- a/dev/release/post-03-website.sh +++ b/dev/release/post-03-website.sh @@ -258,7 +258,7 @@ current: number: '${version}' pinned_number: '${pinned_version}' major_number: '${major_version}' - date: '${release_date_iso8601}' + date: ${release_date_iso8601} git-tag: '${git_tag_hash}' github-tag-link: 'https://github.com/apache/arrow/releases/tag/${git_tag}' release-notes: 'https://arrow.apache.org/release/${version}.html' diff --git a/dev/release/verify-release-candidate.sh b/dev/release/verify-release-candidate.sh index 287c557fb5ef4..05a7498a85180 100755 --- a/dev/release/verify-release-candidate.sh +++ b/dev/release/verify-release-candidate.sh @@ -196,7 +196,9 @@ test_apt() { "ubuntu:jammy" \ "arm64v8/ubuntu:jammy" \ "ubuntu:lunar" \ - "arm64v8/ubuntu:lunar"; do \ + "arm64v8/ubuntu:lunar" \ + "ubuntu:mantic" \ + "arm64v8/ubuntu:mantic"; do \ case "${target}" in arm64v8/*) if [ "$(arch)" = "aarch64" -o -e /usr/bin/qemu-aarch64-static ]; then @@ -962,7 +964,7 @@ ensure_source_directory() { # 
Ensure that the testing repositories are prepared if [ ! -d ${ARROW_SOURCE_DIR}/testing/data ]; then if [ -d ${SOURCE_DIR}/../../testing/data ]; then - cp -a ${SOURCE_DIR}/../../testing/ ${ARROW_SOURCE_DIR}/ + cp -a ${SOURCE_DIR}/../../testing ${ARROW_SOURCE_DIR}/ else git clone \ https://github.com/apache/arrow-testing.git \ @@ -972,7 +974,7 @@ ensure_source_directory() { if [ ! -d ${ARROW_SOURCE_DIR}/cpp/submodules/parquet-testing/data ]; then if [ -d ${SOURCE_DIR}/../../cpp/submodules/parquet-testing/data ]; then cp -a \ - ${SOURCE_DIR}/../../cpp/submodules/parquet-testing/ \ + ${SOURCE_DIR}/../../cpp/submodules/parquet-testing \ ${ARROW_SOURCE_DIR}/cpp/submodules/ else git clone \ diff --git a/dev/tasks/homebrew-formulae/apache-arrow-glib.rb b/dev/tasks/homebrew-formulae/apache-arrow-glib.rb index c31ef267d73c5..e29354def4c1c 100644 --- a/dev/tasks/homebrew-formulae/apache-arrow-glib.rb +++ b/dev/tasks/homebrew-formulae/apache-arrow-glib.rb @@ -29,7 +29,7 @@ class ApacheArrowGlib < Formula desc "GLib bindings for Apache Arrow" homepage "https://arrow.apache.org/" - url "https://www.apache.org/dyn/closer.lua?path=arrow/arrow-14.0.0-SNAPSHOT/apache-arrow-14.0.0-SNAPSHOT.tar.gz" + url "https://www.apache.org/dyn/closer.lua?path=arrow/arrow-15.0.0-SNAPSHOT/apache-arrow-15.0.0-SNAPSHOT.tar.gz" sha256 "9948ddb6d4798b51552d0dca3252dd6e3a7d0f9702714fc6f5a1b59397ce1d28" license "Apache-2.0" head "https://github.com/apache/arrow.git", branch: "main" diff --git a/dev/tasks/homebrew-formulae/apache-arrow.rb b/dev/tasks/homebrew-formulae/apache-arrow.rb index edf2d7b73b8f5..14d229b477dc8 100644 --- a/dev/tasks/homebrew-formulae/apache-arrow.rb +++ b/dev/tasks/homebrew-formulae/apache-arrow.rb @@ -29,7 +29,7 @@ class ApacheArrow < Formula desc "Columnar in-memory analytics layer designed to accelerate big data" homepage "https://arrow.apache.org/" - url "https://www.apache.org/dyn/closer.lua?path=arrow/arrow-14.0.0-SNAPSHOT/apache-arrow-14.0.0-SNAPSHOT.tar.gz" + url 
"https://www.apache.org/dyn/closer.lua?path=arrow/arrow-15.0.0-SNAPSHOT/apache-arrow-15.0.0-SNAPSHOT.tar.gz" sha256 "9948ddb6d4798b51552d0dca3252dd6e3a7d0f9702714fc6f5a1b59397ce1d28" license "Apache-2.0" head "https://github.com/apache/arrow.git", branch: "main" diff --git a/dev/tasks/java-jars/github.yml b/dev/tasks/java-jars/github.yml index 7dc53a35ef402..fbce12ee427e1 100644 --- a/dev/tasks/java-jars/github.yml +++ b/dev/tasks/java-jars/github.yml @@ -81,7 +81,7 @@ jobs: - { runs_on: ["macos-latest"], arch: "x86_64"} - { runs_on: ["self-hosted", "macOS", "arm64", "devops-managed"], arch: "aarch_64" } env: - MACOSX_DEPLOYMENT_TARGET: "10.13" + MACOSX_DEPLOYMENT_TARGET: "10.15" steps: {{ macros.github_checkout_arrow()|indent }} - name: Set up Python diff --git a/dev/tasks/linux-packages/apache-arrow-apt-source/apt/ubuntu-mantic/Dockerfile b/dev/tasks/linux-packages/apache-arrow-apt-source/apt/ubuntu-mantic/Dockerfile new file mode 100644 index 0000000000000..b5a61282b30fc --- /dev/null +++ b/dev/tasks/linux-packages/apache-arrow-apt-source/apt/ubuntu-mantic/Dockerfile @@ -0,0 +1,41 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +FROM ubuntu:mantic + +RUN \ + echo "debconf debconf/frontend select Noninteractive" | \ + debconf-set-selections + +RUN \ + echo 'APT::Install-Recommends "false";' > \ + /etc/apt/apt.conf.d/disable-install-recommends + +ARG DEBUG + +RUN \ + quiet=$([ "${DEBUG}" = "yes" ] || echo "-qq") && \ + apt update ${quiet} && \ + apt install -y -V ${quiet} \ + build-essential \ + debhelper \ + devscripts \ + fakeroot \ + gnupg \ + lsb-release && \ + apt clean && \ + rm -rf /var/lib/apt/lists/* diff --git a/dev/tasks/linux-packages/apache-arrow-apt-source/debian/changelog b/dev/tasks/linux-packages/apache-arrow-apt-source/debian/changelog index 597a2d4d3b43a..221fb0caa8952 100644 --- a/dev/tasks/linux-packages/apache-arrow-apt-source/debian/changelog +++ b/dev/tasks/linux-packages/apache-arrow-apt-source/debian/changelog @@ -1,3 +1,9 @@ +apache-arrow-apt-source (14.0.0-1) unstable; urgency=low + + * New upstream release. + + -- Raúl Cumplido Thu, 19 Oct 2023 09:12:19 -0000 + apache-arrow-apt-source (13.0.0-1) unstable; urgency=low * New upstream release. diff --git a/dev/tasks/linux-packages/apache-arrow-release/yum/apache-arrow-release.spec.in b/dev/tasks/linux-packages/apache-arrow-release/yum/apache-arrow-release.spec.in index 79cb46006d074..273bf32a2a8e4 100644 --- a/dev/tasks/linux-packages/apache-arrow-release/yum/apache-arrow-release.spec.in +++ b/dev/tasks/linux-packages/apache-arrow-release/yum/apache-arrow-release.spec.in @@ -102,6 +102,9 @@ else fi %changelog +* Thu Oct 19 2023 Raúl Cumplido - 14.0.0-1 +- New upstream release. + * Thu Aug 17 2023 Raúl Cumplido - 13.0.0-1 - New upstream release. 
diff --git a/dev/tasks/linux-packages/apache-arrow/apt/ubuntu-mantic-arm64/from b/dev/tasks/linux-packages/apache-arrow/apt/ubuntu-mantic-arm64/from new file mode 100644 index 0000000000000..247faef234794 --- /dev/null +++ b/dev/tasks/linux-packages/apache-arrow/apt/ubuntu-mantic-arm64/from @@ -0,0 +1,18 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +arm64v8/ubuntu:mantic diff --git a/dev/tasks/linux-packages/apache-arrow/apt/ubuntu-mantic/Dockerfile b/dev/tasks/linux-packages/apache-arrow/apt/ubuntu-mantic/Dockerfile new file mode 100644 index 0000000000000..9e90e08d26513 --- /dev/null +++ b/dev/tasks/linux-packages/apache-arrow/apt/ubuntu-mantic/Dockerfile @@ -0,0 +1,85 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +ARG FROM=ubuntu:mantic +FROM ${FROM} + +RUN \ + echo "debconf debconf/frontend select Noninteractive" | \ + debconf-set-selections + +RUN \ + echo 'APT::Install-Recommends "false";' > \ + /etc/apt/apt.conf.d/disable-install-recommends + +ARG DEBUG +RUN \ + quiet=$([ "${DEBUG}" = "yes" ] || echo "-qq") && \ + apt update ${quiet} && \ + apt install -y -V ${quiet} \ + build-essential \ + clang \ + clang-tools \ + cmake \ + debhelper \ + devscripts \ + git \ + gtk-doc-tools \ + libboost-filesystem-dev \ + libboost-system-dev \ + libbrotli-dev \ + libbz2-dev \ + libc-ares-dev \ + libcurl4-openssl-dev \ + libgirepository1.0-dev \ + libglib2.0-doc \ + libgmock-dev \ + libgoogle-glog-dev \ + libgrpc++-dev \ + libgtest-dev \ + liblz4-dev \ + libmlir-15-dev \ + libprotobuf-dev \ + libprotoc-dev \ + libre2-dev \ + libsnappy-dev \ + libssl-dev \ + libthrift-dev \ + libutf8proc-dev \ + libzstd-dev \ + llvm-dev \ + lsb-release \ + meson \ + mlir-15-tools \ + ninja-build \ + nlohmann-json3-dev \ + pkg-config \ + protobuf-compiler-grpc \ + python3-dev \ + python3-pip \ + python3-setuptools \ + rapidjson-dev \ + tzdata \ + valac \ + zlib1g-dev && \ + if apt list | grep -q '^libcuda1'; then \ + apt install -y -V ${quiet} nvidia-cuda-toolkit; \ + else \ + :; \ + fi && \ + apt clean && \ + rm -rf /var/lib/apt/lists/* diff --git a/dev/tasks/linux-packages/apache-arrow/debian/changelog b/dev/tasks/linux-packages/apache-arrow/debian/changelog index 525ae9cae02a0..5e01d962c44d4 100644 --- a/dev/tasks/linux-packages/apache-arrow/debian/changelog +++ 
b/dev/tasks/linux-packages/apache-arrow/debian/changelog @@ -1,3 +1,9 @@ +apache-arrow (14.0.0-1) unstable; urgency=low + + * New upstream release. + + -- Raúl Cumplido Thu, 19 Oct 2023 09:12:19 -0000 + apache-arrow (13.0.0-1) unstable; urgency=low * New upstream release. diff --git a/dev/tasks/linux-packages/apache-arrow/debian/control.in b/dev/tasks/linux-packages/apache-arrow/debian/control.in index f08fc05bfc3ad..6ea7e56e88365 100644 --- a/dev/tasks/linux-packages/apache-arrow/debian/control.in +++ b/dev/tasks/linux-packages/apache-arrow/debian/control.in @@ -41,7 +41,7 @@ Build-Depends-Indep: libglib2.0-doc Standards-Version: 3.9.8 Homepage: https://arrow.apache.org/ -Package: libarrow1400 +Package: libarrow1500 Section: libs Architecture: any Multi-Arch: same @@ -61,12 +61,12 @@ Pre-Depends: ${misc:Pre-Depends} Depends: ${misc:Depends}, ${shlibs:Depends}, - libarrow1400 (= ${binary:Version}) + libarrow1500 (= ${binary:Version}) Description: Apache Arrow is a data processing library for analysis . This package provides tools. -Package: libarrow-cuda1400 +Package: libarrow-cuda1500 Section: libs Architecture: @CUDA_ARCHITECTURE@ Multi-Arch: same @@ -74,12 +74,12 @@ Pre-Depends: ${misc:Pre-Depends} Depends: ${misc:Depends}, ${shlibs:Depends}, - libarrow1400 (= ${binary:Version}) + libarrow1500 (= ${binary:Version}) Description: Apache Arrow is a data processing library for analysis . This package provides C++ library files for CUDA support. -Package: libarrow-acero1400 +Package: libarrow-acero1500 Section: libs Architecture: any Multi-Arch: same @@ -87,12 +87,12 @@ Pre-Depends: ${misc:Pre-Depends} Depends: ${misc:Depends}, ${shlibs:Depends}, - libarrow1400 (= ${binary:Version}) + libarrow1500 (= ${binary:Version}) Description: Apache Arrow is a data processing library for analysis . This package provides C++ library files for Acero module. 
-Package: libarrow-dataset1400 +Package: libarrow-dataset1500 Section: libs Architecture: any Multi-Arch: same @@ -100,13 +100,13 @@ Pre-Depends: ${misc:Pre-Depends} Depends: ${misc:Depends}, ${shlibs:Depends}, - libarrow-acero1400 (= ${binary:Version}), - libparquet1400 (= ${binary:Version}) + libarrow-acero1500 (= ${binary:Version}), + libparquet1500 (= ${binary:Version}) Description: Apache Arrow is a data processing library for analysis . This package provides C++ library files for Dataset module. -Package: libarrow-flight1400 +Package: libarrow-flight1500 Section: libs Architecture: any Multi-Arch: same @@ -114,12 +114,12 @@ Pre-Depends: ${misc:Pre-Depends} Depends: ${misc:Depends}, ${shlibs:Depends}, - libarrow1400 (= ${binary:Version}) + libarrow1500 (= ${binary:Version}) Description: Apache Arrow is a data processing library for analysis . This package provides C++ library files for Flight RPC system. -Package: libarrow-flight-sql1400 +Package: libarrow-flight-sql1500 Section: libs Architecture: any Multi-Arch: same @@ -127,7 +127,7 @@ Pre-Depends: ${misc:Pre-Depends} Depends: ${misc:Depends}, ${shlibs:Depends}, - libarrow-flight1400 (= ${binary:Version}) + libarrow-flight1500 (= ${binary:Version}) Description: Apache Arrow is a data processing library for analysis . This package provides C++ library files for Flight SQL system. @@ -138,7 +138,7 @@ Architecture: any Multi-Arch: same Depends: ${misc:Depends}, - libarrow1400 (= ${binary:Version}), + libarrow1500 (= ${binary:Version}), libbrotli-dev, libbz2-dev, libcurl4-openssl-dev, @@ -163,7 +163,7 @@ Multi-Arch: same Depends: ${misc:Depends}, libarrow-dev (= ${binary:Version}), - libarrow-cuda1400 (= ${binary:Version}) + libarrow-cuda1500 (= ${binary:Version}) Description: Apache Arrow is a data processing library for analysis . This package provides C++ header files for CUDA support. 
@@ -174,7 +174,7 @@ Architecture: any Multi-Arch: same Depends: ${misc:Depends}, - libarrow-acero1400 (= ${binary:Version}), + libarrow-acero1500 (= ${binary:Version}), libparquet-dev (= ${binary:Version}) Description: Apache Arrow is a data processing library for analysis . @@ -187,7 +187,7 @@ Multi-Arch: same Depends: ${misc:Depends}, libarrow-acero-dev (= ${binary:Version}), - libarrow-dataset1400 (= ${binary:Version}), + libarrow-dataset1500 (= ${binary:Version}), libparquet-dev (= ${binary:Version}) Description: Apache Arrow is a data processing library for analysis . @@ -200,7 +200,7 @@ Multi-Arch: same Depends: ${misc:Depends}, libarrow-dev (= ${binary:Version}), - libarrow-flight1400 (= ${binary:Version}), + libarrow-flight1500 (= ${binary:Version}), libc-ares-dev, @USE_SYSTEM_GRPC@ libgrpc++-dev, @USE_SYSTEM_PROTOBUF@ libprotobuf-dev, @@ -216,12 +216,12 @@ Multi-Arch: same Depends: ${misc:Depends}, libarrow-flight-dev (= ${binary:Version}), - libarrow-flight-sql1400 (= ${binary:Version}) + libarrow-flight-sql1500 (= ${binary:Version}) Description: Apache Arrow is a data processing library for analysis . This package provides C++ header files for Flight SQL system. -Package: libgandiva1400 +Package: libgandiva1500 Section: libs Architecture: any Multi-Arch: same @@ -229,7 +229,7 @@ Pre-Depends: ${misc:Pre-Depends} Depends: ${misc:Depends}, ${shlibs:Depends}, - libarrow1400 (= ${binary:Version}) + libarrow1500 (= ${binary:Version}) Description: Gandiva is a toolset for compiling and evaluating expressions on Arrow Data. . @@ -242,13 +242,13 @@ Multi-Arch: same Depends: ${misc:Depends}, libarrow-dev (= ${binary:Version}), - libgandiva1400 (= ${binary:Version}) + libgandiva1500 (= ${binary:Version}) Description: Gandiva is a toolset for compiling and evaluating expressions on Arrow Data. . This package provides C++ header files. 
-Package: libparquet1400 +Package: libparquet1500 Section: libs Architecture: any Multi-Arch: same @@ -268,7 +268,7 @@ Pre-Depends: ${misc:Pre-Depends} Depends: ${misc:Depends}, ${shlibs:Depends}, - libparquet1400 (= ${binary:Version}) + libparquet1500 (= ${binary:Version}) Description: Apache Parquet is a columnar storage format . This package provides tools. @@ -280,13 +280,13 @@ Multi-Arch: same Depends: ${misc:Depends}, libarrow-dev (= ${binary:Version}), - libparquet1400 (= ${binary:Version}), + libparquet1500 (= ${binary:Version}), libthrift-dev Description: Apache Parquet is a columnar storage format . This package provides C++ header files. -Package: libarrow-glib1400 +Package: libarrow-glib1500 Section: libs Architecture: any Multi-Arch: same @@ -294,7 +294,7 @@ Pre-Depends: ${misc:Pre-Depends} Depends: ${misc:Depends}, ${shlibs:Depends}, - libarrow1400 (= ${binary:Version}) + libarrow1500 (= ${binary:Version}) Description: Apache Arrow is a data processing library for analysis . This package provides GLib based library files. @@ -318,7 +318,7 @@ Depends: ${misc:Depends}, libglib2.0-dev, libarrow-acero-dev (= ${binary:Version}), - libarrow-glib1400 (= ${binary:Version}), + libarrow-glib1500 (= ${binary:Version}), gir1.2-arrow-1.0 (= ${binary:Version}) Suggests: libarrow-glib-doc Description: Apache Arrow is a data processing library for analysis @@ -336,7 +336,7 @@ Description: Apache Arrow is a data processing library for analysis . This package provides documentations. -Package: libarrow-cuda-glib1400 +Package: libarrow-cuda-glib1500 Section: libs Architecture: @CUDA_ARCHITECTURE@ Multi-Arch: same @@ -344,8 +344,8 @@ Pre-Depends: ${misc:Pre-Depends} Depends: ${misc:Depends}, ${shlibs:Depends}, - libarrow-glib1400 (= ${binary:Version}), - libarrow-cuda1400 (= ${binary:Version}) + libarrow-glib1500 (= ${binary:Version}), + libarrow-cuda1500 (= ${binary:Version}) Description: Apache Arrow is a data processing library for analysis . 
This package provides GLib based library files for CUDA support. @@ -370,13 +370,13 @@ Depends: ${misc:Depends}, libarrow-cuda-dev (= ${binary:Version}), libarrow-glib-dev (= ${binary:Version}), - libarrow-cuda-glib1400 (= ${binary:Version}), + libarrow-cuda-glib1500 (= ${binary:Version}), gir1.2-arrow-cuda-1.0 (= ${binary:Version}) Description: Apache Arrow is a data processing library for analysis . This package provides GLib based header files for CUDA support. -Package: libarrow-dataset-glib1400 +Package: libarrow-dataset-glib1500 Section: libs Architecture: any Multi-Arch: same @@ -384,8 +384,8 @@ Pre-Depends: ${misc:Pre-Depends} Depends: ${misc:Depends}, ${shlibs:Depends}, - libarrow-glib1400 (= ${binary:Version}), - libarrow-dataset1400 (= ${binary:Version}) + libarrow-glib1500 (= ${binary:Version}), + libarrow-dataset1500 (= ${binary:Version}) Description: Apache Arrow is a data processing library for analysis . This package provides GLib based library files for dataset module. @@ -410,7 +410,7 @@ Depends: ${misc:Depends}, libarrow-dataset-dev (= ${binary:Version}), libarrow-glib-dev (= ${binary:Version}), - libarrow-dataset-glib1400 (= ${binary:Version}), + libarrow-dataset-glib1500 (= ${binary:Version}), gir1.2-arrow-dataset-1.0 (= ${binary:Version}) Description: Apache Arrow is a data processing library for analysis . @@ -427,7 +427,7 @@ Description: Apache Arrow is a data processing library for analysis . This package provides documentations for dataset module. -Package: libarrow-flight-glib1400 +Package: libarrow-flight-glib1500 Section: libs Architecture: any Multi-Arch: same @@ -435,8 +435,8 @@ Pre-Depends: ${misc:Pre-Depends} Depends: ${misc:Depends}, ${shlibs:Depends}, - libarrow-glib1400 (= ${binary:Version}), - libarrow-flight1400 (= ${binary:Version}) + libarrow-glib1500 (= ${binary:Version}), + libarrow-flight1500 (= ${binary:Version}) Description: Apache Arrow is a data processing library for analysis . 
This package provides GLib based library files for Apache Arrow Flight. @@ -462,7 +462,7 @@ Depends: ${misc:Depends}, libarrow-flight-dev (= ${binary:Version}), libarrow-glib-dev (= ${binary:Version}), - libarrow-flight-glib1400 (= ${binary:Version}), + libarrow-flight-glib1500 (= ${binary:Version}), gir1.2-arrow-flight-1.0 (= ${binary:Version}) Description: Apache Arrow is a data processing library for analysis . @@ -479,7 +479,7 @@ Description: Apache Arrow is a data processing library for analysis . This package provides documentations for Apache Arrow Flight. -Package: libarrow-flight-sql-glib1400 +Package: libarrow-flight-sql-glib1500 Section: libs Architecture: any Multi-Arch: same @@ -487,8 +487,8 @@ Pre-Depends: ${misc:Pre-Depends} Depends: ${misc:Depends}, ${shlibs:Depends}, - libarrow-flight-glib1400 (= ${binary:Version}), - libarrow-flight-sql1400 (= ${binary:Version}) + libarrow-flight-glib1500 (= ${binary:Version}), + libarrow-flight-sql1500 (= ${binary:Version}) Description: Apache Arrow is a data processing library for analysis . This package provides GLib based library files for Apache Arrow Flight SQL. @@ -514,7 +514,7 @@ Depends: ${misc:Depends}, libarrow-flight-sql-dev (= ${binary:Version}), libarrow-flight-glib-dev (= ${binary:Version}), - libarrow-flight-sql-glib1400 (= ${binary:Version}), + libarrow-flight-sql-glib1500 (= ${binary:Version}), gir1.2-arrow-flight-sql-1.0 (= ${binary:Version}) Description: Apache Arrow is a data processing library for analysis . @@ -531,7 +531,7 @@ Description: Apache Arrow is a data processing library for analysis . This package provides documentations for Apache Arrow Flight SQL. 
-Package: libgandiva-glib1400 +Package: libgandiva-glib1500 Section: libs Architecture: any Multi-Arch: same @@ -539,8 +539,8 @@ Pre-Depends: ${misc:Pre-Depends} Depends: ${misc:Depends}, ${shlibs:Depends}, - libarrow-glib1400 (= ${binary:Version}), - libgandiva1400 (= ${binary:Version}) + libarrow-glib1500 (= ${binary:Version}), + libgandiva1500 (= ${binary:Version}) Description: Gandiva is a toolset for compiling and evaluating expressions on Arrow Data. . @@ -567,7 +567,7 @@ Depends: ${misc:Depends}, libgandiva-dev (= ${binary:Version}), libarrow-glib-dev (= ${binary:Version}), - libgandiva-glib1400 (= ${binary:Version}), + libgandiva-glib1500 (= ${binary:Version}), gir1.2-gandiva-1.0 (= ${binary:Version}) Description: Gandiva is a toolset for compiling and evaluating expressions on Arrow Data. @@ -586,7 +586,7 @@ Description: Gandiva is a toolset for compiling and evaluating expressions . This package provides documentations. -Package: libparquet-glib1400 +Package: libparquet-glib1500 Section: libs Architecture: any Multi-Arch: same @@ -594,8 +594,8 @@ Pre-Depends: ${misc:Pre-Depends} Depends: ${misc:Depends}, ${shlibs:Depends}, - libarrow-glib1400 (= ${binary:Version}), - libparquet1400 (= ${binary:Version}) + libarrow-glib1500 (= ${binary:Version}), + libparquet1500 (= ${binary:Version}) Description: Apache Parquet is a columnar storage format . This package provides GLib based library files. 
@@ -620,7 +620,7 @@ Depends: ${misc:Depends}, libarrow-glib-dev (= ${binary:Version}), libparquet-dev (= ${binary:Version}), - libparquet-glib1400 (= ${binary:Version}), + libparquet-glib1500 (= ${binary:Version}), gir1.2-parquet-1.0 (= ${binary:Version}) Suggests: libparquet-glib-doc Description: Apache Parquet is a columnar storage format diff --git a/dev/tasks/linux-packages/apache-arrow/debian/libarrow-acero1400.install b/dev/tasks/linux-packages/apache-arrow/debian/libarrow-acero1500.install similarity index 100% rename from dev/tasks/linux-packages/apache-arrow/debian/libarrow-acero1400.install rename to dev/tasks/linux-packages/apache-arrow/debian/libarrow-acero1500.install diff --git a/dev/tasks/linux-packages/apache-arrow/debian/libarrow-cuda-glib1400.install b/dev/tasks/linux-packages/apache-arrow/debian/libarrow-cuda-glib1500.install similarity index 100% rename from dev/tasks/linux-packages/apache-arrow/debian/libarrow-cuda-glib1400.install rename to dev/tasks/linux-packages/apache-arrow/debian/libarrow-cuda-glib1500.install diff --git a/dev/tasks/linux-packages/apache-arrow/debian/libarrow-cuda1400.install b/dev/tasks/linux-packages/apache-arrow/debian/libarrow-cuda1500.install similarity index 100% rename from dev/tasks/linux-packages/apache-arrow/debian/libarrow-cuda1400.install rename to dev/tasks/linux-packages/apache-arrow/debian/libarrow-cuda1500.install diff --git a/dev/tasks/linux-packages/apache-arrow/debian/libarrow-dataset-glib1400.install b/dev/tasks/linux-packages/apache-arrow/debian/libarrow-dataset-glib1500.install similarity index 100% rename from dev/tasks/linux-packages/apache-arrow/debian/libarrow-dataset-glib1400.install rename to dev/tasks/linux-packages/apache-arrow/debian/libarrow-dataset-glib1500.install diff --git a/dev/tasks/linux-packages/apache-arrow/debian/libarrow-dataset1400.install b/dev/tasks/linux-packages/apache-arrow/debian/libarrow-dataset1500.install similarity index 100% rename from 
dev/tasks/linux-packages/apache-arrow/debian/libarrow-dataset1400.install rename to dev/tasks/linux-packages/apache-arrow/debian/libarrow-dataset1500.install diff --git a/dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight-glib1400.install b/dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight-glib1500.install similarity index 100% rename from dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight-glib1400.install rename to dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight-glib1500.install diff --git a/dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight-sql-glib1400.install b/dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight-sql-glib1500.install similarity index 100% rename from dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight-sql-glib1400.install rename to dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight-sql-glib1500.install diff --git a/dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight-sql1400.install b/dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight-sql1500.install similarity index 100% rename from dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight-sql1400.install rename to dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight-sql1500.install diff --git a/dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight1400.install b/dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight1500.install similarity index 100% rename from dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight1400.install rename to dev/tasks/linux-packages/apache-arrow/debian/libarrow-flight1500.install diff --git a/dev/tasks/linux-packages/apache-arrow/debian/libarrow-glib1400.install b/dev/tasks/linux-packages/apache-arrow/debian/libarrow-glib1500.install similarity index 100% rename from dev/tasks/linux-packages/apache-arrow/debian/libarrow-glib1400.install rename to dev/tasks/linux-packages/apache-arrow/debian/libarrow-glib1500.install diff --git 
a/dev/tasks/linux-packages/apache-arrow/debian/libarrow1400.install b/dev/tasks/linux-packages/apache-arrow/debian/libarrow1500.install similarity index 100% rename from dev/tasks/linux-packages/apache-arrow/debian/libarrow1400.install rename to dev/tasks/linux-packages/apache-arrow/debian/libarrow1500.install diff --git a/dev/tasks/linux-packages/apache-arrow/debian/libgandiva-glib1400.install b/dev/tasks/linux-packages/apache-arrow/debian/libgandiva-glib1500.install similarity index 100% rename from dev/tasks/linux-packages/apache-arrow/debian/libgandiva-glib1400.install rename to dev/tasks/linux-packages/apache-arrow/debian/libgandiva-glib1500.install diff --git a/dev/tasks/linux-packages/apache-arrow/debian/libgandiva1400.install b/dev/tasks/linux-packages/apache-arrow/debian/libgandiva1500.install similarity index 100% rename from dev/tasks/linux-packages/apache-arrow/debian/libgandiva1400.install rename to dev/tasks/linux-packages/apache-arrow/debian/libgandiva1500.install diff --git a/dev/tasks/linux-packages/apache-arrow/debian/libparquet-glib1400.install b/dev/tasks/linux-packages/apache-arrow/debian/libparquet-glib1500.install similarity index 100% rename from dev/tasks/linux-packages/apache-arrow/debian/libparquet-glib1400.install rename to dev/tasks/linux-packages/apache-arrow/debian/libparquet-glib1500.install diff --git a/dev/tasks/linux-packages/apache-arrow/debian/libparquet1400.install b/dev/tasks/linux-packages/apache-arrow/debian/libparquet1500.install similarity index 100% rename from dev/tasks/linux-packages/apache-arrow/debian/libparquet1400.install rename to dev/tasks/linux-packages/apache-arrow/debian/libparquet1500.install diff --git a/dev/tasks/linux-packages/apache-arrow/yum/arrow.spec.in b/dev/tasks/linux-packages/apache-arrow/yum/arrow.spec.in index 4691f9e5439da..f61d47db2edd7 100644 --- a/dev/tasks/linux-packages/apache-arrow/yum/arrow.spec.in +++ b/dev/tasks/linux-packages/apache-arrow/yum/arrow.spec.in @@ -864,6 +864,9 @@ 
Documentation for Apache Parquet GLib. %{_datadir}/gtk-doc/html/parquet-glib/ %changelog +* Thu Oct 19 2023 Raúl Cumplido - 14.0.0-1 +- New upstream release. + * Thu Aug 17 2023 Raúl Cumplido - 13.0.0-1 - New upstream release. diff --git a/dev/tasks/linux-packages/package-task.rb b/dev/tasks/linux-packages/package-task.rb index bb5d70b64f2b0..da281d0ee2cf9 100644 --- a/dev/tasks/linux-packages/package-task.rb +++ b/dev/tasks/linux-packages/package-task.rb @@ -271,12 +271,16 @@ def apt_targets_default # "debian-bullseye-arm64", "debian-bookworm", # "debian-bookworm-arm64", + "debian-trixie", + # "debian-trixie-arm64", "ubuntu-focal", # "ubuntu-focal-arm64", "ubuntu-jammy", # "ubuntu-jammy-arm64", "ubuntu-lunar", # "ubuntu-lunar-arm64", + "ubuntu-mantic", + # "ubuntu-mantic-arm64", ] end diff --git a/dev/tasks/tasks.yml b/dev/tasks/tasks.yml index 17123ed2b8339..15fac25d26d65 100644 --- a/dev/tasks/tasks.yml +++ b/dev/tasks/tasks.yml @@ -487,7 +487,7 @@ tasks: {############################## Wheel OSX ####################################} -{% for macos_version, macos_codename in [("10.14", "mojave")] %} +{% for macos_version, macos_codename in [("10.15", "catalina")] %} {% set platform_tag = "macosx_{}_x86_64".format(macos_version.replace('.', '_')) %} wheel-macos-{{ macos_codename }}-{{ python_tag }}-amd64: @@ -557,7 +557,8 @@ tasks: "debian-trixie", "ubuntu-focal", "ubuntu-jammy", - "ubuntu-lunar"] %} + "ubuntu-lunar", + "ubuntu-mantic"] %} {% for architecture in ["amd64", "arm64"] %} {{ target }}-{{ architecture }}: ci: github @@ -594,59 +595,59 @@ tasks: - gir1.2-gandiva-1.0_{no_rc_version}-1_[a-z0-9]+.deb - gir1.2-parquet-1.0_{no_rc_version}-1_[a-z0-9]+.deb - libarrow-acero-dev_{no_rc_version}-1_[a-z0-9]+.deb - - libarrow-acero1400-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb - - libarrow-acero1400_{no_rc_version}-1_[a-z0-9]+.deb + - libarrow-acero1500-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb + - libarrow-acero1500_{no_rc_version}-1_[a-z0-9]+.deb - 
libarrow-dataset-dev_{no_rc_version}-1_[a-z0-9]+.deb - libarrow-dataset-glib-dev_{no_rc_version}-1_[a-z0-9]+.deb - libarrow-dataset-glib-doc_{no_rc_version}-1_[a-z0-9]+.deb - - libarrow-dataset-glib1400-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb - - libarrow-dataset-glib1400_{no_rc_version}-1_[a-z0-9]+.deb - - libarrow-dataset1400-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb - - libarrow-dataset1400_{no_rc_version}-1_[a-z0-9]+.deb + - libarrow-dataset-glib1500-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb + - libarrow-dataset-glib1500_{no_rc_version}-1_[a-z0-9]+.deb + - libarrow-dataset1500-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb + - libarrow-dataset1500_{no_rc_version}-1_[a-z0-9]+.deb - libarrow-dev_{no_rc_version}-1_[a-z0-9]+.deb - libarrow-flight-dev_{no_rc_version}-1_[a-z0-9]+.deb - libarrow-flight-glib-dev_{no_rc_version}-1_[a-z0-9]+.deb - libarrow-flight-glib-doc_{no_rc_version}-1_[a-z0-9]+.deb - - libarrow-flight-glib1400-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb - - libarrow-flight-glib1400_{no_rc_version}-1_[a-z0-9]+.deb + - libarrow-flight-glib1500-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb + - libarrow-flight-glib1500_{no_rc_version}-1_[a-z0-9]+.deb - libarrow-flight-sql-dev_{no_rc_version}-1_[a-z0-9]+.deb - libarrow-flight-sql-glib-dev_{no_rc_version}-1_[a-z0-9]+.deb - libarrow-flight-sql-glib-doc_{no_rc_version}-1_[a-z0-9]+.deb - - libarrow-flight-sql-glib1400-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb - - libarrow-flight-sql-glib1400_{no_rc_version}-1_[a-z0-9]+.deb - - libarrow-flight-sql1400-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb - - libarrow-flight-sql1400_{no_rc_version}-1_[a-z0-9]+.deb - - libarrow-flight1400-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb - - libarrow-flight1400_{no_rc_version}-1_[a-z0-9]+.deb + - libarrow-flight-sql-glib1500-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb + - libarrow-flight-sql-glib1500_{no_rc_version}-1_[a-z0-9]+.deb + - libarrow-flight-sql1500-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb + - 
libarrow-flight-sql1500_{no_rc_version}-1_[a-z0-9]+.deb + - libarrow-flight1500-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb + - libarrow-flight1500_{no_rc_version}-1_[a-z0-9]+.deb - libarrow-glib-dev_{no_rc_version}-1_[a-z0-9]+.deb - libarrow-glib-doc_{no_rc_version}-1_[a-z0-9]+.deb - - libarrow-glib1400-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb - - libarrow-glib1400_{no_rc_version}-1_[a-z0-9]+.deb - - libarrow1400-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb - - libarrow1400_{no_rc_version}-1_[a-z0-9]+.deb + - libarrow-glib1500-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb + - libarrow-glib1500_{no_rc_version}-1_[a-z0-9]+.deb + - libarrow1500-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb + - libarrow1500_{no_rc_version}-1_[a-z0-9]+.deb - libgandiva-dev_{no_rc_version}-1_[a-z0-9]+.deb - libgandiva-glib-dev_{no_rc_version}-1_[a-z0-9]+.deb - libgandiva-glib-doc_{no_rc_version}-1_[a-z0-9]+.deb - - libgandiva-glib1400-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb - - libgandiva-glib1400_{no_rc_version}-1_[a-z0-9]+.deb - - libgandiva1400-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb - - libgandiva1400_{no_rc_version}-1_[a-z0-9]+.deb + - libgandiva-glib1500-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb + - libgandiva-glib1500_{no_rc_version}-1_[a-z0-9]+.deb + - libgandiva1500-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb + - libgandiva1500_{no_rc_version}-1_[a-z0-9]+.deb - libparquet-dev_{no_rc_version}-1_[a-z0-9]+.deb - libparquet-glib-dev_{no_rc_version}-1_[a-z0-9]+.deb - libparquet-glib-doc_{no_rc_version}-1_[a-z0-9]+.deb - - libparquet-glib1400-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb - - libparquet-glib1400_{no_rc_version}-1_[a-z0-9]+.deb - - libparquet1400-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb - - libparquet1400_{no_rc_version}-1_[a-z0-9]+.deb + - libparquet-glib1500-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb + - libparquet-glib1500_{no_rc_version}-1_[a-z0-9]+.deb + - libparquet1500-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb + - libparquet1500_{no_rc_version}-1_[a-z0-9]+.deb - 
parquet-tools_{no_rc_version}-1_[a-z0-9]+.deb {% if architecture == "amd64" %} - gir1.2-arrow-cuda-1.0_{no_rc_version}-1_[a-z0-9]+.deb - libarrow-cuda-dev_{no_rc_version}-1_[a-z0-9]+.deb - libarrow-cuda-glib-dev_{no_rc_version}-1_[a-z0-9]+.deb - - libarrow-cuda-glib1400-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb - - libarrow-cuda-glib1400_{no_rc_version}-1_[a-z0-9]+.deb - - libarrow-cuda1400-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb - - libarrow-cuda1400_{no_rc_version}-1_[a-z0-9]+.deb + - libarrow-cuda-glib1500-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb + - libarrow-cuda-glib1500_{no_rc_version}-1_[a-z0-9]+.deb + - libarrow-cuda1500-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb + - libarrow-cuda1500_{no_rc_version}-1_[a-z0-9]+.deb {% endif %} {% endfor %} {% endfor %} @@ -1182,12 +1183,12 @@ tasks: image: debian-cpp {% endfor %} - test-fedora-35-cpp: + test-fedora-38-cpp: ci: github template: docker-tests/github.linux.yml params: env: - FEDORA: 35 + FEDORA: 38 image: fedora-cpp {% for cpp_standard in [20] %} @@ -1295,12 +1296,12 @@ tasks: UBUNTU: 22.04 image: ubuntu-python - test-fedora-35-python-3: + test-fedora-38-python-3: ci: azure template: docker-tests/azure.linux.yml params: env: - FEDORA: 35 + FEDORA: 38 image: fedora-python test-r-linux-valgrind: diff --git a/docker-compose.yml b/docker-compose.yml index e54c609e54138..e2c993ee9ea41 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -135,11 +135,10 @@ x-hierarchy: - debian-go: - debian-go-cgo - debian-go-cgo-python - - debian-java - debian-js - - eclipse-java - fedora-cpp: - fedora-python + - java - python-sdist - ubuntu-cpp: - ubuntu-cpp-static @@ -655,7 +654,7 @@ services: # docker-compose run --rm fedora-cpp # Parameters: # ARCH: amd64, arm64v8, ... - # FEDORA: 33 + # FEDORA: 38 image: ${REPO}:${ARCH}-fedora-${FEDORA}-cpp build: context: . 
@@ -671,7 +670,6 @@ services: <<: [*common, *ccache, *sccache, *cpp] ARROW_ENABLE_TIMING_TESTS: # inherit ARROW_MIMALLOC: "ON" - Protobuf_SOURCE: "BUNDLED" # Need Protobuf >= 3.15 volumes: &fedora-volumes - .:/arrow:delegated - ${DOCKER_VOLUME_PREFIX}fedora-ccache:/ccache:delegated @@ -957,7 +955,7 @@ services: # docker-compose run --rm fedora-python # Parameters: # ARCH: amd64, arm64v8, ... - # FEDORA: 33 + # FEDORA: 38 image: ${REPO}:${ARCH}-fedora-${FEDORA}-python-3 build: context: . @@ -969,7 +967,6 @@ services: shm_size: *shm-size environment: <<: [*common, *ccache] - Protobuf_SOURCE: "BUNDLED" # Need Protobuf >= 3.15 volumes: *fedora-volumes command: *python-command @@ -1676,14 +1673,14 @@ services: ################################ Java ####################################### - debian-java: + java: # Usage: - # docker-compose build debian-java - # docker-compose run debian-java + # docker-compose build java + # docker-compose run java # Parameters: - # MAVEN: 3.5.4, 3.6.2 - # JDK: 8, 11 - image: ${ARCH}/maven:${MAVEN}-jdk-${JDK} + # MAVEN: 3.9.5 + # JDK: 8, 11, 17, 21 + image: ${ARCH}/maven:${MAVEN}-eclipse-temurin-${JDK} shm_size: *shm-size volumes: &java-volumes - .:/arrow:delegated @@ -1693,18 +1690,6 @@ services: /arrow/ci/scripts/java_build.sh /arrow /build && /arrow/ci/scripts/java_test.sh /arrow /build" - eclipse-java: - # Usage: - # docker-compose build eclipse-java - # docker-compose run eclipse-java - # Parameters: - # MAVEN: 3.9.4 - # JDK: 17, 21 - image: ${ARCH}/maven:${MAVEN}-eclipse-temurin-${JDK} - shm_size: *shm-size - volumes: *java-volumes - command: *java-command - ############################## Integration ################################## conda-integration: @@ -1733,19 +1718,8 @@ services: ARCHERY_INTEGRATION_WITH_RUST: 0 # Tell Archery where the arrow C++ binaries are located ARROW_CPP_EXE_PATH: /build/cpp/debug - ARROW_GO_INTEGRATION: 1 - ARROW_JAVA_CDATA: "ON" - JAVA_JNI_CMAKE_ARGS: >- - -DARROW_JAVA_JNI_ENABLE_DEFAULT=OFF - 
-DARROW_JAVA_JNI_ENABLE_C=ON command: - ["/arrow/ci/scripts/rust_build.sh /arrow /build && - /arrow/ci/scripts/cpp_build.sh /arrow /build && - /arrow/ci/scripts/csharp_build.sh /arrow /build && - /arrow/ci/scripts/go_build.sh /arrow && - /arrow/ci/scripts/java_jni_build.sh /arrow $${ARROW_HOME} /build /tmp/dist/java/$$(arch) && - /arrow/ci/scripts/java_build.sh /arrow /build /tmp/dist/java && - /arrow/ci/scripts/js_build.sh /arrow /build && + ["/arrow/ci/scripts/integration_arrow_build.sh /arrow /build && /arrow/ci/scripts/integration_arrow.sh /arrow /build"] ################################ Docs ####################################### diff --git a/docs/source/_static/versions.json b/docs/source/_static/versions.json index 8d9c5878c8213..10e179420b803 100644 --- a/docs/source/_static/versions.json +++ b/docs/source/_static/versions.json @@ -1,15 +1,20 @@ [ { - "name": "14.0 (dev)", + "name": "15.0 (dev)", "version": "dev/", "url": "https://arrow.apache.org/docs/dev/" }, { - "name": "13.0 (stable)", + "name": "14.0 (stable)", "version": "", "url": "https://arrow.apache.org/docs/", "preferred": true }, + { + "name": "13.0", + "version": "13.0/", + "url": "https://arrow.apache.org/docs/13.0/" + }, { "name": "12.0", "version": "12.0/", diff --git a/docs/source/cpp/datatypes.rst b/docs/source/cpp/datatypes.rst index 1d2133cbdf3d6..922fef1498b9c 100644 --- a/docs/source/cpp/datatypes.rst +++ b/docs/source/cpp/datatypes.rst @@ -157,7 +157,7 @@ Visitor Pattern --------------- In order to process :class:`arrow::DataType`, :class:`arrow::Scalar`, or -:class:`arrow::Array`, you may need to write write logic that specializes based +:class:`arrow::Array`, you may need to write logic that specializes based on the particular Arrow type. In these cases, use the `visitor pattern `_. 
Arrow provides the template functions: diff --git a/docs/source/cpp/examples/compute_and_write_example.rst b/docs/source/cpp/examples/compute_and_write_example.rst index 096b97b837e05..c4480a5f5cdf1 100644 --- a/docs/source/cpp/examples/compute_and_write_example.rst +++ b/docs/source/cpp/examples/compute_and_write_example.rst @@ -23,6 +23,6 @@ Compute and Write CSV Example The file ``cpp/examples/arrow/compute_and_write_csv_example.cc`` located inside the source tree contains an example of creating a table of two numerical columns -and then compariong the magnitudes of the entries in the columns and wrting out to +and then comparing the magnitudes of the entries in the columns and writing out to a CSV file with the column entries and their comparisons. The code in the example is documented. diff --git a/docs/source/cpp/parquet.rst b/docs/source/cpp/parquet.rst index 23fca8fd73010..3e06352f5dde3 100644 --- a/docs/source/cpp/parquet.rst +++ b/docs/source/cpp/parquet.rst @@ -481,6 +481,8 @@ physical type. +-------------------+-----------------------------+----------------------------+---------+ | MAP | Any | Map | \(6) | +-------------------+-----------------------------+----------------------------+---------+ +| FLOAT16 | FIXED_LENGTH_BYTE_ARRAY | HalfFloat | | ++-------------------+-----------------------------+----------------------------+---------+ * \(1) On the write side, the Parquet physical type INT32 is generated. 
diff --git a/docs/source/developers/java/building.rst b/docs/source/developers/java/building.rst index 061c616d4b971..8b2a504631fdb 100644 --- a/docs/source/developers/java/building.rst +++ b/docs/source/developers/java/building.rst @@ -76,7 +76,7 @@ Docker compose $ cd arrow/java $ export JAVA_HOME= $ java --version - $ docker-compose run debian-java + $ docker-compose run java Archery ~~~~~~~ @@ -86,7 +86,7 @@ Archery $ cd arrow/java $ export JAVA_HOME= $ java --version - $ archery docker run debian-java + $ archery docker run java Building JNI Libraries (\*.dylib / \*.so / \*.dll) -------------------------------------------------- diff --git a/docs/source/developers/python.rst b/docs/source/developers/python.rst index e4699a65e211e..be9fac067cb52 100644 --- a/docs/source/developers/python.rst +++ b/docs/source/developers/python.rst @@ -405,6 +405,12 @@ set the ``PYARROW_PARALLEL`` environment variable. If you wish to delete stale PyArrow build artifacts before rebuilding, navigate to the ``arrow/python`` folder and run ``git clean -Xfd .``. +By default, PyArrow will be built in release mode even if Arrow C++ has been +built in debug mode. To create a debug build of PyArrow, run +``export PYARROW_BUILD_TYPE=debug`` prior to running ``python setup.py +build_ext --inplace`` above. A ``relwithdebinfo`` build can be created +similarly. + Now you are ready to install test dependencies and run `Unit Testing`_, as described above. @@ -434,6 +440,9 @@ Debugging Since pyarrow depends on the Arrow C++ libraries, debugging can frequently involve crossing between Python and C++ shared libraries. +For the best experience, make sure you've built both Arrow C++ +(``-DCMAKE_BUILD_TYPE=Debug``) and PyArrow (``export PYARROW_BUILD_TYPE=debug``) +in debug mode. 
Using gdb on Linux ~~~~~~~~~~~~~~~~~~ diff --git a/docs/source/format/CDataInterface/PyCapsuleInterface.rst b/docs/source/format/CDataInterface/PyCapsuleInterface.rst index 263c428c1ef0e..0c1a01d7c6778 100644 --- a/docs/source/format/CDataInterface/PyCapsuleInterface.rst +++ b/docs/source/format/CDataInterface/PyCapsuleInterface.rst @@ -30,7 +30,7 @@ The :ref:`C data interface ` and different implementations of Arrow. However, these interfaces don't specify how Python libraries should expose these structs to other libraries. Prior to this, many libraries simply provided export to PyArrow data structures, using the -``_import_from_c`` and ``_export_from_c`` methods. However, this always required +``_import_from_c`` and ``_export_to_c`` methods. However, this always required PyArrow to be installed. In addition, those APIs could cause memory leaks if handled improperly. diff --git a/docs/source/format/CDeviceDataInterface.rst b/docs/source/format/CDeviceDataInterface.rst index b54e6eabe0b2d..a584852df87eb 100644 --- a/docs/source/format/CDeviceDataInterface.rst +++ b/docs/source/format/CDeviceDataInterface.rst @@ -277,7 +277,7 @@ has the following fields: to access the memory in the buffers. If an event is provided, then the producer MUST ensure that the exported - data is available on the device before the event is triggered. The + data is available on the device before the event is triggered. The consumer SHOULD wait on the event before trying to access the exported data. @@ -290,7 +290,7 @@ has the following fields: As non-CPU development expands, there may be a need to expand this structure. In order to do so without potentially breaking ABI changes, we reserve 24 bytes at the end of the object. These bytes MUST be zero'd - out after initialization by the producer in order to ensure safe + out after initialization by the producer in order to ensure safe evolution of the ABI in the future. .. 
_c-device-data-interface-event-types: @@ -300,7 +300,7 @@ Synchronization event types The table below lists the expected event types for each device type. If no event type is supported ("N/A"), then the ``sync_event`` member -should always be null. +should always be null. Remember that the event *CAN* be null if synchronization is not needed to access the data. @@ -352,7 +352,7 @@ Memory management ----------------- First and foremost: Out of everything in this interface, it is *only* the -data buffers themselves which reside in device memory (i.e. the ``buffers`` +data buffers themselves which reside in device memory (i.e. the ``buffers`` member of the ``ArrowArray`` struct). Everything else should be in CPU memory. @@ -408,7 +408,7 @@ see inconsistent data while the other is mutating it. Synchronization --------------- -If the ``sync_event`` member is non-NULL, the consumer should not attempt +If the ``sync_event`` member is non-NULL, the consumer should not attempt to access or read the data until they have synchronized on that event. If the ``sync_event`` member is NULL, then it MUST be safe to access the data without any synchronization necessary on the part of the consumer. @@ -501,7 +501,6 @@ could be used for any device: arr->array.release(&arr->array); } -======================= Device Stream Interface ======================= @@ -510,7 +509,7 @@ interface also specifies a higher-level structure for easing communication of streaming data within a single process. Semantics -========= +--------- An Arrow C device stream exposes a streaming source of data chunks, each with the same schema. Chunks are obtained by calling a blocking pull-style iteration @@ -520,7 +519,7 @@ to provide a stream of data on multiple device types, a producer should provide a separate stream object for each device type. 
Structure definition -==================== +-------------------- The C device stream interface is defined by a single ``struct`` definition: @@ -554,7 +553,7 @@ The C device stream interface is defined by a single ``struct`` definition: kept exactly as-is when these definitions are copied. The ArrowDeviceArrayStream structure ------------------------------------- +'''''''''''''''''''''''''''''''''''' The ``ArrowDeviceArrayStream`` provides a device type that can access the resulting data along with the required callbacks to interact with a @@ -627,20 +626,20 @@ streaming source of Arrow arrays. It has the following fields: handled by the producer, and especially by the release callback. Result lifetimes ----------------- +'''''''''''''''' The data returned by the ``get_schema`` and ``get_next`` callbacks must be released independantly. Their lifetimes are not tied to that of ``ArrowDeviceArrayStream``. Stream lifetime ---------------- +''''''''''''''' Lifetime of the C stream is managed using a release callback with similar usage as in :ref:`C data interface `. Thread safety -------------- +''''''''''''' The stream source is not assumed to be thread-safe. Consumers wanting to call ``get_next`` from several threads should ensure those calls are @@ -652,9 +651,9 @@ Interoperability with other interchange formats Other interchange APIs, such as the `CUDA Array Interface`_, include members to pass the shape and the data types of the data buffers being exported. This information is necessary to interpret the raw bytes in the -device data buffers that are being shared. Rather than store the -shape / types of the data alongside the ``ArrowDeviceArray``, users -should utilize the existing ``ArrowSchema`` structure to pass any data +device data buffers that are being shared. Rather than store the +shape / types of the data alongside the ``ArrowDeviceArray``, users +should utilize the existing ``ArrowSchema`` structure to pass any data type and shape information. 
Updating this specification diff --git a/docs/source/index.rst b/docs/source/index.rst index d01c74f9a482e..8407813bd7abb 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -46,8 +46,8 @@ target environment.** :class-card: contrib-card :shadow: none - Read about the Apache Arrow format - specifications and Protocols. + Read about the Apache Arrow format and its related specifications and + protocols. +++ @@ -57,17 +57,15 @@ target environment.** :color: primary :expand: - To the Specifications + To Specifications .. grid-item-card:: Development :class-card: contrib-card :shadow: none - Find the documentation on the topic of - contributions, reviews, building of the libraries - from source, building of the documentation, - continuous integration, benchmarks and the - release process. + Find documentation on building the libraries from source, building the + documentation, contributing and code reviews, continuous integration, + benchmarking, and the release process. +++ @@ -77,7 +75,7 @@ target environment.** :color: primary :expand: - To the Development + To Development .. _toc.columnar: diff --git a/docs/source/java/flight_sql_jdbc_driver.rst b/docs/source/java/flight_sql_jdbc_driver.rst index 3b45cdd8b84cd..0ace2185983a9 100644 --- a/docs/source/java/flight_sql_jdbc_driver.rst +++ b/docs/source/java/flight_sql_jdbc_driver.rst @@ -80,8 +80,8 @@ The components of the URI are as follows. * **HOSTNAME** is the hostname of the Flight SQL service. * **PORT** is the port of the Flight SQL service. -Additional options can be passed as query parameters. The supported -parameters are: +Additional options can be passed as query parameters. Parameter names are +case-sensitive. The supported parameters are: .. 
list-table:: :header-rows: 1 @@ -114,12 +114,26 @@ parameters are: - null - When TLS is enabled, the password for the certificate store + * - tlsRootCerts + - null + - Path to PEM-encoded root certificates for TLS - use this as + an alternative to ``trustStore`` + + * - clientCertificate + - null + - Path to PEM-encoded client mTLS certificate when the Flight + SQL server requires client verification. + + * - clientKey + - null + - Path to PEM-encoded client mTLS key when the Flight + SQL server requires client verification. + * - useEncryption - - false - - Whether to use TLS (the default is an insecure, plaintext - connection) + - true + - Whether to use TLS (the default is an encrypted connection) - * - username + * - user - null - The username for user/password authentication @@ -127,6 +141,20 @@ parameters are: - true - When TLS is enabled, whether to use the system certificate store + * - retainCookies + - true + - Whether to use cookies from the initial connection in subsequent + internal connections when retrieving streams from separate endpoints. + + * - retainAuth + - true + - Whether to use bearer tokens obtained from the initial connection + in subsequent internal connections used for retrieving streams + from separate endpoints. + +Note that URI values must be URI-encoded if they contain characters such +as !, @, $, etc. + Any URI parameters that are not handled by the driver are passed to the Flight SQL service as gRPC headers. For example, the following URI :: @@ -135,3 +163,14 @@ the Flight SQL service as gRPC headers. For example, the following URI :: This will connect without authentication or encryption, to a Flight SQL service running on ``localhost`` on port 12345. Each request will also include a `database=mydb` gRPC header. + +Connection parameters may also be supplied using the Properties object +when using the JDBC Driver Manager to connect. When supplying using +the Properties object, values should *not* be URI-encoded. 
+ +Parameters specified by the URI supercede parameters supplied by the +Properties object. When calling the `user/password overload of +DriverManager#getConnection() +`_, +the username and password supplied on the URI supercede the username and +password arguments to the function call. diff --git a/docs/source/java/install.rst b/docs/source/java/install.rst index 47e91fb39ec7b..32c121573a692 100644 --- a/docs/source/java/install.rst +++ b/docs/source/java/install.rst @@ -83,6 +83,40 @@ arrow-vector, and arrow-memory-netty. +A bill of materials (BOM) module has been provided to simplify adding +Arrow modules. This eliminates the need to specify the version for +every module. An alternative to the above would be: + +.. code-block:: xml + + + + 4.0.0 + org.example + demo + 1.0-SNAPSHOT + + 15.0.0 + + + + org.apache.arrow + arrow-bom + ${arrow.version} + + + org.apache.arrow + arrow-vector + + + org.apache.arrow + arrow-memory-netty + + + + To use the Arrow Flight dependencies, also add the ``os-maven-plugin`` plugin. This plugin generates useful platform-dependent properties such as ``os.detected.name`` and ``os.detected.arch`` needed to resolve diff --git a/docs/source/python/extending_types.rst b/docs/source/python/extending_types.rst index b9e875ceebc74..ee92cebcb549c 100644 --- a/docs/source/python/extending_types.rst +++ b/docs/source/python/extending_types.rst @@ -68,34 +68,43 @@ message). See the :ref:`format_metadata_extension_types` section of the metadata specification for more details. -Pyarrow allows you to define such extension types from Python. - -There are currently two ways: - -* Subclassing :class:`PyExtensionType`: the (de)serialization is based on pickle. - This is a good option for an extension type that is only used from Python. -* Subclassing :class:`ExtensionType`: this allows to give a custom - Python-independent name and serialized metadata, that can potentially be - recognized by other (non-Python) Arrow implementations such as PySpark. 
+Pyarrow allows you to define such extension types from Python by subclassing +:class:`ExtensionType` and giving the derived class its own extension name +and serialization mechanism. The extension name and serialized metadata +can potentially be recognized by other (non-Python) Arrow implementations +such as PySpark. For example, we could define a custom UUID type for 128-bit numbers which can -be represented as ``FixedSizeBinary`` type with 16 bytes. -Using the first approach, we create a ``UuidType`` subclass, and implement the -``__reduce__`` method to ensure the class can be properly pickled:: +be represented as ``FixedSizeBinary`` type with 16 bytes:: - class UuidType(pa.PyExtensionType): + class UuidType(pa.ExtensionType): def __init__(self): - pa.PyExtensionType.__init__(self, pa.binary(16)) + super().__init__(pa.binary(16), "my_package.uuid") + + def __arrow_ext_serialize__(self): + # Since we don't have a parameterized type, we don't need extra + # metadata to be deserialized + return b'' - def __reduce__(self): - return UuidType, () + @classmethod + def __arrow_ext_deserialize__(cls, storage_type, serialized): + # Sanity checks, not required but illustrate the method signature. + assert storage_type == pa.binary(16) + assert serialized == b'' + # Return an instance of this subclass given the serialized + # metadata. + return UuidType() + +The special methods ``__arrow_ext_serialize__`` and ``__arrow_ext_deserialize__`` +define the serialization of an extension type instance. For non-parametric +types such as the above, the serialization payload can be left empty. 
This can now be used to create arrays and tables holding the extension type:: >>> uuid_type = UuidType() >>> uuid_type.extension_name - 'arrow.py_extension_type' + 'my_package.uuid' >>> uuid_type.storage_type FixedSizeBinaryType(fixed_size_binary[16]) @@ -112,8 +121,11 @@ This can now be used to create arrays and tables holding the extension type:: ] This array can be included in RecordBatches, sent over IPC and received in -another Python process. The custom UUID type will be preserved there, as long -as the definition of the class is available (the type can be unpickled). +another Python process. The receiving process must explicitly register the +extension type for deserialization, otherwise it will fall back to the +storage type:: + + >>> pa.register_extension_type(UuidType()) For example, creating a RecordBatch and writing it to a stream using the IPC protocol:: @@ -129,43 +141,12 @@ and then reading it back yields the proper type:: >>> with pa.ipc.open_stream(buf) as reader: ... result = reader.read_all() >>> result.column('ext').type - UuidType(extension) - -We can define the same type using the other option:: - - class UuidType(pa.ExtensionType): - - def __init__(self): - pa.ExtensionType.__init__(self, pa.binary(16), "my_package.uuid") - - def __arrow_ext_serialize__(self): - # since we don't have a parameterized type, we don't need extra - # metadata to be deserialized - return b'' - - @classmethod - def __arrow_ext_deserialize__(self, storage_type, serialized): - # return an instance of this subclass given the serialized - # metadata. 
- return UuidType() - -This is a slightly longer implementation (you need to implement the special -methods ``__arrow_ext_serialize__`` and ``__arrow_ext_deserialize__``), and the -extension type needs to be registered to be received through IPC (using -:func:`register_extension_type`), but it has -now a unique name:: - - >>> uuid_type = UuidType() - >>> uuid_type.extension_name - 'my_package.uuid' - - >>> pa.register_extension_type(uuid_type) + UuidType(FixedSizeBinaryType(fixed_size_binary[16])) The receiving application doesn't need to be Python but can still recognize -the extension type as a "uuid" type, if it has implemented its own extension -type to receive it. -If the type is not registered in the receiving application, it will fall back -to the storage type. +the extension type as a "my_package.uuid" type, if it has implemented its own +extension type to receive it. If the type is not registered in the receiving +application, it will fall back to the storage type. Parameterized extension type ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -187,7 +168,7 @@ of the given frequency since 1970. # attributes need to be set first before calling # super init (as that calls serialize) self._freq = freq - pa.ExtensionType.__init__(self, pa.int64(), 'my_package.period') + super().__init__(pa.int64(), 'my_package.period') @property def freq(self): @@ -198,7 +179,7 @@ of the given frequency since 1970. @classmethod def __arrow_ext_deserialize__(cls, storage_type, serialized): - # return an instance of this subclass given the serialized + # Return an instance of this subclass given the serialized # metadata. serialized = serialized.decode() assert serialized.startswith("freq=") @@ -209,31 +190,10 @@ Here, we ensure to store all information in the serialized metadata that is needed to reconstruct the instance (in the ``__arrow_ext_deserialize__`` class method), in this case the frequency string. -Note that, once created, the data type instance is considered immutable. 
If, -in the example above, the ``freq`` parameter would change after instantiation, -the reconstruction of the type instance after IPC will be incorrect. +Note that, once created, the data type instance is considered immutable. In the example above, the ``freq`` parameter is therefore stored in a private attribute with a public read-only property to access it. -Parameterized extension types are also possible using the pickle-based type -subclassing :class:`PyExtensionType`. The equivalent example for the period -data type from above would look like:: - - class PeriodType(pa.PyExtensionType): - - def __init__(self, freq): - self._freq = freq - pa.PyExtensionType.__init__(self, pa.int64()) - - @property - def freq(self): - return self._freq - - def __reduce__(self): - return PeriodType, (self.freq,) - -Also the storage type does not need to be fixed but can be parameterized. - Custom extension array class ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -252,12 +212,16 @@ the data as a 2-D Numpy array ``(N, 3)`` without any copy:: return self.storage.flatten().to_numpy().reshape((-1, 3)) - class Point3DType(pa.PyExtensionType): + class Point3DType(pa.ExtensionType): def __init__(self): - pa.PyExtensionType.__init__(self, pa.list_(pa.float32(), 3)) + super().__init__(pa.list_(pa.float32(), 3), "my_package.Point3DType") - def __reduce__(self): - return Point3DType, () + def __arrow_ext_serialize__(self): + return b'' + + @classmethod + def __arrow_ext_deserialize__(cls, storage_type, serialized): + return Point3DType() def __arrow_ext_class__(self): return Point3DArray @@ -289,11 +253,8 @@ The additional methods in the extension class are then available to the user:: This array can be sent over IPC, received in another Python process, and the custom -extension array class will be preserved (as long as the definitions of the classes above -are available). - -The same ``__arrow_ext_class__`` specialization can be used with custom types defined -by subclassing :class:`ExtensionType`. 
+extension array class will be preserved (as long as the receiving process registers +the extension type using :func:`register_extension_type` before reading the IPC data). Custom scalar conversion ~~~~~~~~~~~~~~~~~~~~~~~~ @@ -304,18 +265,24 @@ If you want scalars of your custom extension type to convert to a custom type wh For example, if we wanted the above example 3D point type to return a custom 3D point class instead of a list, we would implement:: + from collections import namedtuple + Point3D = namedtuple("Point3D", ["x", "y", "z"]) class Point3DScalar(pa.ExtensionScalar): def as_py(self) -> Point3D: return Point3D(*self.value.as_py()) - class Point3DType(pa.PyExtensionType): + class Point3DType(pa.ExtensionType): def __init__(self): - pa.PyExtensionType.__init__(self, pa.list_(pa.float32(), 3)) + super().__init__(pa.list_(pa.float32(), 3), "my_package.Point3DType") - def __reduce__(self): - return Point3DType, () + def __arrow_ext_serialize__(self): + return b'' + + @classmethod + def __arrow_ext_deserialize__(cls, storage_type, serialized): + return Point3DType() def __arrow_ext_scalar_class__(self): return Point3DScalar diff --git a/docs/source/status.rst b/docs/source/status.rst index c8c0e6dfc1dfe..fee9a27b6ca1a 100644 --- a/docs/source/status.rst +++ b/docs/source/status.rst @@ -40,7 +40,7 @@ Data Types +-------------------+-------+-------+-------+------------+-------+-------+-------+-------+ | UInt8/16/32/64 | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | +-------------------+-------+-------+-------+------------+-------+-------+-------+-------+ -| Float16 | | | ✓ | ✓ | ✓ (1)| ✓ | ✓ | | +| Float16 | ✓ (1) | | ✓ | ✓ | ✓ (2)| ✓ | ✓ | | +-------------------+-------+-------+-------+------------+-------+-------+-------+-------+ | Float32/64 | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | +-------------------+-------+-------+-------+------------+-------+-------+-------+-------+ @@ -68,6 +68,10 @@ Data Types 
+-------------------+-------+-------+-------+------------+-------+-------+-------+-------+ | Large Utf8 | ✓ | ✓ | ✓ | | | ✓ | ✓ | | +-------------------+-------+-------+-------+------------+-------+-------+-------+-------+ +| Binary View | ✓ | | ✓ | | | | | | ++-------------------+-------+-------+-------+------------+-------+-------+-------+-------+ +| String View | ✓ | | ✓ | | | | | | ++-------------------+-------+-------+-------+------------+-------+-------+-------+-------+ +-------------------+-------+-------+-------+------------+-------+-------+-------+-------+ | Data type | C++ | Java | Go | JavaScript | C# | Rust | Julia | Swift | @@ -92,7 +96,7 @@ Data Types | Data type | C++ | Java | Go | JavaScript | C# | Rust | Julia | Swift | | (special) | | | | | | | | | +===================+=======+=======+=======+============+=======+=======+=======+=======+ -| Dictionary | ✓ | ✓ (2) | ✓ | ✓ | ✓ (2) | ✓ (2) | ✓ | | +| Dictionary | ✓ | ✓ (3) | ✓ | ✓ | ✓ (3) | ✓ (3) | ✓ | | +-------------------+-------+-------+-------+------------+-------+-------+-------+-------+ | Extension | ✓ | ✓ | ✓ | | | ✓ | ✓ | | +-------------------+-------+-------+-------+------------+-------+-------+-------+-------+ @@ -101,8 +105,9 @@ Data Types Notes: -* \(1) Float16 support in C# is only available when targeting .NET 6+. -* \(2) Nested dictionaries not supported +* \(1) Casting to/from Float16 in C++ is not supported. +* \(2) Float16 support in C# is only available when targeting .NET 6+. +* \(3) Nested dictionaries not supported .. seealso:: The :ref:`format_columnar` specification. diff --git a/format/Schema.fbs b/format/Schema.fbs index 6adbcb115cde3..dbf482e6cc786 100644 --- a/format/Schema.fbs +++ b/format/Schema.fbs @@ -40,7 +40,7 @@ enum MetadataVersion:short { /// >= 0.8.0 (December 2017). Non-backwards compatible with V3. V4, - /// >= 1.0.0 (July 2020. Backwards compatible with V4 (V5 readers can read V4 + /// >= 1.0.0 (July 2020). 
Backwards compatible with V4 (V5 readers can read V4 /// metadata and IPC messages). Implementations are recommended to provide a /// V4 compatibility mode with V5 format changes disabled. /// diff --git a/go/README.md b/go/README.md index 660549cb1b366..c45bcd756f81c 100644 --- a/go/README.md +++ b/go/README.md @@ -20,7 +20,7 @@ Apache Arrow for Go =================== -[![Go Reference](https://pkg.go.dev/badge/github.com/apache/arrow/go/v14.svg)](https://pkg.go.dev/github.com/apache/arrow/go/v14) +[![Go Reference](https://pkg.go.dev/badge/github.com/apache/arrow/go/v15.svg)](https://pkg.go.dev/github.com/apache/arrow/go/v15) [Apache Arrow][arrow] is a cross-language development platform for in-memory data. It specifies a standardized language-independent columnar memory format diff --git a/go/arrow/_examples/helloworld/main.go b/go/arrow/_examples/helloworld/main.go index 02601be694400..156a52b86da1c 100644 --- a/go/arrow/_examples/helloworld/main.go +++ b/go/arrow/_examples/helloworld/main.go @@ -19,10 +19,10 @@ package main import ( "os" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/math" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/math" + "github.com/apache/arrow/go/v15/arrow/memory" ) func main() { diff --git a/go/arrow/_tools/tmpl/main.go b/go/arrow/_tools/tmpl/main.go index 87c4b6ed02c12..c591c3b96803c 100644 --- a/go/arrow/_tools/tmpl/main.go +++ b/go/arrow/_tools/tmpl/main.go @@ -28,7 +28,7 @@ import ( "strings" "text/template" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/internal/json" ) const Ext = ".tmpl" diff --git a/go/arrow/array.go b/go/arrow/array.go index 7622e7503e0b6..e07fa478aae57 100644 --- a/go/arrow/array.go +++ b/go/arrow/array.go @@ -19,8 +19,8 @@ package arrow import ( "fmt" - 
"github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) // ArrayData is the underlying memory and metadata of an Arrow array, corresponding diff --git a/go/arrow/array/array.go b/go/arrow/array/array.go index 1ee04c7aa2bcc..5aacc8f99a4ee 100644 --- a/go/arrow/array/array.go +++ b/go/arrow/array/array.go @@ -19,9 +19,9 @@ package array import ( "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/internal/debug" ) const ( @@ -178,7 +178,8 @@ func init() { arrow.RUN_END_ENCODED: func(data arrow.ArrayData) arrow.Array { return NewRunEndEncodedData(data) }, arrow.LIST_VIEW: func(data arrow.ArrayData) arrow.Array { return NewListViewData(data) }, arrow.LARGE_LIST_VIEW: func(data arrow.ArrayData) arrow.Array { return NewLargeListViewData(data) }, - + arrow.BINARY_VIEW: func(data arrow.ArrayData) arrow.Array { return NewBinaryViewData(data) }, + arrow.STRING_VIEW: func(data arrow.ArrayData) arrow.Array { return NewStringViewData(data) }, // invalid data types to fill out array to size 2^6 - 1 63: invalidDataType, } diff --git a/go/arrow/array/array_test.go b/go/arrow/array/array_test.go index 6139548ddec7d..bbfbee83585da 100644 --- a/go/arrow/array/array_test.go +++ b/go/arrow/array/array_test.go @@ -19,11 +19,11 @@ package array_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/internal/testing/tools" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/types" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + 
"github.com/apache/arrow/go/v15/arrow/internal/testing/tools" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/types" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/binary.go b/go/arrow/array/binary.go index e9e6e66e7e813..c226297da04c6 100644 --- a/go/arrow/array/binary.go +++ b/go/arrow/array/binary.go @@ -23,8 +23,9 @@ import ( "strings" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) type BinaryLike interface { @@ -318,6 +319,126 @@ func arrayEqualLargeBinary(left, right *LargeBinary) bool { return true } +type ViewLike interface { + arrow.Array + ValueHeader(int) *arrow.ViewHeader +} + +type BinaryView struct { + array + values []arrow.ViewHeader + dataBuffers []*memory.Buffer +} + +func NewBinaryViewData(data arrow.ArrayData) *BinaryView { + a := &BinaryView{} + a.refCount = 1 + a.setData(data.(*Data)) + return a +} + +func (a *BinaryView) setData(data *Data) { + if len(data.buffers) < 2 { + panic("len(data.buffers) < 2") + } + a.array.setData(data) + + if valueData := data.buffers[1]; valueData != nil { + a.values = arrow.ViewHeaderTraits.CastFromBytes(valueData.Bytes()) + } + + a.dataBuffers = data.buffers[2:] +} + +func (a *BinaryView) ValueHeader(i int) *arrow.ViewHeader { + if i < 0 || i >= a.array.data.length { + panic("arrow/array: index out of range") + } + return &a.values[a.array.data.offset+i] +} + +func (a *BinaryView) Value(i int) []byte { + s := a.ValueHeader(i) + if s.IsInline() { + return s.InlineBytes() + } + start := s.BufferOffset() + buf := a.dataBuffers[s.BufferIndex()] + return buf.Bytes()[start : start+int32(s.Len())] +} + +// ValueString returns the value at index i as a string instead of +// a byte slice, without copying the underlying data. 
+func (a *BinaryView) ValueString(i int) string { + b := a.Value(i) + return *(*string)(unsafe.Pointer(&b)) +} + +func (a *BinaryView) String() string { + var o strings.Builder + o.WriteString("[") + for i := 0; i < a.Len(); i++ { + if i > 0 { + o.WriteString(" ") + } + switch { + case a.IsNull(i): + o.WriteString(NullValueStr) + default: + fmt.Fprintf(&o, "%q", a.ValueString(i)) + } + } + o.WriteString("]") + return o.String() +} + +// ValueStr is paired with AppendValueFromString in that it returns +// the value at index i as a string: Semantically this means that for +// a null value it will return the string "(null)", otherwise it will +// return the value as a base64 encoded string suitable for CSV/JSON. +// +// This is always going to be less performant than just using ValueString +// and exists to fulfill the Array interface to provide a method which +// can produce a human readable string for a given index. +func (a *BinaryView) ValueStr(i int) string { + if a.IsNull(i) { + return NullValueStr + } + return base64.StdEncoding.EncodeToString(a.Value(i)) +} + +func (a *BinaryView) GetOneForMarshal(i int) interface{} { + if a.IsNull(i) { + return nil + } + return a.Value(i) +} + +func (a *BinaryView) MarshalJSON() ([]byte, error) { + vals := make([]interface{}, a.Len()) + for i := 0; i < a.Len(); i++ { + vals[i] = a.GetOneForMarshal(i) + } + // golang marshal standard says that []byte will be marshalled + // as a base64-encoded string + return json.Marshal(vals) +} + +func arrayEqualBinaryView(left, right *BinaryView) bool { + leftBufs, rightBufs := left.dataBuffers, right.dataBuffers + for i := 0; i < left.Len(); i++ { + if left.IsNull(i) { + continue + } + if !left.ValueHeader(i).Equals(leftBufs, right.ValueHeader(i), rightBufs) { + return false + } + } + return true +} + var ( _ arrow.Array = (*Binary)(nil) + _ arrow.Array = (*LargeBinary)(nil) + _ arrow.Array = (*BinaryView)(nil) ) diff --git a/go/arrow/array/binary_test.go b/go/arrow/array/binary_test.go 
index 5febb55691d7d..c9e165515225b 100644 --- a/go/arrow/array/binary_test.go +++ b/go/arrow/array/binary_test.go @@ -20,9 +20,9 @@ import ( "reflect" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) @@ -700,3 +700,27 @@ func TestBinaryStringRoundTrip(t *testing.T) { assert.True(t, Equal(arr, arr1)) } + +func TestBinaryViewStringRoundTrip(t *testing.T) { + mem := memory.NewCheckedAllocator(memory.DefaultAllocator) + defer mem.AssertSize(t, 0) + + values := []string{"a", "bc", "", "", "supercalifragilistic", "", "expeallodocious"} + valid := []bool{true, true, false, false, true, true, true} + + b := NewBinaryViewBuilder(mem) + defer b.Release() + + b.AppendStringValues(values, valid) + arr := b.NewArray().(*BinaryView) + defer arr.Release() + + for i := 0; i < arr.Len(); i++ { + assert.NoError(t, b.AppendValueFromString(arr.ValueStr(i))) + } + + arr1 := b.NewArray().(*BinaryView) + defer arr1.Release() + + assert.True(t, Equal(arr, arr1)) +} diff --git a/go/arrow/array/binarybuilder.go b/go/arrow/array/binarybuilder.go index 3cb709b45b7a1..21ad576508e9e 100644 --- a/go/arrow/array/binarybuilder.go +++ b/go/arrow/array/binarybuilder.go @@ -23,11 +23,12 @@ import ( "math" "reflect" "sync/atomic" + "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) // A BinaryBuilder is used to build a Binary array using the Append methods. 
@@ -370,6 +371,334 @@ func (b *BinaryBuilder) UnmarshalJSON(data []byte) error { return b.Unmarshal(dec) } +const ( + dfltBlockSize = 32 << 10 // 32 KB + viewValueSizeLimit int32 = math.MaxInt32 +) + +type BinaryViewBuilder struct { + builder + dtype arrow.BinaryDataType + + data *memory.Buffer + rawData []arrow.ViewHeader + + blockBuilder multiBufferBuilder +} + +func NewBinaryViewBuilder(mem memory.Allocator) *BinaryViewBuilder { + return &BinaryViewBuilder{ + dtype: arrow.BinaryTypes.BinaryView, + builder: builder{ + refCount: 1, + mem: mem, + }, + blockBuilder: multiBufferBuilder{ + refCount: 1, + blockSize: dfltBlockSize, + mem: mem, + }, + } +} + +func (b *BinaryViewBuilder) SetBlockSize(sz uint) { + b.blockBuilder.blockSize = int(sz) +} + +func (b *BinaryViewBuilder) Type() arrow.DataType { return b.dtype } + +func (b *BinaryViewBuilder) Release() { + debug.Assert(atomic.LoadInt64(&b.refCount) > 0, "too many releases") + + if atomic.AddInt64(&b.refCount, -1) != 0 { + return + } + + if b.nullBitmap != nil { + b.nullBitmap.Release() + b.nullBitmap = nil + } + if b.data != nil { + b.data.Release() + b.data = nil + b.rawData = nil + } +} + +func (b *BinaryViewBuilder) init(capacity int) { + b.builder.init(capacity) + b.data = memory.NewResizableBuffer(b.mem) + bytesN := arrow.ViewHeaderTraits.BytesRequired(capacity) + b.data.Resize(bytesN) + b.rawData = arrow.ViewHeaderTraits.CastFromBytes(b.data.Bytes()) +} + +func (b *BinaryViewBuilder) Resize(n int) { + nbuild := n + if n < minBuilderCapacity { + n = minBuilderCapacity + } + + if b.capacity == 0 { + b.init(n) + return + } + + b.builder.resize(nbuild, b.init) + b.data.Resize(arrow.ViewHeaderTraits.BytesRequired(n)) + b.rawData = arrow.ViewHeaderTraits.CastFromBytes(b.data.Bytes()) +} + +func (b *BinaryViewBuilder) ReserveData(length int) { + if int32(length) > viewValueSizeLimit { + panic(fmt.Errorf("%w: BinaryView or StringView elements cannot reference strings larger than 2GB", + arrow.ErrInvalid)) + } + 
b.blockBuilder.Reserve(int(length)) +} + +func (b *BinaryViewBuilder) Reserve(n int) { + b.builder.reserve(n, b.Resize) +} + +func (b *BinaryViewBuilder) Append(v []byte) { + if int32(len(v)) > viewValueSizeLimit { + panic(fmt.Errorf("%w: BinaryView or StringView elements cannot reference strings larger than 2GB", arrow.ErrInvalid)) + } + + if !arrow.IsViewInline(len(v)) { + b.ReserveData(len(v)) + } + + b.Reserve(1) + b.UnsafeAppend(v) +} + +// AppendString is identical to Append, only accepting a string instead +// of a byte slice, avoiding the extra copy that would occur if you simply +// did []byte(v). +// +// This is different than AppendValueFromString which exists for the +// Builder interface, in that this expects raw binary data which is +// appended unmodified. AppendValueFromString expects base64 encoded binary +// data instead. +func (b *BinaryViewBuilder) AppendString(v string) { + // create a []byte without copying the bytes + // in go1.20 this would be unsafe.StringData + val := *(*[]byte)(unsafe.Pointer(&struct { + string + int + }{v, len(v)})) + b.Append(val) +} + +func (b *BinaryViewBuilder) AppendNull() { + b.Reserve(1) + b.UnsafeAppendBoolToBitmap(false) +} + +func (b *BinaryViewBuilder) AppendNulls(n int) { + b.Reserve(n) + for i := 0; i < n; i++ { + b.UnsafeAppendBoolToBitmap(false) + } +} + +func (b *BinaryViewBuilder) AppendEmptyValue() { + b.Reserve(1) + b.UnsafeAppendBoolToBitmap(true) +} + +func (b *BinaryViewBuilder) AppendEmptyValues(n int) { + b.Reserve(n) + b.unsafeAppendBoolsToBitmap(nil, n) +} + +func (b *BinaryViewBuilder) UnsafeAppend(v []byte) { + hdr := &b.rawData[b.length] + hdr.SetBytes(v) + if !hdr.IsInline() { + b.blockBuilder.UnsafeAppend(hdr, v) + } + b.UnsafeAppendBoolToBitmap(true) +} + +func (b *BinaryViewBuilder) AppendValues(v [][]byte, valid []bool) { + if len(v) != len(valid) && len(valid) != 0 { + panic("len(v) != len(valid) && len(valid) != 0") + } + + if len(v) == 0 { + return + } + + b.Reserve(len(v)) + 
outOfLineTotal := 0 + for i, vv := range v { + if len(valid) == 0 || valid[i] { + if !arrow.IsViewInline(len(vv)) { + outOfLineTotal += len(vv) + } + } + } + + b.ReserveData(outOfLineTotal) + for i, vv := range v { + if len(valid) == 0 || valid[i] { + hdr := &b.rawData[b.length+i] + hdr.SetBytes(vv) + if !hdr.IsInline() { + b.blockBuilder.UnsafeAppend(hdr, vv) + } + } + } + + b.builder.unsafeAppendBoolsToBitmap(valid, len(v)) +} + +func (b *BinaryViewBuilder) AppendStringValues(v []string, valid []bool) { + if len(v) != len(valid) && len(valid) != 0 { + panic("len(v) != len(valid) && len(valid) != 0") + } + + if len(v) == 0 { + return + } + + b.Reserve(len(v)) + outOfLineTotal := 0 + for i, vv := range v { + if len(valid) == 0 || valid[i] { + if !arrow.IsViewInline(len(vv)) { + outOfLineTotal += len(vv) + } + } + } + + b.ReserveData(outOfLineTotal) + for i, vv := range v { + if len(valid) == 0 || valid[i] { + hdr := &b.rawData[b.length+i] + hdr.SetString(vv) + if !hdr.IsInline() { + b.blockBuilder.UnsafeAppendString(hdr, vv) + } + } + } + + b.builder.unsafeAppendBoolsToBitmap(valid, len(v)) +} + +// AppendValueFromString is paired with ValueStr for fulfilling the +// base Builder interface. This is intended to read in a human-readable +// string such as from CSV or JSON and append it to the array. +// +// For Binary values are expected to be base64 encoded (and will be +// decoded as such before being appended). 
+func (b *BinaryViewBuilder) AppendValueFromString(s string) error { + if s == NullValueStr { + b.AppendNull() + return nil + } + + if b.dtype.IsUtf8() { + b.Append([]byte(s)) + return nil + } + + decodedVal, err := base64.StdEncoding.DecodeString(s) + if err != nil { + return fmt.Errorf("could not decode base64 string: %w", err) + } + b.Append(decodedVal) + return nil +} + +func (b *BinaryViewBuilder) UnmarshalOne(dec *json.Decoder) error { + t, err := dec.Token() + if err != nil { + return err + } + + switch v := t.(type) { + case string: + data, err := base64.StdEncoding.DecodeString(v) + if err != nil { + return err + } + b.Append(data) + case []byte: + b.Append(v) + case nil: + b.AppendNull() + default: + return &json.UnmarshalTypeError{ + Value: fmt.Sprint(t), + Type: reflect.TypeOf([]byte{}), + Offset: dec.InputOffset(), + } + } + return nil +} + +func (b *BinaryViewBuilder) Unmarshal(dec *json.Decoder) error { + for dec.More() { + if err := b.UnmarshalOne(dec); err != nil { + return err + } + } + return nil +} + +func (b *BinaryViewBuilder) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) + t, err := dec.Token() + if err != nil { + return err + } + + if delim, ok := t.(json.Delim); !ok || delim != '[' { + return fmt.Errorf("binary view builder must unpack from json array, found %s", delim) + } + + return b.Unmarshal(dec) +} + +func (b *BinaryViewBuilder) newData() (data *Data) { + bytesRequired := arrow.ViewHeaderTraits.BytesRequired(b.length) + if bytesRequired > 0 && bytesRequired < b.data.Len() { + // trim buffers + b.data.Resize(bytesRequired) + } + + dataBuffers := b.blockBuilder.Finish() + data = NewData(b.dtype, b.length, append([]*memory.Buffer{ + b.nullBitmap, b.data}, dataBuffers...), nil, b.nulls, 0) + b.reset() + + if b.data != nil { + b.data.Release() + b.data = nil + b.rawData = nil + for _, buf := range dataBuffers { + buf.Release() + } + } + return +} + +func (b *BinaryViewBuilder) NewBinaryViewArray() (a 
*BinaryView) { + data := b.newData() + a = NewBinaryViewData(data) + data.Release() + return +} + +func (b *BinaryViewBuilder) NewArray() arrow.Array { + return b.NewBinaryViewArray() +} + var ( _ Builder = (*BinaryBuilder)(nil) + _ Builder = (*BinaryViewBuilder)(nil) ) diff --git a/go/arrow/array/binarybuilder_test.go b/go/arrow/array/binarybuilder_test.go index c63307433bcca..96be73da6516d 100644 --- a/go/arrow/array/binarybuilder_test.go +++ b/go/arrow/array/binarybuilder_test.go @@ -20,9 +20,9 @@ import ( "bytes" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/boolean.go b/go/arrow/array/boolean.go index 464cef48bdf32..43bac64a4c990 100644 --- a/go/arrow/array/boolean.go +++ b/go/arrow/array/boolean.go @@ -21,10 +21,10 @@ import ( "strconv" "strings" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) // A type which represents an immutable sequence of boolean values. 
diff --git a/go/arrow/array/boolean_test.go b/go/arrow/array/boolean_test.go index c49ec08624cc6..bcd17ee5967d6 100644 --- a/go/arrow/array/boolean_test.go +++ b/go/arrow/array/boolean_test.go @@ -22,8 +22,8 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/booleanbuilder.go b/go/arrow/array/booleanbuilder.go index 10b7405aa5b2a..cd0cffd5e43e2 100644 --- a/go/arrow/array/booleanbuilder.go +++ b/go/arrow/array/booleanbuilder.go @@ -23,11 +23,11 @@ import ( "strconv" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) type BooleanBuilder struct { diff --git a/go/arrow/array/booleanbuilder_test.go b/go/arrow/array/booleanbuilder_test.go index e270636a87b1c..f6f6c80dab37f 100644 --- a/go/arrow/array/booleanbuilder_test.go +++ b/go/arrow/array/booleanbuilder_test.go @@ -19,9 +19,9 @@ package array_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/internal/testing/tools" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/internal/testing/tools" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/bufferbuilder.go b/go/arrow/array/bufferbuilder.go index e023b0d907421..13741ba8926ac 100644 --- 
a/go/arrow/array/bufferbuilder.go +++ b/go/arrow/array/bufferbuilder.go @@ -18,10 +18,12 @@ package array import ( "sync/atomic" + "unsafe" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" ) type bufBuilder interface { @@ -151,3 +153,109 @@ func (b *bufferBuilder) unsafeAppend(data []byte) { copy(b.bytes[b.length:], data) b.length += len(data) } + +type multiBufferBuilder struct { + refCount int64 + blockSize int + + mem memory.Allocator + blocks []*memory.Buffer + currentOutBuffer int +} + +// Retain increases the reference count by 1. +// Retain may be called simultaneously from multiple goroutines. +func (b *multiBufferBuilder) Retain() { + atomic.AddInt64(&b.refCount, 1) +} + +// Release decreases the reference count by 1. +// When the reference count goes to zero, the memory is freed. +// Release may be called simultaneously from multiple goroutines. 
+func (b *multiBufferBuilder) Release() { + debug.Assert(atomic.LoadInt64(&b.refCount) > 0, "too many releases") + + if atomic.AddInt64(&b.refCount, -1) == 0 { + b.Reset() + } +} + +func (b *multiBufferBuilder) Reserve(nbytes int) { + if len(b.blocks) == 0 { + out := memory.NewResizableBuffer(b.mem) + if nbytes < b.blockSize { + nbytes = b.blockSize + } + out.Reserve(nbytes) + b.currentOutBuffer = 0 + b.blocks = []*memory.Buffer{out} + return + } + + curBuf := b.blocks[b.currentOutBuffer] + remain := curBuf.Cap() - curBuf.Len() + if nbytes <= remain { + return + } + + // search for underfull block that has enough bytes + for i, block := range b.blocks { + remaining := block.Cap() - block.Len() + if nbytes <= remaining { + b.currentOutBuffer = i + return + } + } + + // current buffer doesn't have enough space, no underfull buffers + // make new buffer and set that as our current. + newBuf := memory.NewResizableBuffer(b.mem) + if nbytes < b.blockSize { + nbytes = b.blockSize + } + + newBuf.Reserve(nbytes) + b.currentOutBuffer = len(b.blocks) + b.blocks = append(b.blocks, newBuf) +} + +func (b *multiBufferBuilder) RemainingBytes() int { + if len(b.blocks) == 0 { + return 0 + } + + buf := b.blocks[b.currentOutBuffer] + return buf.Cap() - buf.Len() +} + +func (b *multiBufferBuilder) Reset() { + b.currentOutBuffer = 0 + for _, block := range b.Finish() { + block.Release() + } +} + +func (b *multiBufferBuilder) UnsafeAppend(hdr *arrow.ViewHeader, val []byte) { + buf := b.blocks[b.currentOutBuffer] + idx, offset := b.currentOutBuffer, buf.Len() + hdr.SetIndexOffset(int32(idx), int32(offset)) + + n := copy(buf.Buf()[offset:], val) + buf.ResizeNoShrink(offset + n) +} + +func (b *multiBufferBuilder) UnsafeAppendString(hdr *arrow.ViewHeader, val string) { + // create a byte slice with zero-copies + // in go1.20 this would be equivalent to unsafe.StringData + v := *(*[]byte)(unsafe.Pointer(&struct { + string + int + }{val, len(val)})) + b.UnsafeAppend(hdr, v) +} + +func (b 
*multiBufferBuilder) Finish() (out []*memory.Buffer) { + b.currentOutBuffer = 0 + out, b.blocks = b.blocks, nil + return +} diff --git a/go/arrow/array/bufferbuilder_byte.go b/go/arrow/array/bufferbuilder_byte.go index 00a0d1c21b3ef..9b2b559ba9f68 100644 --- a/go/arrow/array/bufferbuilder_byte.go +++ b/go/arrow/array/bufferbuilder_byte.go @@ -16,7 +16,7 @@ package array -import "github.com/apache/arrow/go/v14/arrow/memory" +import "github.com/apache/arrow/go/v15/arrow/memory" type byteBufferBuilder struct { bufferBuilder diff --git a/go/arrow/array/bufferbuilder_numeric.gen.go b/go/arrow/array/bufferbuilder_numeric.gen.go index 879bc9f571152..a7961166c0edd 100644 --- a/go/arrow/array/bufferbuilder_numeric.gen.go +++ b/go/arrow/array/bufferbuilder_numeric.gen.go @@ -19,9 +19,9 @@ package array import ( - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/memory" ) type int64BufferBuilder struct { diff --git a/go/arrow/array/bufferbuilder_numeric.gen.go.tmpl b/go/arrow/array/bufferbuilder_numeric.gen.go.tmpl index e859b5bff02a1..845d7ef01c89a 100644 --- a/go/arrow/array/bufferbuilder_numeric.gen.go.tmpl +++ b/go/arrow/array/bufferbuilder_numeric.gen.go.tmpl @@ -17,9 +17,9 @@ package array import ( - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/memory" ) {{range .In}} diff --git a/go/arrow/array/bufferbuilder_numeric_test.go b/go/arrow/array/bufferbuilder_numeric_test.go index ba0a60e5b703b..b51e9ae9207ea 100644 --- a/go/arrow/array/bufferbuilder_numeric_test.go +++ b/go/arrow/array/bufferbuilder_numeric_test.go @@ -20,8 +20,8 @@ import ( 
"testing" "unsafe" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/endian" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/endian" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/builder.go b/go/arrow/array/builder.go index 2f15ac965e07c..279804a1cdb9f 100644 --- a/go/arrow/array/builder.go +++ b/go/arrow/array/builder.go @@ -20,10 +20,10 @@ import ( "fmt" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) const ( @@ -364,6 +364,10 @@ func NewBuilder(mem memory.Allocator, dtype arrow.DataType) Builder { case arrow.RUN_END_ENCODED: typ := dtype.(*arrow.RunEndEncodedType) return NewRunEndEncodedBuilder(mem, typ.RunEnds(), typ.Encoded()) + case arrow.BINARY_VIEW: + return NewBinaryViewBuilder(mem) + case arrow.STRING_VIEW: + return NewStringViewBuilder(mem) } panic(fmt.Errorf("arrow/array: unsupported builder for %T", dtype)) } diff --git a/go/arrow/array/builder_test.go b/go/arrow/array/builder_test.go index 3cacb54f725e7..7bec86d86cc8b 100644 --- a/go/arrow/array/builder_test.go +++ b/go/arrow/array/builder_test.go @@ -19,8 +19,8 @@ package array import ( "testing" - "github.com/apache/arrow/go/v14/arrow/internal/testing/tools" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/internal/testing/tools" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/compare.go b/go/arrow/array/compare.go index e70716bee91a7..372293a61d6cb 100644 --- a/go/arrow/array/compare.go +++ b/go/arrow/array/compare.go @@ -20,9 +20,9 @@ import ( "fmt" "math" - 
"github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/float16" - "github.com/apache/arrow/go/v14/internal/bitutils" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/internal/bitutils" ) // RecordEqual reports whether the two provided records are equal. @@ -232,6 +232,12 @@ func Equal(left, right arrow.Array) bool { case *LargeString: r := right.(*LargeString) return arrayEqualLargeString(l, r) + case *BinaryView: + r := right.(*BinaryView) + return arrayEqualBinaryView(l, r) + case *StringView: + r := right.(*StringView) + return arrayEqualStringView(l, r) case *Int8: r := right.(*Int8) return arrayEqualInt8(l, r) @@ -482,6 +488,12 @@ func arrayApproxEqual(left, right arrow.Array, opt equalOption) bool { case *LargeString: r := right.(*LargeString) return arrayEqualLargeString(l, r) + case *BinaryView: + r := right.(*BinaryView) + return arrayEqualBinaryView(l, r) + case *StringView: + r := right.(*StringView) + return arrayEqualStringView(l, r) case *Int8: r := right.(*Int8) return arrayEqualInt8(l, r) diff --git a/go/arrow/array/compare_test.go b/go/arrow/array/compare_test.go index 51421aa04fe9a..4fc9cf50e8643 100644 --- a/go/arrow/array/compare_test.go +++ b/go/arrow/array/compare_test.go @@ -22,11 +22,11 @@ import ( "sort" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/float16" - "github.com/apache/arrow/go/v14/arrow/internal/arrdata" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/arrow/internal/arrdata" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/concat.go b/go/arrow/array/concat.go index 9d815023c4b76..fa3554c1c0555 100644 --- 
a/go/arrow/array/concat.go +++ b/go/arrow/array/concat.go @@ -23,13 +23,13 @@ import ( "math/bits" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/encoded" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/bitutils" - "github.com/apache/arrow/go/v14/internal/utils" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/encoded" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/bitutils" + "github.com/apache/arrow/go/v15/internal/utils" ) // Concatenate creates a new arrow.Array which is the concatenation of the @@ -600,6 +600,35 @@ func concat(data []arrow.ArrayData, mem memory.Allocator) (arr arrow.ArrayData, } case arrow.FixedWidthDataType: out.buffers[1] = concatBuffers(gatherBuffersFixedWidthType(data, 1, dt), mem) + case arrow.BinaryViewDataType: + out.buffers = out.buffers[:2] + for _, d := range data { + for _, buf := range d.Buffers()[2:] { + buf.Retain() + out.buffers = append(out.buffers, buf) + } + } + + out.buffers[1] = concatBuffers(gatherFixedBuffers(data, 1, arrow.ViewHeaderSizeBytes), mem) + + var ( + s = arrow.ViewHeaderTraits.CastFromBytes(out.buffers[1].Bytes()) + i = data[0].Len() + precedingBufsCount int + ) + + for idx := 1; idx < len(data); idx++ { + precedingBufsCount += len(data[idx-1].Buffers()) - 2 + + for end := i + data[idx].Len(); i < end; i++ { + if s[i].IsInline() { + continue + } + + bufIndex := s[i].BufferIndex() + int32(precedingBufsCount) + s[i].SetIndexOffset(bufIndex, s[i].BufferOffset()) + } + } case arrow.BinaryDataType: offsetWidth := dt.Layout().Buffers[1].ByteWidth offsetBuffer, valueRanges, err := concatOffsets(gatherFixedBuffers(data, 1, offsetWidth), offsetWidth, 
mem) @@ -739,7 +768,6 @@ func concat(data []arrow.ArrayData, mem memory.Allocator) (arr arrow.ArrayData, out.childData[0].Release() return nil, err } - default: return nil, fmt.Errorf("concatenate not implemented for type %s", dt) } diff --git a/go/arrow/array/concat_test.go b/go/arrow/array/concat_test.go index c80844f05bacd..7b22d97a41e00 100644 --- a/go/arrow/array/concat_test.go +++ b/go/arrow/array/concat_test.go @@ -23,11 +23,11 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/internal/testing/gen" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/internal/testing/gen" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" @@ -84,6 +84,7 @@ func TestConcatenate(t *testing.T) { {arrow.StructOf()}, {arrow.MapOf(arrow.PrimitiveTypes.Uint16, arrow.PrimitiveTypes.Int8)}, {&arrow.DictionaryType{IndexType: arrow.PrimitiveTypes.Int32, ValueType: arrow.PrimitiveTypes.Float64}}, + {arrow.BinaryTypes.StringView}, } for _, tt := range tests { @@ -150,6 +151,8 @@ func (cts *ConcatTestSuite) generateArr(size int64, nullprob float64) arrow.Arra return cts.rng.String(size, 0, 15, nullprob) case arrow.LARGE_STRING: return cts.rng.LargeString(size, 0, 15, nullprob) + case arrow.STRING_VIEW: + return cts.rng.StringView(size, 0, 20, nullprob) case arrow.LIST: valuesSize := size * 4 values := cts.rng.Int8(valuesSize, 0, 127, nullprob).(*array.Int8) diff --git a/go/arrow/array/data.go b/go/arrow/array/data.go index 49df06fb1174a..8cce49182b879 100644 --- a/go/arrow/array/data.go +++ b/go/arrow/array/data.go @@ -22,9 +22,9 @@ import ( "sync/atomic" 
"unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" ) // Data represents the memory and metadata of an Arrow array. diff --git a/go/arrow/array/data_test.go b/go/arrow/array/data_test.go index 2773096995fb6..b7b0f396470d7 100644 --- a/go/arrow/array/data_test.go +++ b/go/arrow/array/data_test.go @@ -19,8 +19,8 @@ package array import ( "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/decimal128.go b/go/arrow/array/decimal128.go index 3317531687ca4..16a492db09c67 100644 --- a/go/arrow/array/decimal128.go +++ b/go/arrow/array/decimal128.go @@ -25,12 +25,12 @@ import ( "strings" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) // A type which represents an immutable sequence of 128-bit decimal values. 
diff --git a/go/arrow/array/decimal128_test.go b/go/arrow/array/decimal128_test.go index 8ef09b68c176a..8c26d00cdc18e 100644 --- a/go/arrow/array/decimal128_test.go +++ b/go/arrow/array/decimal128_test.go @@ -19,10 +19,10 @@ package array_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/decimal256.go b/go/arrow/array/decimal256.go index d63544f784a09..8f72e414d1959 100644 --- a/go/arrow/array/decimal256.go +++ b/go/arrow/array/decimal256.go @@ -25,12 +25,12 @@ import ( "strings" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) // Decimal256 is a type that represents an immutable sequence of 256-bit decimal values. 
diff --git a/go/arrow/array/decimal256_test.go b/go/arrow/array/decimal256_test.go index a02098e79103f..6085d6b5a6a59 100644 --- a/go/arrow/array/decimal256_test.go +++ b/go/arrow/array/decimal256_test.go @@ -19,10 +19,10 @@ package array_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/decimal_test.go b/go/arrow/array/decimal_test.go index 2203b2702c09a..67900447be1cf 100644 --- a/go/arrow/array/decimal_test.go +++ b/go/arrow/array/decimal_test.go @@ -21,12 +21,12 @@ import ( "math/big" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/suite" ) diff --git a/go/arrow/array/dictionary.go b/go/arrow/array/dictionary.go index d0a1c4dc97e1d..856f91605ff53 100644 --- a/go/arrow/array/dictionary.go +++ b/go/arrow/array/dictionary.go @@ -25,16 +25,16 @@ import ( "sync/atomic" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - 
"github.com/apache/arrow/go/v14/arrow/float16" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/hashing" - "github.com/apache/arrow/go/v14/internal/json" - "github.com/apache/arrow/go/v14/internal/utils" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/hashing" + "github.com/apache/arrow/go/v15/internal/json" + "github.com/apache/arrow/go/v15/internal/utils" ) // Dictionary represents the type for dictionary-encoded data with a data diff --git a/go/arrow/array/dictionary_test.go b/go/arrow/array/dictionary_test.go index d0878fa3b0329..5a3e0e10c23f3 100644 --- a/go/arrow/array/dictionary_test.go +++ b/go/arrow/array/dictionary_test.go @@ -24,13 +24,13 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/types" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/types" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" diff --git a/go/arrow/array/diff.go b/go/arrow/array/diff.go index 
026a27b983f59..6bf6372531fd7 100644 --- a/go/arrow/array/diff.go +++ b/go/arrow/array/diff.go @@ -20,7 +20,7 @@ import ( "fmt" "strings" - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" ) // Edit represents one entry in the edit script to compare two arrays. diff --git a/go/arrow/array/diff_test.go b/go/arrow/array/diff_test.go index 0eff8dc4f0577..17539c38d282f 100644 --- a/go/arrow/array/diff_test.go +++ b/go/arrow/array/diff_test.go @@ -23,11 +23,11 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" - "github.com/apache/arrow/go/v14/internal/types" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" + "github.com/apache/arrow/go/v15/internal/types" ) type diffTestCase struct { diff --git a/go/arrow/array/encoded.go b/go/arrow/array/encoded.go index bf4a942cf1c35..fa5fa7addf34c 100644 --- a/go/arrow/array/encoded.go +++ b/go/arrow/array/encoded.go @@ -23,12 +23,12 @@ import ( "reflect" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/encoded" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" - "github.com/apache/arrow/go/v14/internal/utils" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/encoded" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" + "github.com/apache/arrow/go/v15/internal/utils" ) // RunEndEncoded represents an array containing two children: diff --git a/go/arrow/array/encoded_test.go b/go/arrow/array/encoded_test.go index 
57c01cf22d2d6..5bfac7a1a96e6 100644 --- a/go/arrow/array/encoded_test.go +++ b/go/arrow/array/encoded_test.go @@ -20,10 +20,10 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/go/arrow/array/extension.go b/go/arrow/array/extension.go index 03e8c1734b9d6..021b8e7bc451b 100644 --- a/go/arrow/array/extension.go +++ b/go/arrow/array/extension.go @@ -20,9 +20,9 @@ import ( "fmt" "reflect" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) // ExtensionArray is the interface that needs to be implemented to handle diff --git a/go/arrow/array/extension_test.go b/go/arrow/array/extension_test.go index 5b473fed428ef..a8e2b0dfd59bb 100644 --- a/go/arrow/array/extension_test.go +++ b/go/arrow/array/extension_test.go @@ -19,10 +19,10 @@ package array_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/types" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/types" "github.com/stretchr/testify/suite" ) diff --git a/go/arrow/array/fixed_size_list.go b/go/arrow/array/fixed_size_list.go index 62c3213861a7d..5923d68590b15 100644 --- 
a/go/arrow/array/fixed_size_list.go +++ b/go/arrow/array/fixed_size_list.go @@ -22,11 +22,11 @@ import ( "strings" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) // FixedSizeList represents an immutable sequence of N array values. diff --git a/go/arrow/array/fixed_size_list_test.go b/go/arrow/array/fixed_size_list_test.go index 83dff923fe120..5c01199ddf987 100644 --- a/go/arrow/array/fixed_size_list_test.go +++ b/go/arrow/array/fixed_size_list_test.go @@ -20,9 +20,9 @@ import ( "reflect" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/fixedsize_binary.go b/go/arrow/array/fixedsize_binary.go index 5466156d5a256..6cdaeace939fd 100644 --- a/go/arrow/array/fixedsize_binary.go +++ b/go/arrow/array/fixedsize_binary.go @@ -22,8 +22,8 @@ import ( "fmt" "strings" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/internal/json" ) // A type which represents an immutable sequence of fixed-length binary strings. 
diff --git a/go/arrow/array/fixedsize_binary_test.go b/go/arrow/array/fixedsize_binary_test.go index b65c7051a9b3e..785725537cbdd 100644 --- a/go/arrow/array/fixedsize_binary_test.go +++ b/go/arrow/array/fixedsize_binary_test.go @@ -21,9 +21,9 @@ import ( "github.com/stretchr/testify/assert" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" ) func TestFixedSizeBinary(t *testing.T) { diff --git a/go/arrow/array/fixedsize_binarybuilder.go b/go/arrow/array/fixedsize_binarybuilder.go index ba4b474a89fd0..230a65fd2d352 100644 --- a/go/arrow/array/fixedsize_binarybuilder.go +++ b/go/arrow/array/fixedsize_binarybuilder.go @@ -23,10 +23,10 @@ import ( "reflect" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) // A FixedSizeBinaryBuilder is used to build a FixedSizeBinary array using the Append methods. 
diff --git a/go/arrow/array/fixedsize_binarybuilder_test.go b/go/arrow/array/fixedsize_binarybuilder_test.go index e3962ad3b9372..8e4a0ac1e46a7 100644 --- a/go/arrow/array/fixedsize_binarybuilder_test.go +++ b/go/arrow/array/fixedsize_binarybuilder_test.go @@ -19,8 +19,8 @@ package array import ( "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/float16.go b/go/arrow/array/float16.go index de499e26706cc..4260f8e3774b4 100644 --- a/go/arrow/array/float16.go +++ b/go/arrow/array/float16.go @@ -20,9 +20,9 @@ import ( "fmt" "strings" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/float16" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/internal/json" ) // A type which represents an immutable sequence of Float16 values. 
diff --git a/go/arrow/array/float16_builder.go b/go/arrow/array/float16_builder.go index f96ab6037e002..033b9fa2d8028 100644 --- a/go/arrow/array/float16_builder.go +++ b/go/arrow/array/float16_builder.go @@ -23,12 +23,12 @@ import ( "strconv" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/float16" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) type Float16Builder struct { diff --git a/go/arrow/array/float16_builder_test.go b/go/arrow/array/float16_builder_test.go index 6ee028d978fb0..f8c5890179869 100644 --- a/go/arrow/array/float16_builder_test.go +++ b/go/arrow/array/float16_builder_test.go @@ -19,9 +19,9 @@ package array_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/float16" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/interval.go b/go/arrow/array/interval.go index ff059c92c8583..2a5529f1c30f7 100644 --- a/go/arrow/array/interval.go +++ b/go/arrow/array/interval.go @@ -23,11 +23,11 @@ import ( "strings" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + 
"github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) func NewIntervalData(data arrow.ArrayData) arrow.Array { diff --git a/go/arrow/array/interval_test.go b/go/arrow/array/interval_test.go index 50a96e4779f03..f83fc52dfa34e 100644 --- a/go/arrow/array/interval_test.go +++ b/go/arrow/array/interval_test.go @@ -20,9 +20,9 @@ import ( "math" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/json_reader.go b/go/arrow/array/json_reader.go index e09717c4199b7..2f05d4b70dd76 100644 --- a/go/arrow/array/json_reader.go +++ b/go/arrow/array/json_reader.go @@ -22,10 +22,10 @@ import ( "io" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) type Option func(config) diff --git a/go/arrow/array/json_reader_test.go b/go/arrow/array/json_reader_test.go index 3a095a0d33189..7f12bf211dd04 100644 --- a/go/arrow/array/json_reader_test.go +++ b/go/arrow/array/json_reader_test.go @@ -20,9 +20,9 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" 
"github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/list.go b/go/arrow/array/list.go index d8d8b8c76165a..f10e2072c43a2 100644 --- a/go/arrow/array/list.go +++ b/go/arrow/array/list.go @@ -23,11 +23,11 @@ import ( "strings" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) type ListLike interface { diff --git a/go/arrow/array/list_test.go b/go/arrow/array/list_test.go index bf3555b3f6603..11404b2d8bb95 100644 --- a/go/arrow/array/list_test.go +++ b/go/arrow/array/list_test.go @@ -20,9 +20,9 @@ import ( "reflect" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/map.go b/go/arrow/array/map.go index 9945a90ce495e..fe07a68785067 100644 --- a/go/arrow/array/map.go +++ b/go/arrow/array/map.go @@ -20,9 +20,9 @@ import ( "bytes" "fmt" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) // Map represents an immutable sequence of Key/Value structs. 
It is a diff --git a/go/arrow/array/map_test.go b/go/arrow/array/map_test.go index 3fe78549ec803..a7ecc032682bc 100644 --- a/go/arrow/array/map_test.go +++ b/go/arrow/array/map_test.go @@ -20,9 +20,9 @@ import ( "strconv" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/null.go b/go/arrow/array/null.go index 150a1030eb49d..2735a88a92cb3 100644 --- a/go/arrow/array/null.go +++ b/go/arrow/array/null.go @@ -23,10 +23,10 @@ import ( "strings" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) // Null represents an immutable, degenerate array with no physical storage. 
diff --git a/go/arrow/array/null_test.go b/go/arrow/array/null_test.go index e1bf1e0345b84..5d230ec5cec71 100644 --- a/go/arrow/array/null_test.go +++ b/go/arrow/array/null_test.go @@ -19,9 +19,9 @@ package array_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/numeric.gen.go b/go/arrow/array/numeric.gen.go index a3e1101516aa6..59c9a979768d5 100644 --- a/go/arrow/array/numeric.gen.go +++ b/go/arrow/array/numeric.gen.go @@ -23,8 +23,8 @@ import ( "strconv" "strings" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/internal/json" ) // A type which represents an immutable sequence of int64 values. 
diff --git a/go/arrow/array/numeric.gen.go.tmpl b/go/arrow/array/numeric.gen.go.tmpl index 34d17fbfc8854..027456009daad 100644 --- a/go/arrow/array/numeric.gen.go.tmpl +++ b/go/arrow/array/numeric.gen.go.tmpl @@ -21,8 +21,8 @@ import ( "strings" "time" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/internal/json" ) {{range .In}} diff --git a/go/arrow/array/numeric_test.go b/go/arrow/array/numeric_test.go index 34f59c238cbef..91dd724c8da50 100644 --- a/go/arrow/array/numeric_test.go +++ b/go/arrow/array/numeric_test.go @@ -21,10 +21,10 @@ import ( "reflect" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/numericbuilder.gen.go b/go/arrow/array/numericbuilder.gen.go index 7f01180f55957..52b189d6ed453 100644 --- a/go/arrow/array/numericbuilder.gen.go +++ b/go/arrow/array/numericbuilder.gen.go @@ -27,11 +27,11 @@ import ( "sync/atomic" "time" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) type Int64Builder struct { diff --git a/go/arrow/array/numericbuilder.gen.go.tmpl b/go/arrow/array/numericbuilder.gen.go.tmpl index 
cf663c031f616..82ac35465d424 100644 --- a/go/arrow/array/numericbuilder.gen.go.tmpl +++ b/go/arrow/array/numericbuilder.gen.go.tmpl @@ -17,11 +17,11 @@ package array import ( - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) {{range .In}} diff --git a/go/arrow/array/numericbuilder.gen_test.go b/go/arrow/array/numericbuilder.gen_test.go index b5986775cd995..e1f72773403d8 100644 --- a/go/arrow/array/numericbuilder.gen_test.go +++ b/go/arrow/array/numericbuilder.gen_test.go @@ -21,9 +21,9 @@ package array_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/numericbuilder.gen_test.go.tmpl b/go/arrow/array/numericbuilder.gen_test.go.tmpl index bc8c993374671..eddd884e2eddf 100644 --- a/go/arrow/array/numericbuilder.gen_test.go.tmpl +++ b/go/arrow/array/numericbuilder.gen_test.go.tmpl @@ -19,9 +19,9 @@ package array_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/record.go b/go/arrow/array/record.go index 
0b0fe4c3847fc..d080f726e472d 100644 --- a/go/arrow/array/record.go +++ b/go/arrow/array/record.go @@ -22,10 +22,10 @@ import ( "strings" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) // RecordReader reads a stream of records. diff --git a/go/arrow/array/record_test.go b/go/arrow/array/record_test.go index da50a30b646f1..7d438d1f1f81e 100644 --- a/go/arrow/array/record_test.go +++ b/go/arrow/array/record_test.go @@ -21,9 +21,9 @@ import ( "reflect" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/string.go b/go/arrow/array/string.go index 86e27c970cbe9..90a4628f0d0fb 100644 --- a/go/arrow/array/string.go +++ b/go/arrow/array/string.go @@ -23,11 +23,16 @@ import ( "strings" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) +type StringLike interface { + arrow.Array + Value(int) string +} + // String represents an immutable sequence of variable-length UTF-8 strings. 
type String struct { array @@ -310,6 +315,108 @@ func arrayEqualLargeString(left, right *LargeString) bool { return true } +type StringView struct { + array + values []arrow.ViewHeader + dataBuffers []*memory.Buffer +} + +func NewStringViewData(data arrow.ArrayData) *StringView { + a := &StringView{} + a.refCount = 1 + a.setData(data.(*Data)) + return a +} + +// Reset resets the String with a different set of Data. +func (a *StringView) Reset(data arrow.ArrayData) { + a.setData(data.(*Data)) +} + +func (a *StringView) setData(data *Data) { + if len(data.buffers) < 2 { + panic("len(data.buffers) < 2") + } + a.array.setData(data) + + if valueData := data.buffers[1]; valueData != nil { + a.values = arrow.ViewHeaderTraits.CastFromBytes(valueData.Bytes()) + } + + a.dataBuffers = data.buffers[2:] +} + +func (a *StringView) ValueHeader(i int) *arrow.ViewHeader { + if i < 0 || i >= a.array.data.length { + panic("arrow/array: index out of range") + } + return &a.values[a.array.data.offset+i] +} + +func (a *StringView) Value(i int) string { + s := a.ValueHeader(i) + if s.IsInline() { + return s.InlineString() + } + start := s.BufferOffset() + buf := a.dataBuffers[s.BufferIndex()] + value := buf.Bytes()[start : start+int32(s.Len())] + return *(*string)(unsafe.Pointer(&value)) +} + +func (a *StringView) String() string { + var o strings.Builder + o.WriteString("[") + for i := 0; i < a.Len(); i++ { + if i > 0 { + o.WriteString(" ") + } + switch { + case a.IsNull(i): + o.WriteString(NullValueStr) + default: + fmt.Fprintf(&o, "%q", a.Value(i)) + } + } + o.WriteString("]") + return o.String() +} + +func (a *StringView) ValueStr(i int) string { + if a.IsNull(i) { + return NullValueStr + } + return a.Value(i) +} + +func (a *StringView) GetOneForMarshal(i int) interface{} { + if a.IsNull(i) { + return nil + } + return a.Value(i) +} + +func (a *StringView) MarshalJSON() ([]byte, error) { + vals := make([]interface{}, a.Len()) + for i := 0; i < a.Len(); i++ { + vals[i] = 
a.GetOneForMarshal(i) + } + return json.Marshal(vals) +} + +func arrayEqualStringView(left, right *StringView) bool { + leftBufs, rightBufs := left.dataBuffers, right.dataBuffers + for i := 0; i < left.Len(); i++ { + if left.IsNull(i) { + continue + } + if !left.ValueHeader(i).Equals(leftBufs, right.ValueHeader(i), rightBufs) { + return false + } + } + return true +} + // A StringBuilder is used to build a String array using the Append methods. type StringBuilder struct { *BinaryBuilder @@ -344,10 +451,6 @@ func (b *StringBuilder) Value(i int) string { return string(b.BinaryBuilder.Value(i)) } -// func (b *StringBuilder) UnsafeAppend(v string) { -// b.BinaryBuilder.UnsafeAppend([]byte(v)) -// } - // NewArray creates a String array from the memory buffers used by the builder and resets the StringBuilder // so it can be used to build a new array. func (b *StringBuilder) NewArray() arrow.Array { @@ -441,10 +544,6 @@ func (b *LargeStringBuilder) Value(i int) string { return string(b.BinaryBuilder.Value(i)) } -// func (b *LargeStringBuilder) UnsafeAppend(v string) { -// b.BinaryBuilder.UnsafeAppend([]byte(v)) -// } - // NewArray creates a String array from the memory buffers used by the builder and resets the StringBuilder // so it can be used to build a new array. 
func (b *LargeStringBuilder) NewArray() arrow.Array { @@ -504,9 +603,87 @@ func (b *LargeStringBuilder) UnmarshalJSON(data []byte) error { return b.Unmarshal(dec) } +type StringViewBuilder struct { + *BinaryViewBuilder +} + +func NewStringViewBuilder(mem memory.Allocator) *StringViewBuilder { + bldr := &StringViewBuilder{ + BinaryViewBuilder: NewBinaryViewBuilder(mem), + } + bldr.dtype = arrow.BinaryTypes.StringView + return bldr +} + +func (b *StringViewBuilder) Append(v string) { + b.BinaryViewBuilder.AppendString(v) +} + +func (b *StringViewBuilder) AppendValues(v []string, valid []bool) { + b.BinaryViewBuilder.AppendStringValues(v, valid) +} + +func (b *StringViewBuilder) UnmarshalOne(dec *json.Decoder) error { + t, err := dec.Token() + if err != nil { + return err + } + + switch v := t.(type) { + case string: + b.Append(v) + case []byte: + b.BinaryViewBuilder.Append(v) + case nil: + b.AppendNull() + default: + return &json.UnmarshalTypeError{ + Value: fmt.Sprint(t), + Type: reflect.TypeOf([]byte{}), + Offset: dec.InputOffset(), + } + } + return nil +} + +func (b *StringViewBuilder) Unmarshal(dec *json.Decoder) error { + for dec.More() { + if err := b.UnmarshalOne(dec); err != nil { + return err + } + } + return nil +} + +func (b *StringViewBuilder) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) + t, err := dec.Token() + if err != nil { + return err + } + + if delim, ok := t.(json.Delim); !ok || delim != '[' { + return fmt.Errorf("binary view builder must unpack from json array, found %s", delim) + } + + return b.Unmarshal(dec) +} + +func (b *StringViewBuilder) NewArray() arrow.Array { + return b.NewStringViewArray() +} + +func (b *StringViewBuilder) NewStringViewArray() (a *StringView) { + data := b.newData() + a = NewStringViewData(data) + data.Release() + return +} + type StringLikeBuilder interface { Builder Append(string) + AppendValues([]string, []bool) UnsafeAppend([]byte) ReserveData(int) } @@ -514,8 +691,11 @@ type 
StringLikeBuilder interface { var ( _ arrow.Array = (*String)(nil) _ arrow.Array = (*LargeString)(nil) + _ arrow.Array = (*StringView)(nil) _ Builder = (*StringBuilder)(nil) _ Builder = (*LargeStringBuilder)(nil) + _ Builder = (*StringViewBuilder)(nil) _ StringLikeBuilder = (*StringBuilder)(nil) _ StringLikeBuilder = (*LargeStringBuilder)(nil) + _ StringLikeBuilder = (*StringViewBuilder)(nil) ) diff --git a/go/arrow/array/string_test.go b/go/arrow/array/string_test.go index fbc106b098332..803fae51347c1 100644 --- a/go/arrow/array/string_test.go +++ b/go/arrow/array/string_test.go @@ -21,10 +21,10 @@ import ( "reflect" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) @@ -619,3 +619,176 @@ func TestStringValueLen(t *testing.T) { assert.Equal(t, len(v), slice.ValueLen(i)) } } +func TestStringViewArray(t *testing.T) { + mem := memory.NewCheckedAllocator(memory.NewGoAllocator()) + defer mem.AssertSize(t, 0) + + var ( + // only the last string is long enough to not get inlined + want = []string{"hello", "世界", "", "say goodbye daffy"} + valids = []bool{true, true, false, true} + ) + + sb := array.NewStringViewBuilder(mem) + defer sb.Release() + + sb.Retain() + sb.Release() + + assert.NoError(t, sb.AppendValueFromString(want[0])) + sb.AppendValues(want[1:2], nil) + + sb.AppendNull() + sb.Append(want[3]) + + if got, want := sb.Len(), len(want); got != want { + t.Fatalf("invalid len: got=%d, want=%d", got, want) + } + + if got, want := sb.NullN(), 1; got != want { + t.Fatalf("invalid nulls: got=%d, want=%d", got, want) + } + + arr := sb.NewStringViewArray() + defer arr.Release() + + arr.Retain() + arr.Release() 
+ + assert.Equal(t, "hello", arr.ValueStr(0)) + + if got, want := arr.Len(), len(want); got != want { + t.Fatalf("invalid len: got=%d, want=%d", got, want) + } + + if got, want := arr.NullN(), 1; got != want { + t.Fatalf("invalid nulls: got=%d, want=%d", got, want) + } + + for i := range want { + if arr.IsNull(i) != !valids[i] { + t.Fatalf("arr[%d]-validity: got=%v want=%v", i, !arr.IsNull(i), valids[i]) + } + switch { + case arr.IsNull(i): + default: + got := arr.Value(i) + if got != want[i] { + t.Fatalf("arr[%d]: got=%q, want=%q", i, got, want[i]) + } + } + } + + sub := array.MakeFromData(arr.Data()) + defer sub.Release() + + if sub.DataType().ID() != arrow.STRING_VIEW { + t.Fatalf("invalid type: got=%q, want=string view", sub.DataType().Name()) + } + + if _, ok := sub.(*array.StringView); !ok { + t.Fatalf("could not type-assert to array.String") + } + + if got, want := arr.String(), `["hello" "世界" (null) "say goodbye daffy"]`; got != want { + t.Fatalf("got=%q, want=%q", got, want) + } + + // only the last string gets stuck into a buffer the rest are inlined + // in the headers. 
+ if !bytes.Equal([]byte(`say goodbye daffy`), arr.Data().Buffers()[2].Bytes()) { + t.Fatalf("got=%q, want=%q", string(arr.Data().Buffers()[2].Bytes()), `say goodbye daffy`) + } + + // check the prefix for the non-inlined value + if [4]byte{'s', 'a', 'y', ' '} != arr.ValueHeader(3).Prefix() { + t.Fatalf("got=%q, want=%q", arr.ValueHeader(3).Prefix(), `say `) + } + + slice := array.NewSliceData(arr.Data(), 2, 4) + defer slice.Release() + + sub1 := array.MakeFromData(slice) + defer sub1.Release() + + v, ok := sub1.(*array.StringView) + if !ok { + t.Fatalf("could not type-assert to array.StringView") + } + + if got, want := v.String(), `[(null) "say goodbye daffy"]`; got != want { + t.Fatalf("got=%q, want=%q", got, want) + } + + if !bytes.Equal([]byte(`say goodbye daffy`), v.Data().Buffers()[2].Bytes()) { + t.Fatalf("got=%q, want=%q", string(v.Data().Buffers()[2].Bytes()), `say goodbye daffy`) + } + + // check the prefix for the non-inlined value + if [4]byte{'s', 'a', 'y', ' '} != v.ValueHeader(1).Prefix() { + t.Fatalf("got=%q, want=%q", v.ValueHeader(1).Prefix(), `say `) + } +} + +func TestStringViewBuilder_Empty(t *testing.T) { + mem := memory.NewCheckedAllocator(memory.NewGoAllocator()) + defer mem.AssertSize(t, 0) + + want := []string{"hello", "世界", "", "say goodbye daffy"} + + ab := array.NewStringViewBuilder(mem) + defer ab.Release() + + stringValues := func(a *array.StringView) []string { + vs := make([]string, a.Len()) + for i := range vs { + vs[i] = a.Value(i) + } + return vs + } + + ab.AppendValues([]string{}, nil) + a := ab.NewStringViewArray() + assert.Zero(t, a.Len()) + a.Release() + + ab.AppendValues(nil, nil) + a = ab.NewStringViewArray() + assert.Zero(t, a.Len()) + a.Release() + + ab.AppendValues([]string{}, nil) + ab.AppendValues(want, nil) + a = ab.NewStringViewArray() + assert.Equal(t, want, stringValues(a)) + a.Release() + + ab.AppendValues(want, nil) + ab.AppendValues([]string{}, nil) + a = ab.NewStringViewArray() + assert.Equal(t, want, 
stringValues(a)) + a.Release() +} + +// TestStringReset tests the Reset() method on the String type by creating two different Strings and then +// reseting the contents of string2 with the values from string1. +func TestStringViewReset(t *testing.T) { + mem := memory.NewCheckedAllocator(memory.NewGoAllocator()) + sb1 := array.NewStringViewBuilder(mem) + sb2 := array.NewStringViewBuilder(mem) + defer sb1.Release() + defer sb2.Release() + + sb1.Append("string1") + sb1.AppendNull() + + var ( + string1 = sb1.NewStringViewArray() + string2 = sb2.NewStringViewArray() + + string1Data = string1.Data() + ) + string2.Reset(string1Data) + + assert.Equal(t, "string1", string2.Value(0)) +} diff --git a/go/arrow/array/struct.go b/go/arrow/array/struct.go index 248a25bf6044d..94052953852c2 100644 --- a/go/arrow/array/struct.go +++ b/go/arrow/array/struct.go @@ -23,11 +23,11 @@ import ( "strings" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) // Struct represents an ordered sequence of relative types. 
diff --git a/go/arrow/array/struct_test.go b/go/arrow/array/struct_test.go index 86a5b311fcc21..1b0dc5a3e4b19 100644 --- a/go/arrow/array/struct_test.go +++ b/go/arrow/array/struct_test.go @@ -20,9 +20,9 @@ import ( "reflect" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/table.go b/go/arrow/array/table.go index 6456992e34973..a987dd057f82c 100644 --- a/go/arrow/array/table.go +++ b/go/arrow/array/table.go @@ -23,8 +23,8 @@ import ( "strings" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/internal/debug" ) // NewColumnSlice returns a new zero-copy slice of the column with the indicated diff --git a/go/arrow/array/table_test.go b/go/arrow/array/table_test.go index 8e22d53963ba4..9535ae6b089bb 100644 --- a/go/arrow/array/table_test.go +++ b/go/arrow/array/table_test.go @@ -22,9 +22,9 @@ import ( "reflect" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" ) func TestChunked(t *testing.T) { diff --git a/go/arrow/array/timestamp.go b/go/arrow/array/timestamp.go index 2928b1fc77ff6..6ffb43e067af0 100644 --- a/go/arrow/array/timestamp.go +++ b/go/arrow/array/timestamp.go @@ -24,11 +24,11 @@ import ( "sync/atomic" "time" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - 
"github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) // Timestamp represents an immutable sequence of arrow.Timestamp values. diff --git a/go/arrow/array/timestamp_test.go b/go/arrow/array/timestamp_test.go index 1e55364e8da0d..acbad8b586dd4 100644 --- a/go/arrow/array/timestamp_test.go +++ b/go/arrow/array/timestamp_test.go @@ -20,9 +20,9 @@ import ( "testing" "time" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/array/union.go b/go/arrow/array/union.go index 869355ac7108a..c0a5050560634 100644 --- a/go/arrow/array/union.go +++ b/go/arrow/array/union.go @@ -25,12 +25,12 @@ import ( "strings" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/bitutils" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/bitutils" + "github.com/apache/arrow/go/v15/internal/json" ) // Union is a convenience interface to encompass both Sparse and Dense diff --git a/go/arrow/array/union_test.go b/go/arrow/array/union_test.go index 2f2bf3b84e9f7..e876f5def26d3 100644 --- 
a/go/arrow/array/union_test.go +++ b/go/arrow/array/union_test.go @@ -21,9 +21,9 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" ) diff --git a/go/arrow/array/util.go b/go/arrow/array/util.go index 54d15a8095c80..a1b3cc7d4e5f7 100644 --- a/go/arrow/array/util.go +++ b/go/arrow/array/util.go @@ -22,11 +22,11 @@ import ( "io" "strings" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/hashing" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/hashing" + "github.com/apache/arrow/go/v15/internal/json" ) func min(a, b int) int { diff --git a/go/arrow/array/util_test.go b/go/arrow/array/util_test.go index d3d2b37365513..84a6debdf3946 100644 --- a/go/arrow/array/util_test.go +++ b/go/arrow/array/util_test.go @@ -25,13 +25,13 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/internal/arrdata" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/internal/arrdata" + 
"github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/go/arrow/arrio/arrio.go b/go/arrow/arrio/arrio.go index 466a93a68400a..51cf6dc46d44a 100644 --- a/go/arrow/arrio/arrio.go +++ b/go/arrow/arrio/arrio.go @@ -22,7 +22,7 @@ import ( "errors" "io" - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" ) // Reader is the interface that wraps the Read method. diff --git a/go/arrow/arrio/arrio_test.go b/go/arrow/arrio/arrio_test.go index 7e9aac8fbe520..c80d5d2569d67 100644 --- a/go/arrow/arrio/arrio_test.go +++ b/go/arrow/arrio/arrio_test.go @@ -22,11 +22,11 @@ import ( "os" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/arrio" - "github.com/apache/arrow/go/v14/arrow/internal/arrdata" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/arrio" + "github.com/apache/arrow/go/v15/arrow/internal/arrdata" + "github.com/apache/arrow/go/v15/arrow/ipc" + "github.com/apache/arrow/go/v15/arrow/memory" ) type copyKind int diff --git a/go/arrow/bitutil/bitmaps.go b/go/arrow/bitutil/bitmaps.go index 2e9c0601c3c45..d7516771def7f 100644 --- a/go/arrow/bitutil/bitmaps.go +++ b/go/arrow/bitutil/bitmaps.go @@ -22,9 +22,9 @@ import ( "math/bits" "unsafe" - "github.com/apache/arrow/go/v14/arrow/endian" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/endian" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" ) // BitmapReader is a simple bitmap reader for a byte slice. 
diff --git a/go/arrow/bitutil/bitmaps_test.go b/go/arrow/bitutil/bitmaps_test.go index 60022a179d1da..c926bff39e09a 100644 --- a/go/arrow/bitutil/bitmaps_test.go +++ b/go/arrow/bitutil/bitmaps_test.go @@ -22,8 +22,8 @@ import ( "strconv" "testing" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" ) diff --git a/go/arrow/bitutil/bitutil.go b/go/arrow/bitutil/bitutil.go index a4a1519b8f940..dc510a8b374c4 100644 --- a/go/arrow/bitutil/bitutil.go +++ b/go/arrow/bitutil/bitutil.go @@ -22,7 +22,7 @@ import ( "reflect" "unsafe" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/memory" ) var ( diff --git a/go/arrow/bitutil/bitutil_test.go b/go/arrow/bitutil/bitutil_test.go index 9e29e9d04d1db..189c8541f4925 100644 --- a/go/arrow/bitutil/bitutil_test.go +++ b/go/arrow/bitutil/bitutil_test.go @@ -21,8 +21,8 @@ import ( "math/rand" "testing" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/internal/testing/tools" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/internal/testing/tools" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/cdata/cdata.go b/go/arrow/cdata/cdata.go index 1d454c57f722e..f9693851d7483 100644 --- a/go/arrow/cdata/cdata.go +++ b/go/arrow/cdata/cdata.go @@ -46,10 +46,10 @@ import ( "syscall" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/memory" "golang.org/x/xerrors" ) diff --git 
a/go/arrow/cdata/cdata_exports.go b/go/arrow/cdata/cdata_exports.go index 187c2deb9755f..91f1b352e0327 100644 --- a/go/arrow/cdata/cdata_exports.go +++ b/go/arrow/cdata/cdata_exports.go @@ -45,11 +45,11 @@ import ( "strings" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/endian" - "github.com/apache/arrow/go/v14/arrow/internal" - "github.com/apache/arrow/go/v14/arrow/ipc" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/endian" + "github.com/apache/arrow/go/v15/arrow/internal" + "github.com/apache/arrow/go/v15/arrow/ipc" ) func encodeCMetadata(keys, values []string) []byte { diff --git a/go/arrow/cdata/cdata_test.go b/go/arrow/cdata/cdata_test.go index f09fa3ff2f6fa..607cfe53217a6 100644 --- a/go/arrow/cdata/cdata_test.go +++ b/go/arrow/cdata/cdata_test.go @@ -35,12 +35,12 @@ import ( "time" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/internal/arrdata" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/memory/mallocator" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/internal/arrdata" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/memory/mallocator" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/cdata/cdata_test_framework.go b/go/arrow/cdata/cdata_test_framework.go index 2df52dcb3cd40..1251b20201e41 100644 --- a/go/arrow/cdata/cdata_test_framework.go +++ b/go/arrow/cdata/cdata_test_framework.go @@ -69,10 +69,10 @@ import ( "runtime/cgo" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - 
"github.com/apache/arrow/go/v14/arrow/internal" - "github.com/apache/arrow/go/v14/arrow/memory/mallocator" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/internal" + "github.com/apache/arrow/go/v15/arrow/memory/mallocator" ) const ( diff --git a/go/arrow/cdata/exports.go b/go/arrow/cdata/exports.go index 9d2576818e31c..7353df62d113a 100644 --- a/go/arrow/cdata/exports.go +++ b/go/arrow/cdata/exports.go @@ -20,8 +20,8 @@ import ( "runtime/cgo" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" ) // #include diff --git a/go/arrow/cdata/import_allocator.go b/go/arrow/cdata/import_allocator.go index eff8c7517caef..cf1c6a961ff37 100644 --- a/go/arrow/cdata/import_allocator.go +++ b/go/arrow/cdata/import_allocator.go @@ -20,7 +20,7 @@ import ( "sync/atomic" "unsafe" - "github.com/apache/arrow/go/v14/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/internal/debug" ) // #include "arrow/c/helpers.h" diff --git a/go/arrow/cdata/interface.go b/go/arrow/cdata/interface.go index bf5c5270bae2f..8ce06280a0bf5 100644 --- a/go/arrow/cdata/interface.go +++ b/go/arrow/cdata/interface.go @@ -22,10 +22,10 @@ package cdata import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/arrio" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/arrio" + "github.com/apache/arrow/go/v15/arrow/memory" "golang.org/x/xerrors" ) diff --git a/go/arrow/cdata/test/test_cimport.go b/go/arrow/cdata/test/test_cimport.go index a7c5dc034496a..147c3691f0c71 100644 --- a/go/arrow/cdata/test/test_cimport.go +++ b/go/arrow/cdata/test/test_cimport.go @@ -23,10 +23,10 @@ import ( 
"fmt" "runtime" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/cdata" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/cdata" + "github.com/apache/arrow/go/v15/arrow/memory" ) // #include diff --git a/go/arrow/compute/arithmetic.go b/go/arrow/compute/arithmetic.go index 2fb95f06ff84a..1ee1959b2ddc8 100644 --- a/go/arrow/compute/arithmetic.go +++ b/go/arrow/compute/arithmetic.go @@ -22,12 +22,12 @@ import ( "context" "fmt" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/compute/internal/kernels" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/compute/internal/kernels" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/scalar" ) type ( diff --git a/go/arrow/compute/arithmetic_test.go b/go/arrow/compute/arithmetic_test.go index a99b2074eb69f..821ffd9e068d4 100644 --- a/go/arrow/compute/arithmetic_test.go +++ b/go/arrow/compute/arithmetic_test.go @@ -26,16 +26,16 @@ import ( "testing" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/compute" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/compute/internal/kernels" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/internal/testing/gen" - 
"github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/compute" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/compute/internal/kernels" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/internal/testing/gen" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/scalar" "github.com/klauspost/cpuid/v2" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/go/arrow/compute/cast.go b/go/arrow/compute/cast.go index 8b720a2bad5d8..133a983038ce1 100644 --- a/go/arrow/compute/cast.go +++ b/go/arrow/compute/cast.go @@ -23,11 +23,11 @@ import ( "fmt" "sync" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/compute/internal/kernels" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/compute/internal/kernels" ) var ( diff --git a/go/arrow/compute/cast_test.go b/go/arrow/compute/cast_test.go index cc301cc35e6e5..10957a45167aa 100644 --- a/go/arrow/compute/cast_test.go +++ b/go/arrow/compute/cast_test.go @@ -26,16 +26,16 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/compute" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - 
"github.com/apache/arrow/go/v14/arrow/internal/testing/gen" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/scalar" - "github.com/apache/arrow/go/v14/internal/types" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/compute" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/internal/testing/gen" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/scalar" + "github.com/apache/arrow/go/v15/internal/types" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" diff --git a/go/arrow/compute/datum.go b/go/arrow/compute/datum.go index 1d3c1b4dab700..98bd1f1a0a326 100644 --- a/go/arrow/compute/datum.go +++ b/go/arrow/compute/datum.go @@ -21,9 +21,9 @@ package compute import ( "fmt" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/scalar" ) //go:generate go run golang.org/x/tools/cmd/stringer -type=DatumKind -linecomment diff --git a/go/arrow/compute/example_test.go b/go/arrow/compute/example_test.go index f0db9f377868e..e2b07b7e191ae 100644 --- a/go/arrow/compute/example_test.go +++ b/go/arrow/compute/example_test.go @@ -23,11 +23,11 @@ import ( "fmt" "log" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/compute" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + 
"github.com/apache/arrow/go/v15/arrow/compute" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/memory" ) // This example demonstrates how to register a custom scalar function. diff --git a/go/arrow/compute/exec.go b/go/arrow/compute/exec.go index 84e3310cc2df8..eba47e64bd509 100644 --- a/go/arrow/compute/exec.go +++ b/go/arrow/compute/exec.go @@ -22,9 +22,9 @@ import ( "context" "fmt" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/internal/debug" ) func haveChunkedArray(values []Datum) bool { diff --git a/go/arrow/compute/exec/kernel.go b/go/arrow/compute/exec/kernel.go index 327426da68eb9..766857f63e565 100644 --- a/go/arrow/compute/exec/kernel.go +++ b/go/arrow/compute/exec/kernel.go @@ -24,10 +24,10 @@ import ( "hash/maphash" "strings" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" "golang.org/x/exp/slices" ) diff --git a/go/arrow/compute/exec/kernel_test.go b/go/arrow/compute/exec/kernel_test.go index 16a29ea2deda6..4df6b42ff9408 100644 --- a/go/arrow/compute/exec/kernel_test.go +++ b/go/arrow/compute/exec/kernel_test.go @@ -22,12 +22,12 @@ import ( "fmt" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/compute" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/scalar" 
+ "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/compute" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/scalar" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/compute/exec/span.go b/go/arrow/compute/exec/span.go index b6d240fa4a0be..0b5f6208227f5 100644 --- a/go/arrow/compute/exec/span.go +++ b/go/arrow/compute/exec/span.go @@ -23,11 +23,11 @@ import ( "sync/atomic" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/scalar" ) // BufferSpan is a lightweight Buffer holder for ArraySpans that does not diff --git a/go/arrow/compute/exec/span_test.go b/go/arrow/compute/exec/span_test.go index c642be1c30c23..474c005b44642 100644 --- a/go/arrow/compute/exec/span_test.go +++ b/go/arrow/compute/exec/span_test.go @@ -24,14 +24,14 @@ import ( "testing" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/endian" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/scalar" - "github.com/apache/arrow/go/v14/internal/types" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/endian" + 
"github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/scalar" + "github.com/apache/arrow/go/v15/internal/types" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/compute/exec/utils.go b/go/arrow/compute/exec/utils.go index 6d83b75da1313..17bc30ef2adb0 100644 --- a/go/arrow/compute/exec/utils.go +++ b/go/arrow/compute/exec/utils.go @@ -25,13 +25,13 @@ import ( "sync/atomic" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/float16" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/arrow/memory" "golang.org/x/exp/constraints" "golang.org/x/exp/slices" ) diff --git a/go/arrow/compute/exec/utils_test.go b/go/arrow/compute/exec/utils_test.go index e9bb07177f2ac..b26e4ff41e79f 100644 --- a/go/arrow/compute/exec/utils_test.go +++ b/go/arrow/compute/exec/utils_test.go @@ -21,10 +21,10 @@ package exec_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/compute/exec_internals_test.go b/go/arrow/compute/exec_internals_test.go index eb9273d23389f..bae32268862ff 100644 --- 
a/go/arrow/compute/exec_internals_test.go +++ b/go/arrow/compute/exec_internals_test.go @@ -24,13 +24,13 @@ import ( "fmt" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/internal/testing/gen" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/internal/testing/gen" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/scalar" "github.com/stretchr/testify/suite" ) diff --git a/go/arrow/compute/exec_test.go b/go/arrow/compute/exec_test.go index 30bfbc0246a1c..a37f67c03e8ce 100644 --- a/go/arrow/compute/exec_test.go +++ b/go/arrow/compute/exec_test.go @@ -22,12 +22,12 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/scalar" "github.com/stretchr/testify/suite" ) diff --git a/go/arrow/compute/executor.go b/go/arrow/compute/executor.go index 6da7ed1293065..db89b206daf5f 100644 --- a/go/arrow/compute/executor.go +++ b/go/arrow/compute/executor.go @@ -25,14 +25,14 @@ import ( "runtime" "sync" - 
"github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/internal" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/internal" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/scalar" ) // ExecCtx holds simple contextual information for execution @@ -171,6 +171,8 @@ func addComputeDataPrealloc(dt arrow.DataType, widths []bufferPrealloc) []buffer return append(widths, bufferPrealloc{bitWidth: 32, addLen: 1}) case arrow.LARGE_BINARY, arrow.LARGE_STRING, arrow.LARGE_LIST: return append(widths, bufferPrealloc{bitWidth: 64, addLen: 1}) + case arrow.STRING_VIEW, arrow.BINARY_VIEW: + return append(widths, bufferPrealloc{bitWidth: arrow.ViewHeaderSizeBytes * 8}) } return widths } @@ -1007,9 +1009,10 @@ func (v *vectorExecutor) WrapResults(ctx context.Context, out <-chan Datum, hasC case <-ctx.Done(): return nil case output = <-out: - if output == nil { + if output == nil || ctx.Err() != nil { return nil } + // if the inputs contained at least one chunked array // then we want to return chunked output if hasChunked { diff --git a/go/arrow/compute/expression.go b/go/arrow/compute/expression.go index 9f20c9704dca7..fbb6c502d98ab 100644 --- a/go/arrow/compute/expression.go +++ b/go/arrow/compute/expression.go @@ -28,14 +28,14 @@ import ( "strconv" "strings" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - 
"github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/compute/internal/kernels" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/compute/internal/kernels" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/ipc" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/scalar" ) var hashSeed = maphash.MakeSeed() diff --git a/go/arrow/compute/expression_test.go b/go/arrow/compute/expression_test.go index 71b1b64c4b0c1..b3b44ae1faa61 100644 --- a/go/arrow/compute/expression_test.go +++ b/go/arrow/compute/expression_test.go @@ -22,11 +22,11 @@ package compute_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/compute" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/compute" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/scalar" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/compute/exprs/builders.go b/go/arrow/compute/exprs/builders.go index 39606c27e714a..0b694525d1b47 100644 --- a/go/arrow/compute/exprs/builders.go +++ b/go/arrow/compute/exprs/builders.go @@ -25,8 +25,8 @@ import ( "strings" "unicode" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/compute" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/compute" 
"github.com/substrait-io/substrait-go/expr" "github.com/substrait-io/substrait-go/extensions" "github.com/substrait-io/substrait-go/types" diff --git a/go/arrow/compute/exprs/builders_test.go b/go/arrow/compute/exprs/builders_test.go index e42d7569a8f03..69501622359d5 100644 --- a/go/arrow/compute/exprs/builders_test.go +++ b/go/arrow/compute/exprs/builders_test.go @@ -21,8 +21,8 @@ package exprs_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/compute/exprs" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/compute/exprs" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/substrait-io/substrait-go/expr" diff --git a/go/arrow/compute/exprs/exec.go b/go/arrow/compute/exprs/exec.go index d63752696082b..800ffe62f2559 100644 --- a/go/arrow/compute/exprs/exec.go +++ b/go/arrow/compute/exprs/exec.go @@ -23,15 +23,15 @@ import ( "fmt" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/compute" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/endian" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/compute" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/endian" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/scalar" "github.com/substrait-io/substrait-go/expr" "github.com/substrait-io/substrait-go/extensions" "github.com/substrait-io/substrait-go/types" 
diff --git a/go/arrow/compute/exprs/exec_internal_test.go b/go/arrow/compute/exprs/exec_internal_test.go index 9475a9a8cbca9..680bf36f11958 100644 --- a/go/arrow/compute/exprs/exec_internal_test.go +++ b/go/arrow/compute/exprs/exec_internal_test.go @@ -23,10 +23,10 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/compute" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/compute" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/go/arrow/compute/exprs/exec_test.go b/go/arrow/compute/exprs/exec_test.go index e30d2afdb73f8..65cafc8e62ddb 100644 --- a/go/arrow/compute/exprs/exec_test.go +++ b/go/arrow/compute/exprs/exec_test.go @@ -23,12 +23,12 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/compute" - "github.com/apache/arrow/go/v14/arrow/compute/exprs" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/compute" + "github.com/apache/arrow/go/v15/arrow/compute/exprs" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/scalar" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/substrait-io/substrait-go/expr" diff --git a/go/arrow/compute/exprs/extension_types.go b/go/arrow/compute/exprs/extension_types.go index ce375492d67f1..db7992b8f089f 100644 --- a/go/arrow/compute/exprs/extension_types.go +++ b/go/arrow/compute/exprs/extension_types.go @@ -24,8 +24,8 @@ import ( "reflect" "strings" - 
"github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" ) type simpleExtensionTypeFactory[P comparable] struct { diff --git a/go/arrow/compute/exprs/field_refs.go b/go/arrow/compute/exprs/field_refs.go index fd4313818195e..e95e3c8c9abb9 100644 --- a/go/arrow/compute/exprs/field_refs.go +++ b/go/arrow/compute/exprs/field_refs.go @@ -21,11 +21,11 @@ package exprs import ( "fmt" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/compute" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/compute" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/scalar" "github.com/substrait-io/substrait-go/expr" ) diff --git a/go/arrow/compute/exprs/types.go b/go/arrow/compute/exprs/types.go index 9f807e1e1ecbc..6a5b81d11b3eb 100644 --- a/go/arrow/compute/exprs/types.go +++ b/go/arrow/compute/exprs/types.go @@ -24,8 +24,8 @@ import ( "strconv" "strings" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/compute" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/compute" "github.com/substrait-io/substrait-go/expr" "github.com/substrait-io/substrait-go/extensions" "github.com/substrait-io/substrait-go/types" diff --git a/go/arrow/compute/fieldref.go b/go/arrow/compute/fieldref.go index ee6f39948ebf1..565ae3bfadbd0 100644 --- a/go/arrow/compute/fieldref.go +++ b/go/arrow/compute/fieldref.go @@ -27,8 +27,8 @@ import ( "unicode" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" ) var ( diff --git 
a/go/arrow/compute/fieldref_test.go b/go/arrow/compute/fieldref_test.go index c38ad8ac1fe30..c4fa72182835f 100644 --- a/go/arrow/compute/fieldref_test.go +++ b/go/arrow/compute/fieldref_test.go @@ -19,10 +19,10 @@ package compute_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/compute" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/compute" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/compute/functions.go b/go/arrow/compute/functions.go index a1905f91850f0..b85062721d9b8 100644 --- a/go/arrow/compute/functions.go +++ b/go/arrow/compute/functions.go @@ -23,8 +23,8 @@ import ( "fmt" "strings" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/compute/exec" ) type Function interface { diff --git a/go/arrow/compute/functions_test.go b/go/arrow/compute/functions_test.go index b37da285badd0..1e6bbd598e0c9 100644 --- a/go/arrow/compute/functions_test.go +++ b/go/arrow/compute/functions_test.go @@ -21,8 +21,8 @@ package compute_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/compute" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/compute" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/go/arrow/compute/internal/kernels/base_arithmetic.go b/go/arrow/compute/internal/kernels/base_arithmetic.go index 67e80af74f511..c7950877264df 100644 --- a/go/arrow/compute/internal/kernels/base_arithmetic.go +++ b/go/arrow/compute/internal/kernels/base_arithmetic.go @@ -24,11 +24,11 @@ import ( "math/bits" "github.com/JohnCGriffin/overflow" - 
"github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/internal/debug" "golang.org/x/exp/constraints" ) diff --git a/go/arrow/compute/internal/kernels/base_arithmetic_amd64.go b/go/arrow/compute/internal/kernels/base_arithmetic_amd64.go index 0e78e6c9183ff..942b8e4ff5600 100644 --- a/go/arrow/compute/internal/kernels/base_arithmetic_amd64.go +++ b/go/arrow/compute/internal/kernels/base_arithmetic_amd64.go @@ -21,8 +21,8 @@ package kernels import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/internal/debug" "golang.org/x/exp/constraints" "golang.org/x/sys/cpu" ) diff --git a/go/arrow/compute/internal/kernels/base_arithmetic_avx2_amd64.go b/go/arrow/compute/internal/kernels/base_arithmetic_avx2_amd64.go index 29cce7830e0f4..6814e834b4bf3 100644 --- a/go/arrow/compute/internal/kernels/base_arithmetic_avx2_amd64.go +++ b/go/arrow/compute/internal/kernels/base_arithmetic_avx2_amd64.go @@ -21,7 +21,7 @@ package kernels import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" ) //go:noescape diff --git a/go/arrow/compute/internal/kernels/base_arithmetic_sse4_amd64.go b/go/arrow/compute/internal/kernels/base_arithmetic_sse4_amd64.go index e9b03551f81b1..633ec5f4f18e5 100644 --- a/go/arrow/compute/internal/kernels/base_arithmetic_sse4_amd64.go +++ b/go/arrow/compute/internal/kernels/base_arithmetic_sse4_amd64.go @@ -21,7 +21,7 @@ package 
kernels import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" ) //go:noescape diff --git a/go/arrow/compute/internal/kernels/basic_arithmetic_noasm.go b/go/arrow/compute/internal/kernels/basic_arithmetic_noasm.go index 4f160a1411091..8e46ca030c8b7 100644 --- a/go/arrow/compute/internal/kernels/basic_arithmetic_noasm.go +++ b/go/arrow/compute/internal/kernels/basic_arithmetic_noasm.go @@ -19,7 +19,7 @@ package kernels import ( - "github.com/apache/arrow/go/v14/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/compute/exec" "golang.org/x/exp/constraints" ) diff --git a/go/arrow/compute/internal/kernels/boolean_cast.go b/go/arrow/compute/internal/kernels/boolean_cast.go index 18d04c845c41d..923c5b3f54512 100644 --- a/go/arrow/compute/internal/kernels/boolean_cast.go +++ b/go/arrow/compute/internal/kernels/boolean_cast.go @@ -22,9 +22,9 @@ import ( "strconv" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/compute/exec" ) func isNonZero[T exec.FixedWidthTypes](ctx *exec.KernelCtx, in []T, out []byte) error { diff --git a/go/arrow/compute/internal/kernels/cast.go b/go/arrow/compute/internal/kernels/cast.go index 5a71206b2476c..bc4ee3abd128c 100644 --- a/go/arrow/compute/internal/kernels/cast.go +++ b/go/arrow/compute/internal/kernels/cast.go @@ -19,9 +19,9 @@ package kernels import ( - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/compute/exec" ) type CastOptions struct { diff --git a/go/arrow/compute/internal/kernels/cast_numeric.go 
b/go/arrow/compute/internal/kernels/cast_numeric.go index 4e5c5c1d9ea77..2e893c7205f6a 100644 --- a/go/arrow/compute/internal/kernels/cast_numeric.go +++ b/go/arrow/compute/internal/kernels/cast_numeric.go @@ -21,7 +21,7 @@ package kernels import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" ) var castNumericUnsafe func(itype, otype arrow.Type, in, out []byte, len int) = castNumericGo diff --git a/go/arrow/compute/internal/kernels/cast_numeric_avx2_amd64.go b/go/arrow/compute/internal/kernels/cast_numeric_avx2_amd64.go index 6b28441ec083e..eafa4b41001ae 100644 --- a/go/arrow/compute/internal/kernels/cast_numeric_avx2_amd64.go +++ b/go/arrow/compute/internal/kernels/cast_numeric_avx2_amd64.go @@ -21,7 +21,7 @@ package kernels import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" ) //go:noescape diff --git a/go/arrow/compute/internal/kernels/cast_numeric_neon_arm64.go b/go/arrow/compute/internal/kernels/cast_numeric_neon_arm64.go index d53a4486f6983..75ad79fd0e7f3 100644 --- a/go/arrow/compute/internal/kernels/cast_numeric_neon_arm64.go +++ b/go/arrow/compute/internal/kernels/cast_numeric_neon_arm64.go @@ -21,7 +21,7 @@ package kernels import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" "golang.org/x/sys/cpu" ) diff --git a/go/arrow/compute/internal/kernels/cast_numeric_sse4_amd64.go b/go/arrow/compute/internal/kernels/cast_numeric_sse4_amd64.go index 1cbea033dec27..d04f393c4c099 100644 --- a/go/arrow/compute/internal/kernels/cast_numeric_sse4_amd64.go +++ b/go/arrow/compute/internal/kernels/cast_numeric_sse4_amd64.go @@ -21,7 +21,7 @@ package kernels import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" ) //go:noescape diff --git a/go/arrow/compute/internal/kernels/cast_temporal.go b/go/arrow/compute/internal/kernels/cast_temporal.go index 82fce1e33c959..542a8a4590b28 100644 --- 
a/go/arrow/compute/internal/kernels/cast_temporal.go +++ b/go/arrow/compute/internal/kernels/cast_temporal.go @@ -24,10 +24,10 @@ import ( "time" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/internal/debug" ) const millisecondsInDay = 86400000 diff --git a/go/arrow/compute/internal/kernels/helpers.go b/go/arrow/compute/internal/kernels/helpers.go index ed25071c9d87f..686c4b3e0c29a 100644 --- a/go/arrow/compute/internal/kernels/helpers.go +++ b/go/arrow/compute/internal/kernels/helpers.go @@ -22,13 +22,13 @@ import ( "fmt" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/scalar" - "github.com/apache/arrow/go/v14/internal/bitutils" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/scalar" + "github.com/apache/arrow/go/v15/internal/bitutils" "golang.org/x/exp/constraints" ) diff --git a/go/arrow/compute/internal/kernels/numeric_cast.go b/go/arrow/compute/internal/kernels/numeric_cast.go index 8e5350754723d..c055552bf7ff5 100644 --- a/go/arrow/compute/internal/kernels/numeric_cast.go +++ b/go/arrow/compute/internal/kernels/numeric_cast.go @@ -23,13 +23,13 @@ import ( "strconv" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - 
"github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/internal/bitutils" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/internal/bitutils" "golang.org/x/exp/constraints" ) diff --git a/go/arrow/compute/internal/kernels/rounding.go b/go/arrow/compute/internal/kernels/rounding.go index 2f58a9faea8a2..8a1bec1180ac8 100644 --- a/go/arrow/compute/internal/kernels/rounding.go +++ b/go/arrow/compute/internal/kernels/rounding.go @@ -22,11 +22,11 @@ import ( "fmt" "math" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/scalar" "golang.org/x/exp/constraints" ) diff --git a/go/arrow/compute/internal/kernels/scalar_arithmetic.go b/go/arrow/compute/internal/kernels/scalar_arithmetic.go index 9cb32ae64a4a9..cf17e9fd9548b 100644 --- a/go/arrow/compute/internal/kernels/scalar_arithmetic.go +++ b/go/arrow/compute/internal/kernels/scalar_arithmetic.go @@ -22,13 +22,13 @@ import ( "fmt" "time" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - 
"github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/scalar" ) // scalar kernel that ignores (assumed all-null inputs) and returns null diff --git a/go/arrow/compute/internal/kernels/scalar_boolean.go b/go/arrow/compute/internal/kernels/scalar_boolean.go index 812f4ad1bd363..0707c92e6a198 100644 --- a/go/arrow/compute/internal/kernels/scalar_boolean.go +++ b/go/arrow/compute/internal/kernels/scalar_boolean.go @@ -19,9 +19,9 @@ package kernels import ( - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/scalar" ) type computeWordFN func(leftTrue, leftFalse, rightTrue, rightFalse uint64) (outValid, outData uint64) diff --git a/go/arrow/compute/internal/kernels/scalar_comparison_amd64.go b/go/arrow/compute/internal/kernels/scalar_comparison_amd64.go index 585d1bff01ab6..8e5ce1ab7c1ad 100644 --- a/go/arrow/compute/internal/kernels/scalar_comparison_amd64.go +++ b/go/arrow/compute/internal/kernels/scalar_comparison_amd64.go @@ -21,8 +21,8 @@ package kernels import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/compute/exec" "golang.org/x/sys/cpu" ) diff 
--git a/go/arrow/compute/internal/kernels/scalar_comparison_avx2_amd64.go b/go/arrow/compute/internal/kernels/scalar_comparison_avx2_amd64.go index 868179057ccf4..cf9fc1eeedbb8 100644 --- a/go/arrow/compute/internal/kernels/scalar_comparison_avx2_amd64.go +++ b/go/arrow/compute/internal/kernels/scalar_comparison_avx2_amd64.go @@ -21,7 +21,7 @@ package kernels import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" ) //go:noescape diff --git a/go/arrow/compute/internal/kernels/scalar_comparison_noasm.go b/go/arrow/compute/internal/kernels/scalar_comparison_noasm.go index 56abad42b1ffc..c0aef5a04e9b8 100644 --- a/go/arrow/compute/internal/kernels/scalar_comparison_noasm.go +++ b/go/arrow/compute/internal/kernels/scalar_comparison_noasm.go @@ -18,7 +18,7 @@ package kernels -import "github.com/apache/arrow/go/v14/arrow/compute/exec" +import "github.com/apache/arrow/go/v15/arrow/compute/exec" func genCompareKernel[T exec.NumericTypes](op CompareOperator) *CompareData { return genGoCompareKernel(getCmpOp[T](op)) diff --git a/go/arrow/compute/internal/kernels/scalar_comparison_sse4_amd64.go b/go/arrow/compute/internal/kernels/scalar_comparison_sse4_amd64.go index 7cf96a4171d6c..f8b36a1e4be76 100644 --- a/go/arrow/compute/internal/kernels/scalar_comparison_sse4_amd64.go +++ b/go/arrow/compute/internal/kernels/scalar_comparison_sse4_amd64.go @@ -21,7 +21,7 @@ package kernels import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" ) //go:noescape diff --git a/go/arrow/compute/internal/kernels/scalar_comparisons.go b/go/arrow/compute/internal/kernels/scalar_comparisons.go index 8a957eafd85da..9a7640a8d8a39 100644 --- a/go/arrow/compute/internal/kernels/scalar_comparisons.go +++ b/go/arrow/compute/internal/kernels/scalar_comparisons.go @@ -23,14 +23,14 @@ import ( "fmt" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - 
"github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/scalar" - "github.com/apache/arrow/go/v14/internal/bitutils" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/scalar" + "github.com/apache/arrow/go/v15/internal/bitutils" ) type binaryKernel func(left, right, out []byte, offset int) diff --git a/go/arrow/compute/internal/kernels/string_casts.go b/go/arrow/compute/internal/kernels/string_casts.go index 3070514665d7d..76da901e33f8d 100644 --- a/go/arrow/compute/internal/kernels/string_casts.go +++ b/go/arrow/compute/internal/kernels/string_casts.go @@ -23,12 +23,12 @@ import ( "strconv" "unicode/utf8" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/float16" - "github.com/apache/arrow/go/v14/internal/bitutils" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/internal/bitutils" ) func validateUtf8Fsb(input *exec.ArraySpan) error { diff --git a/go/arrow/compute/internal/kernels/types.go b/go/arrow/compute/internal/kernels/types.go index 2788fb7054c53..481eab36059f8 100644 --- a/go/arrow/compute/internal/kernels/types.go +++ b/go/arrow/compute/internal/kernels/types.go @@ -21,10 
+21,10 @@ package kernels import ( "fmt" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/scalar" ) var ( diff --git a/go/arrow/compute/internal/kernels/vector_hash.go b/go/arrow/compute/internal/kernels/vector_hash.go index e0ede826e876e..9401e31cc5b09 100644 --- a/go/arrow/compute/internal/kernels/vector_hash.go +++ b/go/arrow/compute/internal/kernels/vector_hash.go @@ -21,13 +21,13 @@ package kernels import ( "fmt" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/bitutils" - "github.com/apache/arrow/go/v14/internal/hashing" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/bitutils" + "github.com/apache/arrow/go/v15/internal/hashing" ) type HashState interface { diff --git a/go/arrow/compute/internal/kernels/vector_run_end_encode.go b/go/arrow/compute/internal/kernels/vector_run_end_encode.go index e073ff1ff5308..a147bf7d50170 100644 --- a/go/arrow/compute/internal/kernels/vector_run_end_encode.go +++ b/go/arrow/compute/internal/kernels/vector_run_end_encode.go @@ -24,14 +24,14 @@ import ( "sort" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - 
"github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/float16" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" ) type RunEndEncodeState struct { diff --git a/go/arrow/compute/internal/kernels/vector_selection.go b/go/arrow/compute/internal/kernels/vector_selection.go index c7a902bd008d0..714e452325bfd 100644 --- a/go/arrow/compute/internal/kernels/vector_selection.go +++ b/go/arrow/compute/internal/kernels/vector_selection.go @@ -22,13 +22,13 @@ import ( "fmt" "math" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/bitutils" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/bitutils" ) type NullSelectionBehavior int8 diff --git a/go/arrow/compute/registry.go b/go/arrow/compute/registry.go index 4f1c435fb680a..379e0ccbe86a9 100644 --- a/go/arrow/compute/registry.go +++ b/go/arrow/compute/registry.go @@ -21,7 +21,7 @@ package compute import ( "sync" - 
"github.com/apache/arrow/go/v14/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/internal/debug" "golang.org/x/exp/maps" "golang.org/x/exp/slices" ) diff --git a/go/arrow/compute/registry_test.go b/go/arrow/compute/registry_test.go index 428f7cf1d8fda..4e4f44f1d39b6 100644 --- a/go/arrow/compute/registry_test.go +++ b/go/arrow/compute/registry_test.go @@ -23,9 +23,9 @@ import ( "errors" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/compute" - "github.com/apache/arrow/go/v14/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/compute" + "github.com/apache/arrow/go/v15/arrow/compute/exec" "github.com/stretchr/testify/assert" "golang.org/x/exp/slices" ) diff --git a/go/arrow/compute/scalar_bool.go b/go/arrow/compute/scalar_bool.go index 49c745688284e..1f28a6e2bfcb2 100644 --- a/go/arrow/compute/scalar_bool.go +++ b/go/arrow/compute/scalar_bool.go @@ -21,9 +21,9 @@ package compute import ( "fmt" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/compute/internal/kernels" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/compute/internal/kernels" ) var ( diff --git a/go/arrow/compute/scalar_bool_test.go b/go/arrow/compute/scalar_bool_test.go index 6eadc973d6965..bd4f3c5c0df2b 100644 --- a/go/arrow/compute/scalar_bool_test.go +++ b/go/arrow/compute/scalar_bool_test.go @@ -23,11 +23,11 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/compute" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/compute" + 
"github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/scalar" "github.com/stretchr/testify/require" ) diff --git a/go/arrow/compute/scalar_compare.go b/go/arrow/compute/scalar_compare.go index 476f37711e289..24a4191a10999 100644 --- a/go/arrow/compute/scalar_compare.go +++ b/go/arrow/compute/scalar_compare.go @@ -21,9 +21,9 @@ package compute import ( "context" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/compute/internal/kernels" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/compute/internal/kernels" ) type compareFunction struct { diff --git a/go/arrow/compute/scalar_compare_test.go b/go/arrow/compute/scalar_compare_test.go index 13fdd24fdd6b3..d209f72c800b0 100644 --- a/go/arrow/compute/scalar_compare_test.go +++ b/go/arrow/compute/scalar_compare_test.go @@ -24,15 +24,15 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/compute" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/compute/internal/kernels" - "github.com/apache/arrow/go/v14/arrow/internal/testing/gen" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/compute" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/compute/internal/kernels" + "github.com/apache/arrow/go/v15/arrow/internal/testing/gen" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/scalar" "github.com/stretchr/testify/assert" 
"github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" diff --git a/go/arrow/compute/selection.go b/go/arrow/compute/selection.go index ed6d80418d3f6..90bc5280ef2a7 100644 --- a/go/arrow/compute/selection.go +++ b/go/arrow/compute/selection.go @@ -22,10 +22,10 @@ import ( "context" "fmt" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/compute/internal/kernels" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/compute/internal/kernels" "golang.org/x/sync/errgroup" ) diff --git a/go/arrow/compute/utils.go b/go/arrow/compute/utils.go index cc4d6edc48ec8..9de6523fd0b5f 100644 --- a/go/arrow/compute/utils.go +++ b/go/arrow/compute/utils.go @@ -24,12 +24,12 @@ import ( "math" "time" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/compute/internal/kernels" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/compute/internal/kernels" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" "golang.org/x/xerrors" ) diff --git a/go/arrow/compute/vector_hash.go b/go/arrow/compute/vector_hash.go index 5f9aec55c55bc..144c123380fe2 100644 --- a/go/arrow/compute/vector_hash.go +++ b/go/arrow/compute/vector_hash.go @@ -21,8 +21,8 @@ package compute import ( "context" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/compute/internal/kernels" + 
"github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/compute/internal/kernels" ) var ( diff --git a/go/arrow/compute/vector_hash_test.go b/go/arrow/compute/vector_hash_test.go index 50c96ddc04f6c..9410720de7941 100644 --- a/go/arrow/compute/vector_hash_test.go +++ b/go/arrow/compute/vector_hash_test.go @@ -23,13 +23,13 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/compute" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/compute" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" diff --git a/go/arrow/compute/vector_run_end_test.go b/go/arrow/compute/vector_run_end_test.go index 8d894d96b8f0a..51c0f834ceb6a 100644 --- a/go/arrow/compute/vector_run_end_test.go +++ b/go/arrow/compute/vector_run_end_test.go @@ -25,13 +25,13 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/compute" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/internal/testing/gen" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/compute" + 
"github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/internal/testing/gen" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/suite" ) diff --git a/go/arrow/compute/vector_run_ends.go b/go/arrow/compute/vector_run_ends.go index 48f3dcba8c481..3e47c67de3c35 100644 --- a/go/arrow/compute/vector_run_ends.go +++ b/go/arrow/compute/vector_run_ends.go @@ -21,8 +21,8 @@ package compute import ( "context" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/compute/internal/kernels" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/compute/internal/kernels" ) var ( diff --git a/go/arrow/compute/vector_selection_test.go b/go/arrow/compute/vector_selection_test.go index 10788d60e10d2..f44840ba72034 100644 --- a/go/arrow/compute/vector_selection_test.go +++ b/go/arrow/compute/vector_selection_test.go @@ -24,15 +24,15 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/compute" - "github.com/apache/arrow/go/v14/arrow/compute/exec" - "github.com/apache/arrow/go/v14/arrow/compute/internal/kernels" - "github.com/apache/arrow/go/v14/arrow/internal/testing/gen" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/scalar" - "github.com/apache/arrow/go/v14/internal/types" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/compute" + "github.com/apache/arrow/go/v15/arrow/compute/exec" + "github.com/apache/arrow/go/v15/arrow/compute/internal/kernels" + "github.com/apache/arrow/go/v15/arrow/internal/testing/gen" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/scalar" + "github.com/apache/arrow/go/v15/internal/types" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" 
"github.com/stretchr/testify/suite" diff --git a/go/arrow/csv/common.go b/go/arrow/csv/common.go index 91f42897a24a6..2a1f7300a986f 100644 --- a/go/arrow/csv/common.go +++ b/go/arrow/csv/common.go @@ -22,8 +22,8 @@ import ( "errors" "fmt" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/memory" ) var ( diff --git a/go/arrow/csv/reader.go b/go/arrow/csv/reader.go index 80af520a12b7f..e58b426d837fb 100644 --- a/go/arrow/csv/reader.go +++ b/go/arrow/csv/reader.go @@ -29,13 +29,13 @@ import ( "time" "unicode/utf8" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/float16" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" ) // Reader wraps encoding/csv.Reader and creates array.Records from a schema. 
diff --git a/go/arrow/csv/reader_test.go b/go/arrow/csv/reader_test.go index f481cd2e961d3..dfcb6625bd7cc 100644 --- a/go/arrow/csv/reader_test.go +++ b/go/arrow/csv/reader_test.go @@ -25,13 +25,13 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/csv" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/types" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/csv" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/types" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/go/arrow/csv/transformer.go b/go/arrow/csv/transformer.go index 7c8eafcee1a59..0f0181520b847 100644 --- a/go/arrow/csv/transformer.go +++ b/go/arrow/csv/transformer.go @@ -25,8 +25,8 @@ import ( "math/big" "strconv" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" ) func (w *Writer) transformColToStringArr(typ arrow.DataType, col arrow.Array) []string { diff --git a/go/arrow/csv/writer.go b/go/arrow/csv/writer.go index e558364ecf16d..a672008b58a07 100644 --- a/go/arrow/csv/writer.go +++ b/go/arrow/csv/writer.go @@ -22,7 +22,7 @@ import ( "strconv" "sync" - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" ) // Writer wraps encoding/csv.Writer and writes arrow.Record based on a schema. 
diff --git a/go/arrow/csv/writer_test.go b/go/arrow/csv/writer_test.go index f945023790426..1918e2e492dff 100644 --- a/go/arrow/csv/writer_test.go +++ b/go/arrow/csv/writer_test.go @@ -25,14 +25,14 @@ import ( "log" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/csv" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/float16" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/types" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/csv" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/types" "github.com/google/uuid" ) diff --git a/go/arrow/datatype.go b/go/arrow/datatype.go index f0fb24ec873c5..1e5d8fb98aa59 100644 --- a/go/arrow/datatype.go +++ b/go/arrow/datatype.go @@ -21,7 +21,7 @@ import ( "hash/maphash" "strings" - "github.com/apache/arrow/go/v14/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/internal/debug" ) // Type is a logical type. They can be expressed as @@ -210,6 +210,11 @@ type BinaryDataType interface { binary() } +type BinaryViewDataType interface { + BinaryDataType + view() +} + type OffsetsDataType interface { DataType OffsetTypeTraits() OffsetTraits @@ -272,6 +277,8 @@ func (b BufferSpec) Equals(other BufferSpec) bool { type DataTypeLayout struct { Buffers []BufferSpec HasDict bool + // VariadicSpec is what the buffers beyond len(Buffers) are expected to conform to. 
+ VariadicSpec *BufferSpec } func SpecFixedWidth(w int) BufferSpec { return BufferSpec{KindFixedWidth, w} } diff --git a/go/arrow/datatype_binary.go b/go/arrow/datatype_binary.go index a3a8568645052..f3e601f08ec79 100644 --- a/go/arrow/datatype_binary.go +++ b/go/arrow/datatype_binary.go @@ -83,16 +83,57 @@ func (t *LargeStringType) Layout() DataTypeLayout { func (t *LargeStringType) OffsetTypeTraits() OffsetTraits { return Int64Traits } func (LargeStringType) IsUtf8() bool { return true } +type BinaryViewType struct{} + +func (*BinaryViewType) ID() Type { return BINARY_VIEW } +func (*BinaryViewType) Name() string { return "binary_view" } +func (*BinaryViewType) String() string { return "binary_view" } +func (*BinaryViewType) IsUtf8() bool { return false } +func (*BinaryViewType) binary() {} +func (*BinaryViewType) view() {} +func (t *BinaryViewType) Fingerprint() string { return typeFingerprint(t) } +func (*BinaryViewType) Layout() DataTypeLayout { + variadic := SpecVariableWidth() + return DataTypeLayout{ + Buffers: []BufferSpec{SpecBitmap(), SpecFixedWidth(ViewHeaderSizeBytes)}, + VariadicSpec: &variadic, + } +} + +type StringViewType struct{} + +func (*StringViewType) ID() Type { return STRING_VIEW } +func (*StringViewType) Name() string { return "string_view" } +func (*StringViewType) String() string { return "string_view" } +func (*StringViewType) IsUtf8() bool { return true } +func (*StringViewType) binary() {} +func (*StringViewType) view() {} +func (t *StringViewType) Fingerprint() string { return typeFingerprint(t) } +func (*StringViewType) Layout() DataTypeLayout { + variadic := SpecVariableWidth() + return DataTypeLayout{ + Buffers: []BufferSpec{SpecBitmap(), SpecFixedWidth(ViewHeaderSizeBytes)}, + VariadicSpec: &variadic, + } +} + var ( BinaryTypes = struct { Binary BinaryDataType String BinaryDataType LargeBinary BinaryDataType LargeString BinaryDataType + BinaryView BinaryDataType + StringView BinaryDataType }{ Binary: &BinaryType{}, String: 
&StringType{}, LargeBinary: &LargeBinaryType{}, LargeString: &LargeStringType{}, + BinaryView: &BinaryViewType{}, + StringView: &StringViewType{}, } + + _ BinaryViewDataType = (*StringViewType)(nil) + _ BinaryViewDataType = (*BinaryViewType)(nil) ) diff --git a/go/arrow/datatype_binary_test.go b/go/arrow/datatype_binary_test.go index c47df3da1db9c..083d69ee3e5d4 100644 --- a/go/arrow/datatype_binary_test.go +++ b/go/arrow/datatype_binary_test.go @@ -19,7 +19,7 @@ package arrow_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" ) func TestBinaryType(t *testing.T) { @@ -81,3 +81,33 @@ func TestLargeStringType(t *testing.T) { t.Fatalf("invalid string type stringer. got=%v, want=%v", got, want) } } + +func TestBinaryViewType(t *testing.T) { + var nt *arrow.BinaryViewType + if got, want := nt.ID(), arrow.BINARY_VIEW; got != want { + t.Fatalf("invalid string type id. got=%v, want=%v", got, want) + } + + if got, want := nt.Name(), "binary_view"; got != want { + t.Fatalf("invalid string type name. got=%v, want=%v", got, want) + } + + if got, want := nt.String(), "binary_view"; got != want { + t.Fatalf("invalid string type stringer. got=%v, want=%v", got, want) + } +} + +func TestStringViewType(t *testing.T) { + var nt *arrow.StringViewType + if got, want := nt.ID(), arrow.STRING_VIEW; got != want { + t.Fatalf("invalid string type id. got=%v, want=%v", got, want) + } + + if got, want := nt.Name(), "string_view"; got != want { + t.Fatalf("invalid string type name. got=%v, want=%v", got, want) + } + + if got, want := nt.String(), "string_view"; got != want { + t.Fatalf("invalid string type stringer. 
got=%v, want=%v", got, want) + } +} diff --git a/go/arrow/datatype_extension_test.go b/go/arrow/datatype_extension_test.go index c542b22b769e6..9811800400506 100644 --- a/go/arrow/datatype_extension_test.go +++ b/go/arrow/datatype_extension_test.go @@ -20,8 +20,8 @@ import ( "reflect" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/internal/types" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/internal/types" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" ) diff --git a/go/arrow/datatype_fixedwidth.go b/go/arrow/datatype_fixedwidth.go index fc0b3aea56e70..bcbc8ef6aec87 100644 --- a/go/arrow/datatype_fixedwidth.go +++ b/go/arrow/datatype_fixedwidth.go @@ -21,7 +21,7 @@ import ( "strconv" "time" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/internal/json" "golang.org/x/xerrors" ) diff --git a/go/arrow/datatype_fixedwidth_test.go b/go/arrow/datatype_fixedwidth_test.go index 1d3a07de09c1d..918572d40b8f4 100644 --- a/go/arrow/datatype_fixedwidth_test.go +++ b/go/arrow/datatype_fixedwidth_test.go @@ -20,7 +20,7 @@ import ( "testing" "time" - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/datatype_nested.go b/go/arrow/datatype_nested.go index 4ae4880334620..e381cd7047e45 100644 --- a/go/arrow/datatype_nested.go +++ b/go/arrow/datatype_nested.go @@ -22,7 +22,7 @@ import ( "strconv" "strings" - "github.com/apache/arrow/go/v14/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/internal/debug" ) type ( diff --git a/go/arrow/datatype_null_test.go b/go/arrow/datatype_null_test.go index 286a72fb7fea3..57cddfadb8ef8 100644 --- a/go/arrow/datatype_null_test.go +++ b/go/arrow/datatype_null_test.go @@ -19,7 +19,7 @@ package arrow_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" ) func 
TestNullType(t *testing.T) { diff --git a/go/arrow/datatype_viewheader.go b/go/arrow/datatype_viewheader.go new file mode 100644 index 0000000000000..54b9256b34604 --- /dev/null +++ b/go/arrow/datatype_viewheader.go @@ -0,0 +1,141 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package arrow + +import ( + "bytes" + "unsafe" + + "github.com/apache/arrow/go/v15/arrow/endian" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" +) + +const ( + ViewPrefixLen = 4 + viewInlineSize = 12 +) + +func IsViewInline(length int) bool { + return length < viewInlineSize +} + +// ViewHeader is a variable length string (utf8) or byte slice with +// a 4 byte prefix and inline optimization for small values (12 bytes +// or fewer). This is similar to Go's standard string but limited by +// a length of Uint32Max and up to the first four bytes of the string +// are copied into the struct. This prefix allows failing comparisons +// early and can reduce CPU cache working set when dealing with short +// strings. 
+// +// There are two situations: +// +// Entirely inlined string data +// |----|------------| +// ^ ^ +// | | +// size inline string data, zero padded +// +// Reference into buffer +// |----|----|----|----| +// ^ ^ ^ ^ +// | | | | +// size prefix buffer index and offset to out-of-line portion +// +// Adapted from TU Munich's UmbraDB [1], Velox, DuckDB. +// +// [1]: https://db.in.tum.de/~freitag/papers/p29-neumann-cidr20.pdf +type ViewHeader struct { + size int32 + // the first 4 bytes of this are the prefix for the string + // if size <= StringHeaderInlineSize, then the entire string + // is in the data array and is zero padded. + // if size > StringHeaderInlineSize, the next 8 bytes are 2 uint32 + // values which are the buffer index and offset in that buffer + // containing the full string. + data [viewInlineSize]byte +} + +func (sh *ViewHeader) IsInline() bool { + return sh.size <= int32(viewInlineSize) +} + +func (sh *ViewHeader) Len() int { return int(sh.size) } +func (sh *ViewHeader) Prefix() [ViewPrefixLen]byte { + return *(*[4]byte)(unsafe.Pointer(&sh.data)) +} + +func (sh *ViewHeader) BufferIndex() int32 { + return int32(endian.Native.Uint32(sh.data[ViewPrefixLen:])) +} + +func (sh *ViewHeader) BufferOffset() int32 { + return int32(endian.Native.Uint32(sh.data[ViewPrefixLen+4:])) +} + +func (sh *ViewHeader) InlineBytes() (data []byte) { + debug.Assert(sh.IsInline(), "calling InlineBytes on non-inline ViewHeader") + return sh.data[:sh.size] +} + +func (sh *ViewHeader) SetBytes(data []byte) int { + sh.size = int32(len(data)) + if sh.IsInline() { + return copy(sh.data[:], data) + } + return copy(sh.data[:4], data) +} + +func (sh *ViewHeader) SetString(data string) int { + sh.size = int32(len(data)) + if sh.IsInline() { + return copy(sh.data[:], data) + } + return copy(sh.data[:4], data) +} + +func (sh *ViewHeader) SetIndexOffset(bufferIndex, offset int32) { + endian.Native.PutUint32(sh.data[ViewPrefixLen:], uint32(bufferIndex)) + 
endian.Native.PutUint32(sh.data[ViewPrefixLen+4:], uint32(offset)) +} + +func (sh *ViewHeader) Equals(buffers []*memory.Buffer, other *ViewHeader, otherBuffers []*memory.Buffer) bool { + if sh.sizeAndPrefixAsInt64() != other.sizeAndPrefixAsInt64() { + return false + } + + if sh.IsInline() { + return sh.inlinedAsInt64() == other.inlinedAsInt64() + } + + return bytes.Equal(sh.getBufferBytes(buffers), other.getBufferBytes(otherBuffers)) +} + +func (sh *ViewHeader) getBufferBytes(buffers []*memory.Buffer) []byte { + offset := sh.BufferOffset() + return buffers[sh.BufferIndex()].Bytes()[offset : offset+sh.size] +} + +func (sh *ViewHeader) inlinedAsInt64() int64 { + s := unsafe.Slice((*int64)(unsafe.Pointer(sh)), 2) + return s[1] +} + +func (sh *ViewHeader) sizeAndPrefixAsInt64() int64 { + s := unsafe.Slice((*int64)(unsafe.Pointer(sh)), 2) + return s[0] +} diff --git a/go/arrow/datatype_viewheader_inline.go b/go/arrow/datatype_viewheader_inline.go new file mode 100644 index 0000000000000..89ac1d06adcdf --- /dev/null +++ b/go/arrow/datatype_viewheader_inline.go @@ -0,0 +1,31 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +//go:build go1.20 + +package arrow + +import ( + "unsafe" + + "github.com/apache/arrow/go/v15/arrow/internal/debug" +) + +func (sh *ViewHeader) InlineString() (data string) { + debug.Assert(sh.IsInline(), "calling InlineString on non-inline ViewHeader") + + return unsafe.String((*byte)(unsafe.Pointer(&sh.data)), sh.size) +} diff --git a/go/arrow/datatype_viewheader_inline_go1.19.go b/go/arrow/datatype_viewheader_inline_go1.19.go new file mode 100644 index 0000000000000..aec66009d9492 --- /dev/null +++ b/go/arrow/datatype_viewheader_inline_go1.19.go @@ -0,0 +1,35 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +//go:build !go1.20 && !tinygo + +package arrow + +import ( + "reflect" + "unsafe" + + "github.com/apache/arrow/go/v15/arrow/internal/debug" +) + +func (sh *ViewHeader) InlineString() (data string) { + debug.Assert(sh.IsInline(), "calling InlineString on non-inline ViewHeader") + + h := (*reflect.StringHeader)(unsafe.Pointer(&data)) + h.Data = uintptr(unsafe.Pointer(&sh.data)) + h.Len = int(sh.size) + return +} diff --git a/go/arrow/datatype_viewheader_inline_tinygo.go b/go/arrow/datatype_viewheader_inline_tinygo.go new file mode 100644 index 0000000000000..bff63a273a722 --- /dev/null +++ b/go/arrow/datatype_viewheader_inline_tinygo.go @@ -0,0 +1,35 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +//go:build !go1.20 && tinygo + +package arrow + +import ( + "reflect" + "unsafe" + + "github.com/apache/arrow/go/v15/arrow/internal/debug" +) + +func (sh *ViewHeader) InlineString() (data string) { + debug.Assert(sh.IsInline(), "calling InlineString on non-inline ViewHeader") + + h := (*reflect.StringHeader)(unsafe.Pointer(&data)) + h.Data = uintptr(unsafe.Pointer(&sh.data)) + h.Len = uintptr(sh.size) + return +} diff --git a/go/arrow/decimal128/decimal128.go b/go/arrow/decimal128/decimal128.go index ff9c088b9d638..3b88dce1fa809 100644 --- a/go/arrow/decimal128/decimal128.go +++ b/go/arrow/decimal128/decimal128.go @@ -23,7 +23,7 @@ import ( "math/big" "math/bits" - "github.com/apache/arrow/go/v14/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/internal/debug" ) const ( @@ -261,7 +261,7 @@ func FromString(v string, prec, scale int32) (n Num, err error) { var precInBits = uint(math.Round(float64(prec+scale+1)/math.Log10(2))) + 1 var out *big.Float - out, _, err = big.ParseFloat(v, 10, 127, big.ToNearestEven) + out, _, err = big.ParseFloat(v, 10, 128, big.ToNearestEven) if err != nil { return } @@ -280,7 +280,7 @@ func FromString(v string, prec, scale int32) (n Num, err error) { // (e.g. C++) handles Decimal values. So if we're negative we'll subtract 0.5 and if // we're positive we'll add 0.5. 
p := (&big.Float{}).SetInt(scaleMultipliers[scale].BigInt()) - out.Mul(out, p).SetPrec(precInBits) + out.SetPrec(precInBits).Mul(out, p) if out.Signbit() { out.Sub(out, pt5) } else { diff --git a/go/arrow/decimal128/decimal128_test.go b/go/arrow/decimal128/decimal128_test.go index 102eede6d3b28..4cfd7db20db08 100644 --- a/go/arrow/decimal128/decimal128_test.go +++ b/go/arrow/decimal128/decimal128_test.go @@ -22,8 +22,9 @@ import ( "math/big" "testing" - "github.com/apache/arrow/go/v14/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal128" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestFromU64(t *testing.T) { @@ -698,3 +699,11 @@ func TestBitLen(t *testing.T) { _, err = decimal128.FromString(b.String(), decimal128.MaxPrecision, -1) assert.ErrorContains(t, err, "bitlen too large for decimal128") } + +func TestFromStringDecimal128b(t *testing.T) { + const decStr = "9323406071781562130.6457232358109488923" + + num, err := decimal128.FromString(decStr, 38, 19) + require.NoError(t, err) + assert.Equal(t, decStr, num.ToString(19)) +} diff --git a/go/arrow/decimal256/decimal256.go b/go/arrow/decimal256/decimal256.go index cf2157fdd48ed..5f2ad5f32165c 100644 --- a/go/arrow/decimal256/decimal256.go +++ b/go/arrow/decimal256/decimal256.go @@ -23,8 +23,8 @@ import ( "math/big" "math/bits" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/internal/debug" ) const ( diff --git a/go/arrow/decimal256/decimal256_test.go b/go/arrow/decimal256/decimal256_test.go index 9c734e23aa0d6..3be6a1944406f 100644 --- a/go/arrow/decimal256/decimal256_test.go +++ b/go/arrow/decimal256/decimal256_test.go @@ -23,7 +23,7 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/decimal256" "github.com/stretchr/testify/assert" ) diff 
--git a/go/arrow/doc.go b/go/arrow/doc.go index 4a5f5a3c9ef72..c8da330e4f8a1 100644 --- a/go/arrow/doc.go +++ b/go/arrow/doc.go @@ -36,7 +36,7 @@ To build with tinygo include the noasm build tag. */ package arrow -const PkgVersion = "14.0.0-SNAPSHOT" +const PkgVersion = "15.0.0-SNAPSHOT" //go:generate go run _tools/tmpl/main.go -i -data=numeric.tmpldata type_traits_numeric.gen.go.tmpl type_traits_numeric.gen_test.go.tmpl array/numeric.gen.go.tmpl array/numericbuilder.gen.go.tmpl array/bufferbuilder_numeric.gen.go.tmpl //go:generate go run _tools/tmpl/main.go -i -data=datatype_numeric.gen.go.tmpldata datatype_numeric.gen.go.tmpl tensor/numeric.gen.go.tmpl tensor/numeric.gen_test.go.tmpl diff --git a/go/arrow/encoded/ree_utils.go b/go/arrow/encoded/ree_utils.go index 1f71e7b5210cf..2ff4e7cadd38b 100644 --- a/go/arrow/encoded/ree_utils.go +++ b/go/arrow/encoded/ree_utils.go @@ -20,7 +20,7 @@ import ( "math" "sort" - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" ) // FindPhysicalIndex performs a binary search on the run-ends to return diff --git a/go/arrow/encoded/ree_utils_test.go b/go/arrow/encoded/ree_utils_test.go index 785a3b105a47d..9470331002b80 100644 --- a/go/arrow/encoded/ree_utils_test.go +++ b/go/arrow/encoded/ree_utils_test.go @@ -21,10 +21,10 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/encoded" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/encoded" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/endian/endian.go b/go/arrow/endian/endian.go index 3ecda7b3600b7..d4865025b1fb0 100644 --- a/go/arrow/endian/endian.go +++ b/go/arrow/endian/endian.go @@ -17,8 +17,8 @@ package endian import ( - 
"github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/internal/flatbuf" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/internal/flatbuf" ) type Endianness flatbuf.Endianness diff --git a/go/arrow/example_test.go b/go/arrow/example_test.go index 10b3633f65536..b28cc9093739f 100644 --- a/go/arrow/example_test.go +++ b/go/arrow/example_test.go @@ -20,10 +20,10 @@ import ( "fmt" "log" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/tensor" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/tensor" ) // This example demonstrates how to build an array of int64 values using a builder and Append. diff --git a/go/arrow/flight/basic_auth_flight_test.go b/go/arrow/flight/basic_auth_flight_test.go index 0f63a400353d2..4097bf02edeff 100755 --- a/go/arrow/flight/basic_auth_flight_test.go +++ b/go/arrow/flight/basic_auth_flight_test.go @@ -22,7 +22,7 @@ import ( "io" "testing" - "github.com/apache/arrow/go/v14/arrow/flight" + "github.com/apache/arrow/go/v15/arrow/flight" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/credentials/insecure" diff --git a/go/arrow/flight/client.go b/go/arrow/flight/client.go index c9a1417accab4..8c400eb66b64d 100644 --- a/go/arrow/flight/client.go +++ b/go/arrow/flight/client.go @@ -26,7 +26,7 @@ import ( "strings" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow/flight/gen/flight" + "github.com/apache/arrow/go/v15/arrow/flight/gen/flight" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/metadata" diff --git a/go/arrow/flight/cookie_middleware_test.go b/go/arrow/flight/cookie_middleware_test.go index 2bde37fc0bb85..bdfe4fb32c1a5 100644 --- 
a/go/arrow/flight/cookie_middleware_test.go +++ b/go/arrow/flight/cookie_middleware_test.go @@ -28,7 +28,7 @@ import ( "testing" "time" - "github.com/apache/arrow/go/v14/arrow/flight" + "github.com/apache/arrow/go/v15/arrow/flight" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "google.golang.org/grpc" diff --git a/go/arrow/flight/example_flight_server_test.go b/go/arrow/flight/example_flight_server_test.go index 09f01361d13a0..d3a71b3b67ade 100755 --- a/go/arrow/flight/example_flight_server_test.go +++ b/go/arrow/flight/example_flight_server_test.go @@ -23,7 +23,7 @@ import ( "io" "log" - "github.com/apache/arrow/go/v14/arrow/flight" + "github.com/apache/arrow/go/v15/arrow/flight" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/credentials/insecure" diff --git a/go/arrow/flight/flight_middleware_test.go b/go/arrow/flight/flight_middleware_test.go index f2b240d1f2e45..e6faa1b7df1d2 100755 --- a/go/arrow/flight/flight_middleware_test.go +++ b/go/arrow/flight/flight_middleware_test.go @@ -23,8 +23,8 @@ import ( sync "sync" "testing" - "github.com/apache/arrow/go/v14/arrow/flight" - "github.com/apache/arrow/go/v14/arrow/internal/arrdata" + "github.com/apache/arrow/go/v15/arrow/flight" + "github.com/apache/arrow/go/v15/arrow/internal/arrdata" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "google.golang.org/grpc" diff --git a/go/arrow/flight/flight_test.go b/go/arrow/flight/flight_test.go index 7cd724b76b8ce..cd37658603d5b 100755 --- a/go/arrow/flight/flight_test.go +++ b/go/arrow/flight/flight_test.go @@ -23,11 +23,11 @@ import ( "io" "testing" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/flight" - "github.com/apache/arrow/go/v14/arrow/internal/arrdata" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/flight" + 
"github.com/apache/arrow/go/v15/arrow/internal/arrdata" + "github.com/apache/arrow/go/v15/arrow/ipc" + "github.com/apache/arrow/go/v15/arrow/memory" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/credentials/insecure" diff --git a/go/arrow/flight/flightsql/client.go b/go/arrow/flight/flightsql/client.go index 13d046661e96d..c0c7e2cf20a28 100644 --- a/go/arrow/flight/flightsql/client.go +++ b/go/arrow/flight/flightsql/client.go @@ -22,12 +22,12 @@ import ( "fmt" "io" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/flight" - pb "github.com/apache/arrow/go/v14/arrow/flight/gen/flight" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/flight" + pb "github.com/apache/arrow/go/v15/arrow/flight/gen/flight" + "github.com/apache/arrow/go/v15/arrow/ipc" + "github.com/apache/arrow/go/v15/arrow/memory" "google.golang.org/grpc" "google.golang.org/protobuf/proto" "google.golang.org/protobuf/types/known/anypb" diff --git a/go/arrow/flight/flightsql/client_test.go b/go/arrow/flight/flightsql/client_test.go index bb4ae29477997..3efe4ba4049b8 100644 --- a/go/arrow/flight/flightsql/client_test.go +++ b/go/arrow/flight/flightsql/client_test.go @@ -22,12 +22,12 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/flight" - "github.com/apache/arrow/go/v14/arrow/flight/flightsql" - pb "github.com/apache/arrow/go/v14/arrow/flight/gen/flight" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/flight" + "github.com/apache/arrow/go/v15/arrow/flight/flightsql" + pb 
"github.com/apache/arrow/go/v15/arrow/flight/gen/flight" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/suite" "google.golang.org/grpc" diff --git a/go/arrow/flight/flightsql/column_metadata.go b/go/arrow/flight/flightsql/column_metadata.go index d9785892888b2..8e0a69b99d055 100644 --- a/go/arrow/flight/flightsql/column_metadata.go +++ b/go/arrow/flight/flightsql/column_metadata.go @@ -19,7 +19,7 @@ package flightsql import ( "strconv" - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" ) const ( diff --git a/go/arrow/flight/flightsql/driver/README.md b/go/arrow/flight/flightsql/driver/README.md index b8850527c19c1..f1447a7d24256 100644 --- a/go/arrow/flight/flightsql/driver/README.md +++ b/go/arrow/flight/flightsql/driver/README.md @@ -36,7 +36,7 @@ connection pooling, transactions combined with ease of use (see (#usage)). ## Prerequisites * Go 1.17+ -* Installation via `go get -u github.com/apache/arrow/go/v14/arrow/flight/flightsql` +* Installation via `go get -u github.com/apache/arrow/go/v15/arrow/flight/flightsql` * Backend speaking FlightSQL --------------------------------------- @@ -55,7 +55,7 @@ import ( "database/sql" "time" - _ "github.com/apache/arrow/go/v14/arrow/flight/flightsql" + _ "github.com/apache/arrow/go/v15/arrow/flight/flightsql" ) // Open the connection to an SQLite backend @@ -141,7 +141,7 @@ import ( "log" "time" - "github.com/apache/arrow/go/v14/arrow/flight/flightsql" + "github.com/apache/arrow/go/v15/arrow/flight/flightsql" ) func main() { diff --git a/go/arrow/flight/flightsql/driver/config_test.go b/go/arrow/flight/flightsql/driver/config_test.go index abcececeaf214..6b24b535585d9 100644 --- a/go/arrow/flight/flightsql/driver/config_test.go +++ b/go/arrow/flight/flightsql/driver/config_test.go @@ -22,7 +22,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/apache/arrow/go/v14/arrow/flight/flightsql/driver" + 
"github.com/apache/arrow/go/v15/arrow/flight/flightsql/driver" ) func TestConfigTLSRegistry(t *testing.T) { diff --git a/go/arrow/flight/flightsql/driver/driver.go b/go/arrow/flight/flightsql/driver/driver.go index b4cb0d2dbda2e..e325489236c6d 100644 --- a/go/arrow/flight/flightsql/driver/driver.go +++ b/go/arrow/flight/flightsql/driver/driver.go @@ -25,11 +25,11 @@ import ( "sort" "time" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/flight" - "github.com/apache/arrow/go/v14/arrow/flight/flightsql" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/flight" + "github.com/apache/arrow/go/v15/arrow/flight/flightsql" + "github.com/apache/arrow/go/v15/arrow/memory" "google.golang.org/grpc" "google.golang.org/grpc/credentials" diff --git a/go/arrow/flight/flightsql/driver/driver_test.go b/go/arrow/flight/flightsql/driver/driver_test.go index 4449b390974a7..a388bf155ec99 100644 --- a/go/arrow/flight/flightsql/driver/driver_test.go +++ b/go/arrow/flight/flightsql/driver/driver_test.go @@ -33,13 +33,13 @@ import ( "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/flight" - "github.com/apache/arrow/go/v14/arrow/flight/flightsql" - "github.com/apache/arrow/go/v14/arrow/flight/flightsql/driver" - "github.com/apache/arrow/go/v14/arrow/flight/flightsql/example" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/flight" + "github.com/apache/arrow/go/v15/arrow/flight/flightsql" + "github.com/apache/arrow/go/v15/arrow/flight/flightsql/driver" + "github.com/apache/arrow/go/v15/arrow/flight/flightsql/example" 
+ "github.com/apache/arrow/go/v15/arrow/memory" ) const defaultTableName = "drivertest" diff --git a/go/arrow/flight/flightsql/driver/utils.go b/go/arrow/flight/flightsql/driver/utils.go index 99352e7251c53..022e9da4925f9 100644 --- a/go/arrow/flight/flightsql/driver/utils.go +++ b/go/arrow/flight/flightsql/driver/utils.go @@ -21,8 +21,8 @@ import ( "fmt" "time" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" ) // *** GRPC helpers *** diff --git a/go/arrow/flight/flightsql/driver/utils_test.go b/go/arrow/flight/flightsql/driver/utils_test.go index 355c888f56cc4..8f5ea031adf28 100644 --- a/go/arrow/flight/flightsql/driver/utils_test.go +++ b/go/arrow/flight/flightsql/driver/utils_test.go @@ -22,12 +22,12 @@ import ( "testing" "time" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/float16" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/require" ) diff --git a/go/arrow/flight/flightsql/example/cmd/sqlite_flightsql_server/main.go b/go/arrow/flight/flightsql/example/cmd/sqlite_flightsql_server/main.go index 6508833876fa5..1a50422f826f3 100644 --- a/go/arrow/flight/flightsql/example/cmd/sqlite_flightsql_server/main.go +++ b/go/arrow/flight/flightsql/example/cmd/sqlite_flightsql_server/main.go @@ -27,9 +27,9 @@ import ( "os" "strconv" - "github.com/apache/arrow/go/v14/arrow/flight" - "github.com/apache/arrow/go/v14/arrow/flight/flightsql" - 
"github.com/apache/arrow/go/v14/arrow/flight/flightsql/example" + "github.com/apache/arrow/go/v15/arrow/flight" + "github.com/apache/arrow/go/v15/arrow/flight/flightsql" + "github.com/apache/arrow/go/v15/arrow/flight/flightsql/example" ) func main() { diff --git a/go/arrow/flight/flightsql/example/sql_batch_reader.go b/go/arrow/flight/flightsql/example/sql_batch_reader.go index 7101b8f830a6f..36a0d7b424544 100644 --- a/go/arrow/flight/flightsql/example/sql_batch_reader.go +++ b/go/arrow/flight/flightsql/example/sql_batch_reader.go @@ -26,11 +26,11 @@ import ( "strings" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/flight/flightsql" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/flight/flightsql" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" "google.golang.org/protobuf/types/known/wrapperspb" diff --git a/go/arrow/flight/flightsql/example/sqlite_info.go b/go/arrow/flight/flightsql/example/sqlite_info.go index bf29265853c65..c993af97f7449 100644 --- a/go/arrow/flight/flightsql/example/sqlite_info.go +++ b/go/arrow/flight/flightsql/example/sqlite_info.go @@ -20,8 +20,8 @@ package example import ( - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/flight/flightsql" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/flight/flightsql" ) func SqlInfoResultMap() flightsql.SqlInfoResultMap { diff --git a/go/arrow/flight/flightsql/example/sqlite_server.go b/go/arrow/flight/flightsql/example/sqlite_server.go index 63b41ee5e0a4a..f06dd0210655f 100644 --- a/go/arrow/flight/flightsql/example/sqlite_server.go +++ 
b/go/arrow/flight/flightsql/example/sqlite_server.go @@ -45,13 +45,13 @@ import ( "strings" "sync" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/flight" - "github.com/apache/arrow/go/v14/arrow/flight/flightsql" - "github.com/apache/arrow/go/v14/arrow/flight/flightsql/schema_ref" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/flight" + "github.com/apache/arrow/go/v15/arrow/flight/flightsql" + "github.com/apache/arrow/go/v15/arrow/flight/flightsql/schema_ref" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/scalar" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/metadata" diff --git a/go/arrow/flight/flightsql/example/sqlite_tables_schema_batch_reader.go b/go/arrow/flight/flightsql/example/sqlite_tables_schema_batch_reader.go index 507abf622d890..e07a98fffdbf7 100644 --- a/go/arrow/flight/flightsql/example/sqlite_tables_schema_batch_reader.go +++ b/go/arrow/flight/flightsql/example/sqlite_tables_schema_batch_reader.go @@ -25,12 +25,12 @@ import ( "strings" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/flight" - "github.com/apache/arrow/go/v14/arrow/flight/flightsql" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/flight" + "github.com/apache/arrow/go/v15/arrow/flight/flightsql" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" sqlite3 "modernc.org/sqlite/lib" ) diff --git 
a/go/arrow/flight/flightsql/example/type_info.go b/go/arrow/flight/flightsql/example/type_info.go index 6d6ceb339e4b0..0180fc1f23af5 100644 --- a/go/arrow/flight/flightsql/example/type_info.go +++ b/go/arrow/flight/flightsql/example/type_info.go @@ -22,10 +22,10 @@ package example import ( "strings" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/flight/flightsql/schema_ref" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/flight/flightsql/schema_ref" + "github.com/apache/arrow/go/v15/arrow/memory" ) func GetTypeInfoResult(mem memory.Allocator) arrow.Record { diff --git a/go/arrow/flight/flightsql/schema_ref/reference_schemas.go b/go/arrow/flight/flightsql/schema_ref/reference_schemas.go index 98d61f8110d45..b65688306473b 100644 --- a/go/arrow/flight/flightsql/schema_ref/reference_schemas.go +++ b/go/arrow/flight/flightsql/schema_ref/reference_schemas.go @@ -18,7 +18,7 @@ // by FlightSQL servers and clients. 
package schema_ref -import "github.com/apache/arrow/go/v14/arrow" +import "github.com/apache/arrow/go/v15/arrow" var ( Catalogs = arrow.NewSchema( diff --git a/go/arrow/flight/flightsql/server.go b/go/arrow/flight/flightsql/server.go index e9cbc177da2c4..a086610433eae 100644 --- a/go/arrow/flight/flightsql/server.go +++ b/go/arrow/flight/flightsql/server.go @@ -20,14 +20,14 @@ import ( "context" "fmt" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/flight" - "github.com/apache/arrow/go/v14/arrow/flight/flightsql/schema_ref" - pb "github.com/apache/arrow/go/v14/arrow/flight/gen/flight" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/flight" + "github.com/apache/arrow/go/v15/arrow/flight/flightsql/schema_ref" + pb "github.com/apache/arrow/go/v15/arrow/flight/gen/flight" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/ipc" + "github.com/apache/arrow/go/v15/arrow/memory" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" "google.golang.org/protobuf/proto" diff --git a/go/arrow/flight/flightsql/server_test.go b/go/arrow/flight/flightsql/server_test.go index 33e9b8236a261..e444da4aaf4a2 100644 --- a/go/arrow/flight/flightsql/server_test.go +++ b/go/arrow/flight/flightsql/server_test.go @@ -22,12 +22,12 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/flight" - "github.com/apache/arrow/go/v14/arrow/flight/flightsql" - pb "github.com/apache/arrow/go/v14/arrow/flight/gen/flight" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + 
"github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/flight" + "github.com/apache/arrow/go/v15/arrow/flight/flightsql" + pb "github.com/apache/arrow/go/v15/arrow/flight/gen/flight" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/suite" "google.golang.org/grpc" "google.golang.org/grpc/codes" diff --git a/go/arrow/flight/flightsql/sql_info.go b/go/arrow/flight/flightsql/sql_info.go index a12c4fbdaeda9..5f78e1facd581 100644 --- a/go/arrow/flight/flightsql/sql_info.go +++ b/go/arrow/flight/flightsql/sql_info.go @@ -17,8 +17,8 @@ package flightsql import ( - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" ) const ( diff --git a/go/arrow/flight/flightsql/sqlite_server_test.go b/go/arrow/flight/flightsql/sqlite_server_test.go index 03ae3d7a8313a..e6fa798c55cf1 100644 --- a/go/arrow/flight/flightsql/sqlite_server_test.go +++ b/go/arrow/flight/flightsql/sqlite_server_test.go @@ -26,14 +26,14 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/flight" - "github.com/apache/arrow/go/v14/arrow/flight/flightsql" - "github.com/apache/arrow/go/v14/arrow/flight/flightsql/example" - "github.com/apache/arrow/go/v14/arrow/flight/flightsql/schema_ref" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/flight" + "github.com/apache/arrow/go/v15/arrow/flight/flightsql" + "github.com/apache/arrow/go/v15/arrow/flight/flightsql/example" + "github.com/apache/arrow/go/v15/arrow/flight/flightsql/schema_ref" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/scalar" 
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" "google.golang.org/grpc/codes" diff --git a/go/arrow/flight/flightsql/types.go b/go/arrow/flight/flightsql/types.go index 98086a61fe067..2b7419482e3e3 100644 --- a/go/arrow/flight/flightsql/types.go +++ b/go/arrow/flight/flightsql/types.go @@ -17,7 +17,7 @@ package flightsql import ( - pb "github.com/apache/arrow/go/v14/arrow/flight/gen/flight" + pb "github.com/apache/arrow/go/v15/arrow/flight/gen/flight" "google.golang.org/protobuf/proto" "google.golang.org/protobuf/types/known/anypb" ) diff --git a/go/arrow/flight/record_batch_reader.go b/go/arrow/flight/record_batch_reader.go index b726180627e6f..f137c75172368 100644 --- a/go/arrow/flight/record_batch_reader.go +++ b/go/arrow/flight/record_batch_reader.go @@ -21,12 +21,12 @@ import ( "fmt" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/arrio" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/arrio" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/ipc" + "github.com/apache/arrow/go/v15/arrow/memory" ) // DataStreamReader is an interface for receiving flight data messages on a stream diff --git a/go/arrow/flight/record_batch_writer.go b/go/arrow/flight/record_batch_writer.go index 3ae17bcf48042..426dca6c3e907 100644 --- a/go/arrow/flight/record_batch_writer.go +++ b/go/arrow/flight/record_batch_writer.go @@ -19,9 +19,9 @@ package flight import ( "bytes" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/ipc" + 
"github.com/apache/arrow/go/v15/arrow/memory" ) // DataStreamWriter is an interface that represents an Arrow Flight stream diff --git a/go/arrow/flight/server.go b/go/arrow/flight/server.go index cf471f9b3a2b6..8676b15644e47 100644 --- a/go/arrow/flight/server.go +++ b/go/arrow/flight/server.go @@ -22,7 +22,7 @@ import ( "os" "os/signal" - "github.com/apache/arrow/go/v14/arrow/flight/gen/flight" + "github.com/apache/arrow/go/v15/arrow/flight/gen/flight" "google.golang.org/grpc" ) diff --git a/go/arrow/flight/server_example_test.go b/go/arrow/flight/server_example_test.go index b82e2cb7f7164..4e2e4f4c6b587 100644 --- a/go/arrow/flight/server_example_test.go +++ b/go/arrow/flight/server_example_test.go @@ -21,7 +21,7 @@ import ( "fmt" "net" - "github.com/apache/arrow/go/v14/arrow/flight" + "github.com/apache/arrow/go/v15/arrow/flight" "google.golang.org/grpc" "google.golang.org/grpc/credentials/insecure" "google.golang.org/grpc/health" diff --git a/go/arrow/float16/float16.go b/go/arrow/float16/float16.go index 4e03d13df0cae..e0192495eb971 100644 --- a/go/arrow/float16/float16.go +++ b/go/arrow/float16/float16.go @@ -17,6 +17,7 @@ package float16 import ( + "encoding/binary" "math" "strconv" ) @@ -29,6 +30,11 @@ type Num struct { bits uint16 } +var ( + MaxNum = Num{bits: 0b0111101111111111} + MinNum = MaxNum.Negate() +) + // New creates a new half-precision floating point value from the provided // float32 value. 
func New(f float32) Num { @@ -86,6 +92,11 @@ func (n Num) Div(rhs Num) Num { return New(n.Float32() / rhs.Float32()) } +// Equal returns true if the value represented by n is == other +func (n Num) Equal(other Num) bool { + return n.Float32() == other.Float32() +} + // Greater returns true if the value represented by n is > other func (n Num) Greater(other Num) bool { return n.Float32() > other.Float32() @@ -152,14 +163,39 @@ func (n Num) Abs() Num { } func (n Num) Sign() int { - f := n.Float32() - if f > 0 { - return 1 - } else if f == 0 { + if n.IsZero() { return 0 + } else if n.Signbit() { + return -1 } - return -1 + return 1 } +func (n Num) Signbit() bool { return (n.bits & 0x8000) != 0 } + +func (n Num) IsNaN() bool { return (n.bits & 0x7fff) > 0x7c00 } + +func (n Num) IsZero() bool { return (n.bits & 0x7fff) == 0 } + func (f Num) Uint16() uint16 { return f.bits } func (f Num) String() string { return strconv.FormatFloat(float64(f.Float32()), 'g', -1, 32) } + +func Inf() Num { return Num{bits: 0x7c00} } + +func NaN() Num { return Num{bits: 0x7fff} } + +func FromBits(src uint16) Num { return Num{bits: src} } + +func FromLEBytes(src []byte) Num { + return Num{bits: binary.LittleEndian.Uint16(src)} +} + +func (f Num) PutLEBytes(dst []byte) { + binary.LittleEndian.PutUint16(dst, f.bits) +} + +func (f Num) ToLEBytes() []byte { + dst := make([]byte, 2) + f.PutLEBytes(dst) + return dst +} diff --git a/go/arrow/float16/float16_test.go b/go/arrow/float16/float16_test.go index 55c3ea8b30404..cfde440c5f9e4 100644 --- a/go/arrow/float16/float16_test.go +++ b/go/arrow/float16/float16_test.go @@ -238,6 +238,7 @@ func TestSign(t *testing.T) { }{ {Num{bits: 0x4580}, 1}, // 5.5 {Num{bits: 0x0000}, 0}, // 0 + {Num{bits: 0x8000}, 0}, // -0 {Num{bits: 0xC580}, -1}, // -5.5 } { t.Run("sign", func(t *testing.T) { @@ -248,3 +249,45 @@ func TestSign(t *testing.T) { }) } } + +func TestSignbit(t *testing.T) { + for _, tc := range []struct { + n Num + want bool + }{ + {Num{bits: 
0x4580}, false}, // 5.5 + {Num{bits: 0x0000}, false}, // 0 + {Num{bits: 0x8000}, true}, // -0 + {Num{bits: 0xC580}, true}, // -5.5 + } { + t.Run("signbit", func(t *testing.T) { + n := tc.n.Signbit() + if got, want := n, tc.want; got != want { + t.Fatalf("invalid value. got=%v, want=%v", got, want) + } + }) + } +} + +func TestIsNaN(t *testing.T) { + for _, tc := range []struct { + n Num + want bool + }{ + {NaN(), true}, + {NaN().Negate(), true}, + {Inf(), false}, + {Inf().Negate(), false}, + {Num{bits: 0x7c01}, true}, // nan + {Num{bits: 0xfc01}, true}, // -nan + {Num{bits: 0x7e00}, true}, // nan + {Num{bits: 0xfe00}, true}, // -nan + } { + t.Run("isnan", func(t *testing.T) { + n := tc.n.IsNaN() + if got, want := n, tc.want; got != want { + t.Fatalf("invalid value. got=%v, want=%v", got, want) + } + }) + } +} diff --git a/go/arrow/internal/arrdata/arrdata.go b/go/arrow/internal/arrdata/arrdata.go index 0851bff0fe0da..985388094eb51 100644 --- a/go/arrow/internal/arrdata/arrdata.go +++ b/go/arrow/internal/arrdata/arrdata.go @@ -21,14 +21,14 @@ import ( "fmt" "sort" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/float16" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/types" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/arrow/ipc" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/types" ) var ( @@ -54,6 +54,7 @@ func init() { Records["extension"] = makeExtensionRecords() Records["union"] = makeUnionRecords() Records["run_end_encoded"] = 
makeRunEndEncodedRecords() + Records["view_types"] = makeStringViewRecords() for k := range Records { RecordNames = append(RecordNames, k) @@ -1155,6 +1156,65 @@ func makeRunEndEncodedRecords() []arrow.Record { return recs } +func makeStringViewRecords() []arrow.Record { + mem := memory.NewGoAllocator() + schema := arrow.NewSchema([]arrow.Field{ + {Name: "binary_view", Type: arrow.BinaryTypes.BinaryView, Nullable: true}, + {Name: "string_view", Type: arrow.BinaryTypes.StringView, Nullable: true}, + }, nil) + + mask := []bool{true, false, false, true, true} + chunks := [][]arrow.Array{ + { + viewTypeArrayOf(mem, [][]byte{[]byte("1é"), []byte("2"), []byte("3"), []byte("4"), []byte("5")}, mask), + viewTypeArrayOf(mem, []string{"1é", "2", "3", "4", "5"}, mask), + }, + { + viewTypeArrayOf(mem, [][]byte{[]byte("1é"), []byte("22222222222222"), []byte("33333333333333"), []byte("4444"), []byte("5555")}, mask), + viewTypeArrayOf(mem, []string{"1é", "22222222222222", "33333333333333", "4444", "5555"}, nil), + }, + { + viewTypeArrayOf(mem, [][]byte{[]byte("1é1é"), []byte("22222222222222"), []byte("33333333333333"), []byte("44"), []byte("55")}, nil), + viewTypeArrayOf(mem, []string{"1é1é", "22222222222222", "33333333333333", "44", "55"}, mask), + }, + } + + defer func() { + for _, chunk := range chunks { + for _, col := range chunk { + col.Release() + } + } + }() + + recs := make([]arrow.Record, len(chunks)) + for i, chunk := range chunks { + recs[i] = array.NewRecord(schema, chunk, -1) + } + + return recs +} + +func viewTypeArrayOf(mem memory.Allocator, a interface{}, valids []bool) arrow.Array { + if mem == nil { + mem = memory.NewGoAllocator() + } + + switch a := a.(type) { + case []string: + bldr := array.NewStringViewBuilder(mem) + defer bldr.Release() + bldr.AppendValues(a, valids) + return bldr.NewArray() + case [][]byte: + bldr := array.NewBinaryViewBuilder(mem) + defer bldr.Release() + bldr.AppendValues(a, valids) + return bldr.NewArray() + } + return nil +} + func 
extArray(mem memory.Allocator, dt arrow.ExtensionType, a interface{}, valids []bool) arrow.Array { var storage arrow.Array switch st := dt.StorageType().(type) { @@ -1750,5 +1810,26 @@ func buildArray(bldr array.Builder, data arrow.Array) { bldr.AppendNull() } } + + case *array.BinaryViewBuilder: + data := data.(*array.BinaryView) + for i := 0; i < data.Len(); i++ { + switch { + case data.IsValid(i): + bldr.Append(data.Value(i)) + default: + bldr.AppendNull() + } + } + case *array.StringViewBuilder: + data := data.(*array.StringView) + for i := 0; i < data.Len(); i++ { + switch { + case data.IsValid(i): + bldr.Append(data.Value(i)) + default: + bldr.AppendNull() + } + } } } diff --git a/go/arrow/internal/arrdata/ioutil.go b/go/arrow/internal/arrdata/ioutil.go index 1a2ce0e5b5eca..a6becc2151ef3 100644 --- a/go/arrow/internal/arrdata/ioutil.go +++ b/go/arrow/internal/arrdata/ioutil.go @@ -23,11 +23,11 @@ import ( "sync" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/internal/flatbuf" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/internal/flatbuf" + "github.com/apache/arrow/go/v15/arrow/ipc" + "github.com/apache/arrow/go/v15/arrow/memory" ) // CheckArrowFile checks whether a given ARROW file contains the expected list of records. 
diff --git a/go/arrow/internal/arrjson/arrjson.go b/go/arrow/internal/arrjson/arrjson.go index ad87b73fc4ddb..f74b615362642 100644 --- a/go/arrow/internal/arrjson/arrjson.go +++ b/go/arrow/internal/arrjson/arrjson.go @@ -26,16 +26,16 @@ import ( "strconv" "strings" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/float16" - "github.com/apache/arrow/go/v14/arrow/internal/dictutils" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/arrow/internal/dictutils" + "github.com/apache/arrow/go/v15/arrow/ipc" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" ) type Schema struct { @@ -158,6 +158,10 @@ func typeToJSON(arrowType arrow.DataType) (json.RawMessage, error) { typ = nameJSON{"utf8"} case *arrow.LargeStringType: typ = nameJSON{"largeutf8"} + case *arrow.BinaryViewType: + typ = nameJSON{"binaryview"} + case *arrow.StringViewType: + typ = nameJSON{"utf8view"} case *arrow.Date32Type: typ = unitZoneJSON{Name: "date", Unit: "DAY"} case *arrow.Date64Type: @@ -342,6 +346,10 @@ func typeFromJSON(typ json.RawMessage, children []FieldWrapper) (arrowType arrow arrowType = arrow.BinaryTypes.String case "largeutf8": arrowType = arrow.BinaryTypes.LargeString + case "binaryview": + arrowType = arrow.BinaryTypes.BinaryView + case "utf8view": + arrowType = arrow.BinaryTypes.StringView case "date": t := 
unitZoneJSON{} if err = json.Unmarshal(typ, &t); err != nil { @@ -818,6 +826,7 @@ type Array struct { Offset interface{} `json:"OFFSET,omitempty"` Size interface{} `json:"SIZE,omitempty"` Children []Array `json:"children,omitempty"` + Variadic []string `json:"VARIADIC_BUFFERS,omitempty"` } func (a *Array) MarshalJSON() ([]byte, error) { @@ -1078,6 +1087,18 @@ func arrayFromJSON(mem memory.Allocator, dt arrow.DataType, arr Array) arrow.Arr bldr.AppendValues(data, valids) return returnNewArrayData(bldr) + case arrow.BinaryViewDataType: + valids := validsToBitmap(validsFromJSON(arr.Valids), mem) + nulls := arr.Count - bitutil.CountSetBits(valids.Bytes(), 0, arr.Count) + headers := stringHeadersFromJSON(mem, !dt.IsUtf8(), arr.Data) + extraBufs := variadicBuffersFromJSON(arr.Variadic) + defer valids.Release() + defer headers.Release() + + return array.NewData(dt, arr.Count, + append([]*memory.Buffer{valids, headers}, extraBufs...), + nil, nulls, 0) + case *arrow.ListType: valids := validsFromJSON(arr.Valids) elems := arrayFromJSON(mem, dt.Elem(), arr.Children[0]) @@ -1486,6 +1507,24 @@ func arrayToJSON(field arrow.Field, arr arrow.Array) Array { Offset: strOffsets, } + case *array.StringView: + variadic := variadicBuffersToJSON(arr.Data().Buffers()[2:]) + return Array{ + Name: field.Name, + Count: arr.Len(), + Valids: validsToJSON(arr), + Data: stringHeadersToJSON(arr, false), + Variadic: variadic, + } + case *array.BinaryView: + variadic := variadicBuffersToJSON(arr.Data().Buffers()[2:]) + return Array{ + Name: field.Name, + Count: arr.Len(), + Valids: validsToJSON(arr), + Data: stringHeadersToJSON(arr, true), + Variadic: variadic, + } case *array.List: o := Array{ Name: field.Name, @@ -2309,3 +2348,114 @@ func durationToJSON(arr *array.Duration) []interface{} { } return o } + +func variadicBuffersFromJSON(bufs []string) []*memory.Buffer { + out := make([]*memory.Buffer, len(bufs)) + for i, data := range bufs { + rawData, err := hex.DecodeString(data) + if err != nil { 
+ panic(err) + } + + out[i] = memory.NewBufferBytes(rawData) + } + return out +} + +func variadicBuffersToJSON(bufs []*memory.Buffer) []string { + out := make([]string, len(bufs)) + for i, data := range bufs { + out[i] = strings.ToUpper(hex.EncodeToString(data.Bytes())) + } + return out +} + +func stringHeadersFromJSON(mem memory.Allocator, isBinary bool, data []interface{}) *memory.Buffer { + buf := memory.NewResizableBuffer(mem) + buf.Resize(arrow.ViewHeaderTraits.BytesRequired(len(data))) + + values := arrow.ViewHeaderTraits.CastFromBytes(buf.Bytes()) + + for i, d := range data { + switch v := d.(type) { + case nil: + continue + case map[string]interface{}: + if inlined, ok := v["INLINED"]; ok { + if isBinary { + val, err := hex.DecodeString(inlined.(string)) + if err != nil { + panic(fmt.Errorf("could not decode %v: %v", inlined, err)) + } + values[i].SetBytes(val) + } else { + values[i].SetString(inlined.(string)) + } + continue + } + + idx, offset := v["BUFFER_INDEX"].(json.Number), v["OFFSET"].(json.Number) + bufIdx, err := idx.Int64() + if err != nil { + panic(err) + } + + bufOffset, err := offset.Int64() + if err != nil { + panic(err) + } + + values[i].SetIndexOffset(int32(bufIdx), int32(bufOffset)) + prefix, err := hex.DecodeString(v["PREFIX"].(string)) + if err != nil { + panic(err) + } + sz, err := v["SIZE"].(json.Number).Int64() + if err != nil { + panic(err) + } + + rawData := make([]byte, sz) + copy(rawData, prefix) + values[i].SetBytes(rawData) + } + } + return buf +} + +func stringHeadersToJSON(arr array.ViewLike, isBinary bool) []interface{} { + type StringHeader struct { + Size int `json:"SIZE"` + Prefix *string `json:"PREFIX,omitempty"` + BufferIdx *int `json:"BUFFER_INDEX,omitempty"` + BufferOff *int `json:"OFFSET,omitempty"` + Inlined *string `json:"INLINED,omitempty"` + } + + o := make([]interface{}, arr.Len()) + for i := range o { + hdr := arr.ValueHeader(i) + if hdr.IsInline() { + data := hdr.InlineString() + if isBinary { + data = 
strings.ToUpper(hex.EncodeToString(hdr.InlineBytes())) + } + o[i] = StringHeader{ + Size: hdr.Len(), + Inlined: &data, + } + continue + } + + idx, off := int(hdr.BufferIndex()), int(hdr.BufferOffset()) + prefix := hdr.Prefix() + encodedPrefix := strings.ToUpper(hex.EncodeToString(prefix[:])) + o[i] = StringHeader{ + Size: hdr.Len(), + Prefix: &encodedPrefix, + BufferIdx: &idx, + BufferOff: &off, + } + } + return o +} diff --git a/go/arrow/internal/arrjson/arrjson_test.go b/go/arrow/internal/arrjson/arrjson_test.go index ee85d431805ab..31f3cb238ec16 100644 --- a/go/arrow/internal/arrjson/arrjson_test.go +++ b/go/arrow/internal/arrjson/arrjson_test.go @@ -22,9 +22,9 @@ import ( "os" "testing" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/internal/arrdata" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/internal/arrdata" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) @@ -48,6 +48,7 @@ func TestReadWrite(t *testing.T) { wantJSONs["dictionary"] = makeDictionaryWantJSONs() wantJSONs["union"] = makeUnionWantJSONs() wantJSONs["run_end_encoded"] = makeRunEndEncodedWantJSONs() + wantJSONs["view_types"] = makeViewTypesWantJSONs() tempDir := t.TempDir() for name, recs := range arrdata.Records { @@ -6127,3 +6128,261 @@ func makeRunEndEncodedWantJSONs() string { ] }` } + +func makeViewTypesWantJSONs() string { + return `{ + "schema": { + "fields": [ + { + "name": "binary_view", + "type": { + "name": "binaryview" + }, + "nullable": true, + "children": [] + }, + { + "name": "string_view", + "type": { + "name": "utf8view" + }, + "nullable": true, + "children": [] + } + ] + }, + "batches": [ + { + "count": 5, + "columns": [ + { + "name": "binary_view", + "count": 5, + "VALIDITY": [ + 1, + 0, + 0, + 1, + 1 + ], + "DATA": [ + { + "SIZE": 3, + "INLINED": "31C3A9" + }, + { + "SIZE": 0, + "INLINED": "" + }, + { + "SIZE": 
0, + "INLINED": "" + }, + { + "SIZE": 1, + "INLINED": "34" + }, + { + "SIZE": 1, + "INLINED": "35" + } + ], + "VARIADIC_BUFFERS": [""] + }, + { + "name": "string_view", + "count": 5, + "VALIDITY": [ + 1, + 0, + 0, + 1, + 1 + ], + "DATA": [ + { + "SIZE": 3, + "INLINED": "1é" + }, + { + "SIZE": 0, + "INLINED": "" + }, + { + "SIZE": 0, + "INLINED": "" + }, + { + "SIZE": 1, + "INLINED": "4" + }, + { + "SIZE": 1, + "INLINED": "5" + } + ], + "VARIADIC_BUFFERS": [""] + } + ] + }, + { + "count": 5, + "columns": [ + { + "name": "binary_view", + "count": 5, + "VALIDITY": [ + 1, + 0, + 0, + 1, + 1 + ], + "DATA": [ + { + "SIZE": 3, + "INLINED": "31C3A9" + }, + { + "SIZE": 0, + "INLINED": "" + }, + { + "SIZE": 0, + "INLINED": "" + }, + { + "SIZE": 4, + "INLINED": "34343434" + }, + { + "SIZE": 4, + "INLINED": "35353535" + } + ], + "VARIADIC_BUFFERS": [""] + }, + { + "name": "string_view", + "count": 5, + "VALIDITY": [ + 1, + 1, + 1, + 1, + 1 + ], + "DATA": [ + { + "SIZE": 3, + "INLINED": "1é" + }, + { + "SIZE": 14, + "PREFIX": "32323232", + "BUFFER_INDEX": 0, + "OFFSET": 0 + }, + { + "SIZE": 14, + "PREFIX": "33333333", + "BUFFER_INDEX": 0, + "OFFSET": 14 + }, + { + "SIZE": 4, + "INLINED": "4444" + }, + { + "SIZE": 4, + "INLINED": "5555" + } + ], + "VARIADIC_BUFFERS": [ + "32323232323232323232323232323333333333333333333333333333" + ] + } + ] + }, + { + "count": 5, + "columns": [ + { + "name": "binary_view", + "count": 5, + "VALIDITY": [ + 1, + 1, + 1, + 1, + 1 + ], + "DATA": [ + { + "SIZE": 6, + "INLINED": "31C3A931C3A9" + }, + { + "SIZE": 14, + "PREFIX": "32323232", + "BUFFER_INDEX": 0, + "OFFSET": 0 + }, + { + "SIZE": 14, + "PREFIX": "33333333", + "BUFFER_INDEX": 0, + "OFFSET": 14 + }, + { + "SIZE": 2, + "INLINED": "3434" + }, + { + "SIZE": 2, + "INLINED": "3535" + } + ], + "VARIADIC_BUFFERS": [ + "32323232323232323232323232323333333333333333333333333333" + ] + }, + { + "name": "string_view", + "count": 5, + "VALIDITY": [ + 1, + 0, + 0, + 1, + 1 + ], + "DATA": [ + { + "SIZE": 
6, + "INLINED": "1é1é" + }, + { + "SIZE": 0, + "INLINED": "" + }, + { + "SIZE": 0, + "INLINED": "" + }, + { + "SIZE": 2, + "INLINED": "44" + }, + { + "SIZE": 2, + "INLINED": "55" + } + ], + "VARIADIC_BUFFERS": [""] + } + ] + } + ] +}` +} diff --git a/go/arrow/internal/arrjson/option.go b/go/arrow/internal/arrjson/option.go index 7ae2058d471a7..47d40f857c54e 100644 --- a/go/arrow/internal/arrjson/option.go +++ b/go/arrow/internal/arrjson/option.go @@ -17,8 +17,8 @@ package arrjson import ( - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/memory" ) type config struct { diff --git a/go/arrow/internal/arrjson/reader.go b/go/arrow/internal/arrjson/reader.go index c8056ef1dc744..226fa1b1919f9 100644 --- a/go/arrow/internal/arrjson/reader.go +++ b/go/arrow/internal/arrjson/reader.go @@ -20,11 +20,11 @@ import ( "io" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/arrio" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/internal/dictutils" - "github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/arrio" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/internal/dictutils" + "github.com/apache/arrow/go/v15/internal/json" ) type Reader struct { diff --git a/go/arrow/internal/arrjson/writer.go b/go/arrow/internal/arrjson/writer.go index ddac32609b604..af7032d581f4d 100644 --- a/go/arrow/internal/arrjson/writer.go +++ b/go/arrow/internal/arrjson/writer.go @@ -20,11 +20,11 @@ import ( "fmt" "io" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/arrio" - "github.com/apache/arrow/go/v14/arrow/internal/dictutils" - "github.com/apache/arrow/go/v14/internal/json" + 
"github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/arrio" + "github.com/apache/arrow/go/v15/arrow/internal/dictutils" + "github.com/apache/arrow/go/v15/internal/json" ) const ( diff --git a/go/arrow/internal/cdata_integration/entrypoints.go b/go/arrow/internal/cdata_integration/entrypoints.go index 629b8a762a689..a40db8316f848 100644 --- a/go/arrow/internal/cdata_integration/entrypoints.go +++ b/go/arrow/internal/cdata_integration/entrypoints.go @@ -25,10 +25,10 @@ import ( "runtime" "unsafe" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/cdata" - "github.com/apache/arrow/go/v14/arrow/internal/arrjson" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/cdata" + "github.com/apache/arrow/go/v15/arrow/internal/arrjson" + "github.com/apache/arrow/go/v15/arrow/memory" ) // #include diff --git a/go/arrow/internal/dictutils/dict.go b/go/arrow/internal/dictutils/dict.go index e09a2f4a0ecd1..d31369f7d25b3 100644 --- a/go/arrow/internal/dictutils/dict.go +++ b/go/arrow/internal/dictutils/dict.go @@ -21,9 +21,9 @@ import ( "fmt" "hash/maphash" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" ) type Kind int8 diff --git a/go/arrow/internal/dictutils/dict_test.go b/go/arrow/internal/dictutils/dict_test.go index 6cafacbd1df00..9d2f7ae4782c7 100644 --- a/go/arrow/internal/dictutils/dict_test.go +++ b/go/arrow/internal/dictutils/dict_test.go @@ -20,10 +20,10 @@ import ( "fmt" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/internal/dictutils" - 
"github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/internal/dictutils" + "github.com/apache/arrow/go/v15/arrow/memory" ) func TestDictMemo(t *testing.T) { diff --git a/go/arrow/internal/flatbuf/MetadataVersion.go b/go/arrow/internal/flatbuf/MetadataVersion.go index 21b234f9c2b21..bb5e99dd588ad 100644 --- a/go/arrow/internal/flatbuf/MetadataVersion.go +++ b/go/arrow/internal/flatbuf/MetadataVersion.go @@ -31,7 +31,7 @@ const ( MetadataVersionV3 MetadataVersion = 2 /// >= 0.8.0 (December 2017). Non-backwards compatible with V3. MetadataVersionV4 MetadataVersion = 3 - /// >= 1.0.0 (July 2020. Backwards compatible with V4 (V5 readers can read V4 + /// >= 1.0.0 (July 2020). Backwards compatible with V4 (V5 readers can read V4 /// metadata and IPC messages). Implementations are recommended to provide a /// V4 compatibility mode with V5 format changes disabled. /// diff --git a/go/arrow/internal/flight_integration/cmd/arrow-flight-integration-client/main.go b/go/arrow/internal/flight_integration/cmd/arrow-flight-integration-client/main.go index 8da39858fabab..1afb53c5294f4 100755 --- a/go/arrow/internal/flight_integration/cmd/arrow-flight-integration-client/main.go +++ b/go/arrow/internal/flight_integration/cmd/arrow-flight-integration-client/main.go @@ -22,7 +22,7 @@ import ( "fmt" "time" - "github.com/apache/arrow/go/v14/arrow/internal/flight_integration" + "github.com/apache/arrow/go/v15/arrow/internal/flight_integration" "google.golang.org/grpc" "google.golang.org/grpc/credentials/insecure" ) diff --git a/go/arrow/internal/flight_integration/cmd/arrow-flight-integration-server/main.go b/go/arrow/internal/flight_integration/cmd/arrow-flight-integration-server/main.go index 9cebb2c695ece..d382ff9e88670 100644 --- a/go/arrow/internal/flight_integration/cmd/arrow-flight-integration-server/main.go +++ 
b/go/arrow/internal/flight_integration/cmd/arrow-flight-integration-server/main.go @@ -23,7 +23,7 @@ import ( "os" "syscall" - "github.com/apache/arrow/go/v14/arrow/internal/flight_integration" + "github.com/apache/arrow/go/v15/arrow/internal/flight_integration" ) var ( diff --git a/go/arrow/internal/flight_integration/scenario.go b/go/arrow/internal/flight_integration/scenario.go index 4f47d7fd506ee..3ec905e2d659c 100644 --- a/go/arrow/internal/flight_integration/scenario.go +++ b/go/arrow/internal/flight_integration/scenario.go @@ -31,15 +31,15 @@ import ( "strings" "time" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/flight" - "github.com/apache/arrow/go/v14/arrow/flight/flightsql" - "github.com/apache/arrow/go/v14/arrow/flight/flightsql/schema_ref" - "github.com/apache/arrow/go/v14/arrow/internal/arrjson" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/types" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/flight" + "github.com/apache/arrow/go/v15/arrow/flight/flightsql" + "github.com/apache/arrow/go/v15/arrow/flight/flightsql/schema_ref" + "github.com/apache/arrow/go/v15/arrow/internal/arrjson" + "github.com/apache/arrow/go/v15/arrow/ipc" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/types" "golang.org/x/xerrors" "google.golang.org/grpc" "google.golang.org/grpc/codes" diff --git a/go/arrow/internal/testing/gen/random_array_gen.go b/go/arrow/internal/testing/gen/random_array_gen.go index 41f2578209a7f..57b417bd2b878 100644 --- a/go/arrow/internal/testing/gen/random_array_gen.go +++ b/go/arrow/internal/testing/gen/random_array_gen.go @@ -19,11 +19,11 @@ package gen import ( "math" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - 
"github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" "golang.org/x/exp/rand" "gonum.org/v1/gonum/stat/distuv" ) @@ -351,6 +351,40 @@ func (r *RandomArrayGenerator) LargeString(size int64, minLength, maxLength int6 return bldr.NewArray() } +func (r *RandomArrayGenerator) StringView(size int64, minLength, maxLength int64, nullProb float64) arrow.Array { + return r.generateBinaryView(arrow.BinaryTypes.StringView, size, minLength, maxLength, nullProb) +} + +func (r *RandomArrayGenerator) generateBinaryView(dt arrow.DataType, size int64, minLength, maxLength int64, nullProb float64) arrow.Array { + lengths := r.Int32(size, int32(minLength), int32(maxLength), nullProb).(*array.Int32) + defer lengths.Release() + + bldr := array.NewBuilder(r.mem, dt).(array.StringLikeBuilder) + defer bldr.Release() + + r.extra++ + dist := rand.New(rand.NewSource(r.seed + r.extra)) + + buf := make([]byte, 0, maxLength) + gen := func(n int32) string { + out := buf[:n] + for i := range out { + out[i] = uint8(dist.Int31n(int32('z')-int32('A')+1) + int32('A')) + } + return string(out) + } + + for i := 0; i < lengths.Len(); i++ { + if lengths.IsNull(i) { + bldr.AppendNull() + continue + } + bldr.Append(gen(lengths.Value(i))) + } + + return bldr.NewArray() +} + func (r *RandomArrayGenerator) Numeric(dt arrow.Type, size int64, min, max int64, nullprob float64) arrow.Array { switch dt { case arrow.INT8: diff --git a/go/arrow/internal/testing/tools/bits_test.go b/go/arrow/internal/testing/tools/bits_test.go index 7d0fccb5f999e..6ad8ac5b12b68 100644 --- a/go/arrow/internal/testing/tools/bits_test.go +++ b/go/arrow/internal/testing/tools/bits_test.go @@ -20,7 +20,7 @@ 
import ( "fmt" "testing" - "github.com/apache/arrow/go/v14/arrow/internal/testing/tools" + "github.com/apache/arrow/go/v15/arrow/internal/testing/tools" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/internal/testing/tools/data_types.go b/go/arrow/internal/testing/tools/data_types.go index 654780bc3e853..161cfa431491d 100644 --- a/go/arrow/internal/testing/tools/data_types.go +++ b/go/arrow/internal/testing/tools/data_types.go @@ -21,8 +21,8 @@ package tools import ( "reflect" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/float16" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/float16" "golang.org/x/exp/constraints" ) diff --git a/go/arrow/internal/utils.go b/go/arrow/internal/utils.go index 265f030dfd19a..619eebd97dc78 100644 --- a/go/arrow/internal/utils.go +++ b/go/arrow/internal/utils.go @@ -17,8 +17,8 @@ package internal import ( - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/internal/flatbuf" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/internal/flatbuf" ) const CurMetadataVersion = flatbuf.MetadataVersionV5 diff --git a/go/arrow/ipc/cmd/arrow-cat/main.go b/go/arrow/ipc/cmd/arrow-cat/main.go index af53c9b4e5f44..0251b08c09bff 100644 --- a/go/arrow/ipc/cmd/arrow-cat/main.go +++ b/go/arrow/ipc/cmd/arrow-cat/main.go @@ -63,8 +63,8 @@ import ( "log" "os" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/ipc" + "github.com/apache/arrow/go/v15/arrow/memory" ) func main() { diff --git a/go/arrow/ipc/cmd/arrow-cat/main_test.go b/go/arrow/ipc/cmd/arrow-cat/main_test.go index 22e4f6ffe0778..b6528b85b416d 100644 --- a/go/arrow/ipc/cmd/arrow-cat/main_test.go +++ b/go/arrow/ipc/cmd/arrow-cat/main_test.go @@ -23,10 +23,10 @@ import ( "os" "testing" - "github.com/apache/arrow/go/v14/arrow" - 
"github.com/apache/arrow/go/v14/arrow/internal/arrdata" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/internal/arrdata" + "github.com/apache/arrow/go/v15/arrow/ipc" + "github.com/apache/arrow/go/v15/arrow/memory" ) func TestCatStream(t *testing.T) { diff --git a/go/arrow/ipc/cmd/arrow-file-to-stream/main.go b/go/arrow/ipc/cmd/arrow-file-to-stream/main.go index 7e86263d6d5b8..6187b94465211 100644 --- a/go/arrow/ipc/cmd/arrow-file-to-stream/main.go +++ b/go/arrow/ipc/cmd/arrow-file-to-stream/main.go @@ -24,9 +24,9 @@ import ( "log" "os" - "github.com/apache/arrow/go/v14/arrow/arrio" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/arrio" + "github.com/apache/arrow/go/v15/arrow/ipc" + "github.com/apache/arrow/go/v15/arrow/memory" ) func main() { diff --git a/go/arrow/ipc/cmd/arrow-file-to-stream/main_test.go b/go/arrow/ipc/cmd/arrow-file-to-stream/main_test.go index 30db24715386f..2f66a8b48c59d 100644 --- a/go/arrow/ipc/cmd/arrow-file-to-stream/main_test.go +++ b/go/arrow/ipc/cmd/arrow-file-to-stream/main_test.go @@ -21,8 +21,8 @@ import ( "os" "testing" - "github.com/apache/arrow/go/v14/arrow/internal/arrdata" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/internal/arrdata" + "github.com/apache/arrow/go/v15/arrow/memory" ) func TestFileToStream(t *testing.T) { diff --git a/go/arrow/ipc/cmd/arrow-json-integration-test/main.go b/go/arrow/ipc/cmd/arrow-json-integration-test/main.go index 91ba6a4ad9b61..7db2fa1d1c5db 100644 --- a/go/arrow/ipc/cmd/arrow-json-integration-test/main.go +++ b/go/arrow/ipc/cmd/arrow-json-integration-test/main.go @@ -22,12 +22,12 @@ import ( "log" "os" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/arrio" - 
"github.com/apache/arrow/go/v14/arrow/internal/arrjson" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/internal/types" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/arrio" + "github.com/apache/arrow/go/v15/arrow/internal/arrjson" + "github.com/apache/arrow/go/v15/arrow/ipc" + "github.com/apache/arrow/go/v15/internal/types" ) func main() { diff --git a/go/arrow/ipc/cmd/arrow-json-integration-test/main_test.go b/go/arrow/ipc/cmd/arrow-json-integration-test/main_test.go index 140f9f62478c0..eb702f0aa2bc5 100644 --- a/go/arrow/ipc/cmd/arrow-json-integration-test/main_test.go +++ b/go/arrow/ipc/cmd/arrow-json-integration-test/main_test.go @@ -20,8 +20,8 @@ import ( "os" "testing" - "github.com/apache/arrow/go/v14/arrow/internal/arrdata" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/internal/arrdata" + "github.com/apache/arrow/go/v15/arrow/memory" ) func TestIntegration(t *testing.T) { diff --git a/go/arrow/ipc/cmd/arrow-ls/main.go b/go/arrow/ipc/cmd/arrow-ls/main.go index f739c9ae7da8e..4230ae2449985 100644 --- a/go/arrow/ipc/cmd/arrow-ls/main.go +++ b/go/arrow/ipc/cmd/arrow-ls/main.go @@ -61,8 +61,8 @@ import ( "log" "os" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/ipc" + "github.com/apache/arrow/go/v15/arrow/memory" ) func main() { diff --git a/go/arrow/ipc/cmd/arrow-ls/main_test.go b/go/arrow/ipc/cmd/arrow-ls/main_test.go index 2c038dae97b52..bda1c2dc4d3dc 100644 --- a/go/arrow/ipc/cmd/arrow-ls/main_test.go +++ b/go/arrow/ipc/cmd/arrow-ls/main_test.go @@ -23,10 +23,10 @@ import ( "os" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/internal/arrdata" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + 
"github.com/apache/arrow/go/v15/arrow/internal/arrdata" + "github.com/apache/arrow/go/v15/arrow/ipc" + "github.com/apache/arrow/go/v15/arrow/memory" ) func TestLsStream(t *testing.T) { diff --git a/go/arrow/ipc/cmd/arrow-stream-to-file/main.go b/go/arrow/ipc/cmd/arrow-stream-to-file/main.go index 758231ea21cf5..7ed3f6a281d9f 100644 --- a/go/arrow/ipc/cmd/arrow-stream-to-file/main.go +++ b/go/arrow/ipc/cmd/arrow-stream-to-file/main.go @@ -24,9 +24,9 @@ import ( "log" "os" - "github.com/apache/arrow/go/v14/arrow/arrio" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/arrio" + "github.com/apache/arrow/go/v15/arrow/ipc" + "github.com/apache/arrow/go/v15/arrow/memory" ) func main() { diff --git a/go/arrow/ipc/cmd/arrow-stream-to-file/main_test.go b/go/arrow/ipc/cmd/arrow-stream-to-file/main_test.go index 4354b46e8964b..73196551054ac 100644 --- a/go/arrow/ipc/cmd/arrow-stream-to-file/main_test.go +++ b/go/arrow/ipc/cmd/arrow-stream-to-file/main_test.go @@ -21,8 +21,8 @@ import ( "os" "testing" - "github.com/apache/arrow/go/v14/arrow/internal/arrdata" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/internal/arrdata" + "github.com/apache/arrow/go/v15/arrow/memory" ) func TestStreamToFile(t *testing.T) { diff --git a/go/arrow/ipc/compression.go b/go/arrow/ipc/compression.go index 73fb916500804..8856b732f9c5d 100644 --- a/go/arrow/ipc/compression.go +++ b/go/arrow/ipc/compression.go @@ -19,9 +19,9 @@ package ipc import ( "io" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/internal/flatbuf" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/internal/flatbuf" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/klauspost/compress/zstd" "github.com/pierrec/lz4/v4" ) diff --git a/go/arrow/ipc/endian_swap.go 
b/go/arrow/ipc/endian_swap.go index d98fec1089f04..35ba0e4e764f9 100644 --- a/go/arrow/ipc/endian_swap.go +++ b/go/arrow/ipc/endian_swap.go @@ -18,11 +18,12 @@ package ipc import ( "errors" + "fmt" "math/bits" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" ) // swap the endianness of the array's buffers as needed in-place to save @@ -119,7 +120,10 @@ func swapType(dt arrow.DataType, data *array.Data) (err error) { return swapType(dt.IndexType, data) case arrow.FixedWidthDataType: byteSwapBuffer(dt.BitWidth(), data.Buffers()[1]) + default: + err = fmt.Errorf("%w: swapping endianness of %s", arrow.ErrNotImplemented, dt) } + return } diff --git a/go/arrow/ipc/endian_swap_test.go b/go/arrow/ipc/endian_swap_test.go index 4f02bb53d61a4..1c724103f4441 100644 --- a/go/arrow/ipc/endian_swap_test.go +++ b/go/arrow/ipc/endian_swap_test.go @@ -20,11 +20,11 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/endian" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/types" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/endian" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/types" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/go/arrow/ipc/file_reader.go b/go/arrow/ipc/file_reader.go index 10cb2cae764e6..1c7eb31799cfa 100644 --- a/go/arrow/ipc/file_reader.go +++ b/go/arrow/ipc/file_reader.go @@ -23,14 +23,14 @@ import ( "fmt" "io" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - 
"github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/endian" - "github.com/apache/arrow/go/v14/arrow/internal" - "github.com/apache/arrow/go/v14/arrow/internal/dictutils" - "github.com/apache/arrow/go/v14/arrow/internal/flatbuf" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/endian" + "github.com/apache/arrow/go/v15/arrow/internal" + "github.com/apache/arrow/go/v15/arrow/internal/dictutils" + "github.com/apache/arrow/go/v15/arrow/internal/flatbuf" + "github.com/apache/arrow/go/v15/arrow/memory" ) // FileReader is an Arrow file reader. @@ -430,13 +430,18 @@ func (src *ipcSource) fieldMetadata(i int) *flatbuf.FieldNode { return &node } +func (src *ipcSource) variadicCount(i int) int64 { + return src.meta.VariadicBufferCounts(i) +} + type arrayLoaderContext struct { - src ipcSource - ifield int - ibuffer int - max int - memo *dictutils.Memo - version MetadataVersion + src ipcSource + ifield int + ibuffer int + ivariadic int + max int + memo *dictutils.Memo + version MetadataVersion } func (ctx *arrayLoaderContext) field() *flatbuf.FieldNode { @@ -451,6 +456,12 @@ func (ctx *arrayLoaderContext) buffer() *memory.Buffer { return buf } +func (ctx *arrayLoaderContext) variadic() int64 { + v := ctx.src.variadicCount(ctx.ivariadic) + ctx.ivariadic++ + return v +} + func (ctx *arrayLoaderContext) loadArray(dt arrow.DataType) arrow.ArrayData { switch dt := dt.(type) { case *arrow.NullType: @@ -476,6 +487,9 @@ func (ctx *arrayLoaderContext) loadArray(dt arrow.DataType) arrow.ArrayData { case *arrow.BinaryType, *arrow.StringType, *arrow.LargeStringType, *arrow.LargeBinaryType: return ctx.loadBinary(dt) + case arrow.BinaryViewDataType: + return ctx.loadBinaryView(dt) + case *arrow.FixedSizeBinaryType: return ctx.loadFixedSizeBinary(dt) @@ -582,6 +596,18 @@ func (ctx 
*arrayLoaderContext) loadBinary(dt arrow.DataType) arrow.ArrayData { return array.NewData(dt, int(field.Length()), buffers, nil, int(field.NullCount()), 0) } +func (ctx *arrayLoaderContext) loadBinaryView(dt arrow.DataType) arrow.ArrayData { + nVariadicBufs := ctx.variadic() + field, buffers := ctx.loadCommon(dt.ID(), 2+int(nVariadicBufs)) + buffers = append(buffers, ctx.buffer()) + for i := 0; i < int(nVariadicBufs); i++ { + buffers = append(buffers, ctx.buffer()) + } + defer releaseBuffers(buffers) + + return array.NewData(dt, int(field.Length()), buffers, nil, int(field.NullCount()), 0) +} + func (ctx *arrayLoaderContext) loadFixedSizeBinary(dt *arrow.FixedSizeBinaryType) arrow.ArrayData { field, buffers := ctx.loadCommon(dt.ID(), 2) buffers = append(buffers, ctx.buffer()) diff --git a/go/arrow/ipc/file_test.go b/go/arrow/ipc/file_test.go index 1b22719dc8097..5f4dac1f899bb 100644 --- a/go/arrow/ipc/file_test.go +++ b/go/arrow/ipc/file_test.go @@ -21,9 +21,9 @@ import ( "os" "testing" - "github.com/apache/arrow/go/v14/arrow/internal/arrdata" - "github.com/apache/arrow/go/v14/arrow/internal/flatbuf" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/internal/arrdata" + "github.com/apache/arrow/go/v15/arrow/internal/flatbuf" + "github.com/apache/arrow/go/v15/arrow/memory" ) func TestFile(t *testing.T) { diff --git a/go/arrow/ipc/file_writer.go b/go/arrow/ipc/file_writer.go index 12384225b70a1..55e4d7c2dc22c 100644 --- a/go/arrow/ipc/file_writer.go +++ b/go/arrow/ipc/file_writer.go @@ -21,11 +21,11 @@ import ( "fmt" "io" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/internal/dictutils" - "github.com/apache/arrow/go/v14/arrow/internal/flatbuf" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/internal/dictutils" + 
"github.com/apache/arrow/go/v15/arrow/internal/flatbuf" + "github.com/apache/arrow/go/v15/arrow/memory" ) // PayloadWriter is an interface for injecting a different payloadwriter diff --git a/go/arrow/ipc/ipc.go b/go/arrow/ipc/ipc.go index 6c04b6f5ad2d6..8cf56e8331fcb 100644 --- a/go/arrow/ipc/ipc.go +++ b/go/arrow/ipc/ipc.go @@ -19,10 +19,10 @@ package ipc import ( "io" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/arrio" - "github.com/apache/arrow/go/v14/arrow/internal/flatbuf" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/arrio" + "github.com/apache/arrow/go/v15/arrow/internal/flatbuf" + "github.com/apache/arrow/go/v15/arrow/memory" ) const ( diff --git a/go/arrow/ipc/ipc_test.go b/go/arrow/ipc/ipc_test.go index 551d325dc1212..d02d8734b56d4 100644 --- a/go/arrow/ipc/ipc_test.go +++ b/go/arrow/ipc/ipc_test.go @@ -29,10 +29,10 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/ipc" + "github.com/apache/arrow/go/v15/arrow/memory" ) func TestArrow12072(t *testing.T) { diff --git a/go/arrow/ipc/message.go b/go/arrow/ipc/message.go index c5d0ec68d19c4..5295c5df30137 100644 --- a/go/arrow/ipc/message.go +++ b/go/arrow/ipc/message.go @@ -22,20 +22,20 @@ import ( "io" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/internal/flatbuf" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/internal/flatbuf" + "github.com/apache/arrow/go/v15/arrow/memory" ) // 
MetadataVersion represents the Arrow metadata version. type MetadataVersion flatbuf.MetadataVersion const ( - MetadataV1 = MetadataVersion(flatbuf.MetadataVersionV1) // version for Arrow-0.1.0 - MetadataV2 = MetadataVersion(flatbuf.MetadataVersionV2) // version for Arrow-0.2.0 - MetadataV3 = MetadataVersion(flatbuf.MetadataVersionV3) // version for Arrow-0.3.0 to 0.7.1 - MetadataV4 = MetadataVersion(flatbuf.MetadataVersionV4) // version for >= Arrow-0.8.0 - MetadataV5 = MetadataVersion(flatbuf.MetadataVersionV5) // version for >= Arrow-1.0.0, backward compatible with v4 + MetadataV1 = MetadataVersion(flatbuf.MetadataVersionV1) // version for Arrow Format-0.1.0 + MetadataV2 = MetadataVersion(flatbuf.MetadataVersionV2) // version for Arrow Format-0.2.0 + MetadataV3 = MetadataVersion(flatbuf.MetadataVersionV3) // version for Arrow Format-0.3.0 to 0.7.1 + MetadataV4 = MetadataVersion(flatbuf.MetadataVersionV4) // version for >= Arrow Format-0.8.0 + MetadataV5 = MetadataVersion(flatbuf.MetadataVersionV5) // version for >= Arrow Format-1.0.0, backward compatible with v4 ) func (m MetadataVersion) String() string { diff --git a/go/arrow/ipc/message_test.go b/go/arrow/ipc/message_test.go index cc41fdd3fe13d..912d112229f0c 100644 --- a/go/arrow/ipc/message_test.go +++ b/go/arrow/ipc/message_test.go @@ -22,9 +22,9 @@ import ( "io" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" ) func TestMessageReaderBodyInAllocator(t *testing.T) { diff --git a/go/arrow/ipc/metadata.go b/go/arrow/ipc/metadata.go index 9bab47d6fa0cd..54ef58753a173 100644 --- a/go/arrow/ipc/metadata.go +++ b/go/arrow/ipc/metadata.go @@ -23,11 +23,11 @@ import ( "io" "sort" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/endian" - 
"github.com/apache/arrow/go/v14/arrow/internal/dictutils" - "github.com/apache/arrow/go/v14/arrow/internal/flatbuf" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/endian" + "github.com/apache/arrow/go/v15/arrow/internal/dictutils" + "github.com/apache/arrow/go/v15/arrow/internal/flatbuf" + "github.com/apache/arrow/go/v15/arrow/memory" flatbuffers "github.com/google/flatbuffers/go" ) @@ -323,6 +323,16 @@ func (fv *fieldVisitor) visit(field arrow.Field) { flatbuf.LargeUtf8Start(fv.b) fv.offset = flatbuf.LargeUtf8End(fv.b) + case *arrow.BinaryViewType: + fv.dtype = flatbuf.TypeBinaryView + flatbuf.BinaryViewStart(fv.b) + fv.offset = flatbuf.BinaryViewEnd(fv.b) + + case *arrow.StringViewType: + fv.dtype = flatbuf.TypeUtf8View + flatbuf.Utf8ViewStart(fv.b) + fv.offset = flatbuf.Utf8ViewEnd(fv.b) + case *arrow.Date32Type: fv.dtype = flatbuf.TypeDate flatbuf.DateStart(fv.b) @@ -713,6 +723,12 @@ func concreteTypeFromFB(typ flatbuf.Type, data flatbuffers.Table, children []arr case flatbuf.TypeLargeUtf8: return arrow.BinaryTypes.LargeString, nil + case flatbuf.TypeUtf8View: + return arrow.BinaryTypes.StringView, nil + + case flatbuf.TypeBinaryView: + return arrow.BinaryTypes.BinaryView, nil + case flatbuf.TypeBool: return arrow.FixedWidthTypes.Boolean, nil @@ -1168,15 +1184,15 @@ func writeFileFooter(schema *arrow.Schema, dicts, recs []fileBlock, w io.Writer) return err } -func writeRecordMessage(mem memory.Allocator, size, bodyLength int64, fields []fieldMetadata, meta []bufferMetadata, codec flatbuf.CompressionType) *memory.Buffer { +func writeRecordMessage(mem memory.Allocator, size, bodyLength int64, fields []fieldMetadata, meta []bufferMetadata, codec flatbuf.CompressionType, variadicCounts []int64) *memory.Buffer { b := flatbuffers.NewBuilder(0) - recFB := recordToFB(b, size, bodyLength, fields, meta, codec) + recFB := recordToFB(b, size, bodyLength, fields, meta, codec, variadicCounts) 
return writeMessageFB(b, mem, flatbuf.MessageHeaderRecordBatch, recFB, bodyLength) } -func writeDictionaryMessage(mem memory.Allocator, id int64, isDelta bool, size, bodyLength int64, fields []fieldMetadata, meta []bufferMetadata, codec flatbuf.CompressionType) *memory.Buffer { +func writeDictionaryMessage(mem memory.Allocator, id int64, isDelta bool, size, bodyLength int64, fields []fieldMetadata, meta []bufferMetadata, codec flatbuf.CompressionType, variadicCounts []int64) *memory.Buffer { b := flatbuffers.NewBuilder(0) - recFB := recordToFB(b, size, bodyLength, fields, meta, codec) + recFB := recordToFB(b, size, bodyLength, fields, meta, codec, variadicCounts) flatbuf.DictionaryBatchStart(b) flatbuf.DictionaryBatchAddId(b, id) @@ -1186,7 +1202,7 @@ func writeDictionaryMessage(mem memory.Allocator, id int64, isDelta bool, size, return writeMessageFB(b, mem, flatbuf.MessageHeaderDictionaryBatch, dictFB, bodyLength) } -func recordToFB(b *flatbuffers.Builder, size, bodyLength int64, fields []fieldMetadata, meta []bufferMetadata, codec flatbuf.CompressionType) flatbuffers.UOffsetT { +func recordToFB(b *flatbuffers.Builder, size, bodyLength int64, fields []fieldMetadata, meta []bufferMetadata, codec flatbuf.CompressionType, variadicCounts []int64) flatbuffers.UOffsetT { fieldsFB := writeFieldNodes(b, fields, flatbuf.RecordBatchStartNodesVector) metaFB := writeBuffers(b, meta, flatbuf.RecordBatchStartBuffersVector) var bodyCompressFB flatbuffers.UOffsetT @@ -1194,10 +1210,24 @@ func recordToFB(b *flatbuffers.Builder, size, bodyLength int64, fields []fieldMe bodyCompressFB = writeBodyCompression(b, codec) } + var vcFB *flatbuffers.UOffsetT + if len(variadicCounts) > 0 { + flatbuf.RecordBatchStartVariadicBufferCountsVector(b, len(variadicCounts)) + for i := len(variadicCounts) - 1; i >= 0; i-- { + b.PrependInt64(variadicCounts[i]) + } + vcFBVal := b.EndVector(len(variadicCounts)) + vcFB = &vcFBVal + } + flatbuf.RecordBatchStart(b) flatbuf.RecordBatchAddLength(b, size) 
flatbuf.RecordBatchAddNodes(b, fieldsFB) flatbuf.RecordBatchAddBuffers(b, metaFB) + if vcFB != nil { + flatbuf.RecordBatchAddVariadicBufferCounts(b, *vcFB) + } + if codec != -1 { flatbuf.RecordBatchAddCompression(b, bodyCompressFB) } diff --git a/go/arrow/ipc/metadata_test.go b/go/arrow/ipc/metadata_test.go index 3038abdf5f791..a35068813ba56 100644 --- a/go/arrow/ipc/metadata_test.go +++ b/go/arrow/ipc/metadata_test.go @@ -21,12 +21,12 @@ import ( "reflect" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/internal/dictutils" - "github.com/apache/arrow/go/v14/arrow/internal/flatbuf" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/types" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/internal/dictutils" + "github.com/apache/arrow/go/v15/arrow/internal/flatbuf" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/types" flatbuffers "github.com/google/flatbuffers/go" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/ipc/reader.go b/go/arrow/ipc/reader.go index 1f684c1f6c034..826062fafdb46 100644 --- a/go/arrow/ipc/reader.go +++ b/go/arrow/ipc/reader.go @@ -23,13 +23,13 @@ import ( "io" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/endian" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/internal/dictutils" - "github.com/apache/arrow/go/v14/arrow/internal/flatbuf" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/endian" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/internal/dictutils" + 
"github.com/apache/arrow/go/v15/arrow/internal/flatbuf" + "github.com/apache/arrow/go/v15/arrow/memory" ) // Reader reads records from an io.Reader. diff --git a/go/arrow/ipc/reader_test.go b/go/arrow/ipc/reader_test.go index 229374e94ec74..f00f3bb3da476 100644 --- a/go/arrow/ipc/reader_test.go +++ b/go/arrow/ipc/reader_test.go @@ -20,9 +20,9 @@ import ( "bytes" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/go/arrow/ipc/stream_test.go b/go/arrow/ipc/stream_test.go index d8bbd8f04b14d..201bfb82a5677 100644 --- a/go/arrow/ipc/stream_test.go +++ b/go/arrow/ipc/stream_test.go @@ -22,9 +22,9 @@ import ( "strconv" "testing" - "github.com/apache/arrow/go/v14/arrow/internal/arrdata" - "github.com/apache/arrow/go/v14/arrow/internal/flatbuf" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/internal/arrdata" + "github.com/apache/arrow/go/v15/arrow/internal/flatbuf" + "github.com/apache/arrow/go/v15/arrow/memory" ) func TestStream(t *testing.T) { diff --git a/go/arrow/ipc/writer.go b/go/arrow/ipc/writer.go index a97f47ef4aa43..e9d59f0e35e00 100644 --- a/go/arrow/ipc/writer.go +++ b/go/arrow/ipc/writer.go @@ -26,14 +26,14 @@ import ( "sync" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/internal" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/internal/dictutils" - "github.com/apache/arrow/go/v14/arrow/internal/flatbuf" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + 
"github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/internal" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/internal/dictutils" + "github.com/apache/arrow/go/v15/arrow/internal/flatbuf" + "github.com/apache/arrow/go/v15/arrow/memory" ) type swriter struct { @@ -277,7 +277,7 @@ type dictEncoder struct { } func (d *dictEncoder) encodeMetadata(p *Payload, isDelta bool, id, nrows int64) error { - p.meta = writeDictionaryMessage(d.mem, id, isDelta, nrows, p.size, d.fields, d.meta, d.codec) + p.meta = writeDictionaryMessage(d.mem, id, isDelta, nrows, p.size, d.fields, d.meta, d.codec, d.variadicCounts) return nil } @@ -300,8 +300,9 @@ func (d *dictEncoder) Encode(p *Payload, id int64, isDelta bool, dict arrow.Arra type recordEncoder struct { mem memory.Allocator - fields []fieldMetadata - meta []bufferMetadata + fields []fieldMetadata + meta []bufferMetadata + variadicCounts []int64 depth int64 start int64 @@ -602,6 +603,33 @@ func (w *recordEncoder) visit(p *Payload, arr arrow.Array) error { p.body = append(p.body, voffsets) p.body = append(p.body, values) + case arrow.BinaryViewDataType: + data := arr.Data() + values := data.Buffers()[1] + arrLen := int64(arr.Len()) + typeWidth := int64(arrow.ViewHeaderSizeBytes) + minLength := paddedLength(arrLen*typeWidth, kArrowAlignment) + + switch { + case needTruncate(int64(data.Offset()), values, minLength): + // non-zero offset: slice the buffer + offset := data.Offset() * int(typeWidth) + // send padding if available + len := int(minI64(bitutil.CeilByte64(arrLen*typeWidth), int64(values.Len()-offset))) + values = memory.SliceBuffer(values, offset, len) + default: + if values != nil { + values.Retain() + } + } + p.body = append(p.body, values) + + w.variadicCounts = append(w.variadicCounts, int64(len(data.Buffers())-2)) + for _, b := range data.Buffers()[2:] { + b.Retain() + p.body = append(p.body, 
b) + } + case *arrow.StructType: w.depth-- arr := arr.(*array.Struct) @@ -946,7 +974,7 @@ func (w *recordEncoder) Encode(p *Payload, rec arrow.Record) error { } func (w *recordEncoder) encodeMetadata(p *Payload, nrows int64) error { - p.meta = writeRecordMessage(w.mem, nrows, p.size, w.fields, w.meta, w.codec) + p.meta = writeRecordMessage(w.mem, nrows, p.size, w.fields, w.meta, w.codec, w.variadicCounts) return nil } diff --git a/go/arrow/ipc/writer_test.go b/go/arrow/ipc/writer_test.go index da461c3d52272..ea7592554c888 100644 --- a/go/arrow/ipc/writer_test.go +++ b/go/arrow/ipc/writer_test.go @@ -24,11 +24,11 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/internal/flatbuf" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/internal/flatbuf" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/go/arrow/math/float64.go b/go/arrow/math/float64.go index 81de594df6ce6..8d72ecf1b2668 100644 --- a/go/arrow/math/float64.go +++ b/go/arrow/math/float64.go @@ -19,7 +19,7 @@ package math import ( - "github.com/apache/arrow/go/v14/arrow/array" + "github.com/apache/arrow/go/v15/arrow/array" ) type Float64Funcs struct { diff --git a/go/arrow/math/float64_avx2_amd64.go b/go/arrow/math/float64_avx2_amd64.go index 8a131a2bc8fc5..e78d4affe9f6f 100644 --- a/go/arrow/math/float64_avx2_amd64.go +++ b/go/arrow/math/float64_avx2_amd64.go @@ -24,7 +24,7 @@ package math import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow/array" + "github.com/apache/arrow/go/v15/arrow/array" ) //go:noescape diff --git a/go/arrow/math/float64_neon_arm64.go 
b/go/arrow/math/float64_neon_arm64.go index ad9b6435b26fc..a5013960d9ec7 100755 --- a/go/arrow/math/float64_neon_arm64.go +++ b/go/arrow/math/float64_neon_arm64.go @@ -24,7 +24,7 @@ package math import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow/array" + "github.com/apache/arrow/go/v15/arrow/array" ) //go:noescape diff --git a/go/arrow/math/float64_sse4_amd64.go b/go/arrow/math/float64_sse4_amd64.go index a2be5de43f5bb..cdd88f91c2c2b 100644 --- a/go/arrow/math/float64_sse4_amd64.go +++ b/go/arrow/math/float64_sse4_amd64.go @@ -24,7 +24,7 @@ package math import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow/array" + "github.com/apache/arrow/go/v15/arrow/array" ) //go:noescape diff --git a/go/arrow/math/float64_test.go b/go/arrow/math/float64_test.go index b10214f6d892f..3a7c247c5df8c 100644 --- a/go/arrow/math/float64_test.go +++ b/go/arrow/math/float64_test.go @@ -21,9 +21,9 @@ package math_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/math" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/math" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/math/int64.go b/go/arrow/math/int64.go index 2adff970c2233..b8236dfbb6552 100644 --- a/go/arrow/math/int64.go +++ b/go/arrow/math/int64.go @@ -19,7 +19,7 @@ package math import ( - "github.com/apache/arrow/go/v14/arrow/array" + "github.com/apache/arrow/go/v15/arrow/array" ) type Int64Funcs struct { diff --git a/go/arrow/math/int64_avx2_amd64.go b/go/arrow/math/int64_avx2_amd64.go index ac0b2e1ff662a..5c16e790c1e55 100644 --- a/go/arrow/math/int64_avx2_amd64.go +++ b/go/arrow/math/int64_avx2_amd64.go @@ -24,7 +24,7 @@ package math import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow/array" + "github.com/apache/arrow/go/v15/arrow/array" ) //go:noescape diff --git a/go/arrow/math/int64_neon_arm64.go 
b/go/arrow/math/int64_neon_arm64.go index 4ab5fb771a526..00ec48b6e4735 100755 --- a/go/arrow/math/int64_neon_arm64.go +++ b/go/arrow/math/int64_neon_arm64.go @@ -24,7 +24,7 @@ package math import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow/array" + "github.com/apache/arrow/go/v15/arrow/array" ) //go:noescape diff --git a/go/arrow/math/int64_sse4_amd64.go b/go/arrow/math/int64_sse4_amd64.go index 85304210eb448..a894bad2ef6a3 100644 --- a/go/arrow/math/int64_sse4_amd64.go +++ b/go/arrow/math/int64_sse4_amd64.go @@ -24,7 +24,7 @@ package math import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow/array" + "github.com/apache/arrow/go/v15/arrow/array" ) //go:noescape diff --git a/go/arrow/math/int64_test.go b/go/arrow/math/int64_test.go index 06b42ffe9351b..3781fd3570785 100644 --- a/go/arrow/math/int64_test.go +++ b/go/arrow/math/int64_test.go @@ -21,9 +21,9 @@ package math_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/math" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/math" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/math/type.go.tmpl b/go/arrow/math/type.go.tmpl index 7ae142c46f12b..f1a4dfa1362f8 100644 --- a/go/arrow/math/type.go.tmpl +++ b/go/arrow/math/type.go.tmpl @@ -17,7 +17,7 @@ package math import ( - "github.com/apache/arrow/go/v14/arrow/array" + "github.com/apache/arrow/go/v15/arrow/array" ) {{$def := .D}} diff --git a/go/arrow/math/type_simd_amd64.go.tmpl b/go/arrow/math/type_simd_amd64.go.tmpl index 72d40ea386a8f..77dee758b05cd 100644 --- a/go/arrow/math/type_simd_amd64.go.tmpl +++ b/go/arrow/math/type_simd_amd64.go.tmpl @@ -21,7 +21,7 @@ package math import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow/array" + "github.com/apache/arrow/go/v15/arrow/array" ) {{$name := printf "%s_%s" .In.Type .D.arch}} diff --git 
a/go/arrow/math/type_simd_arm64.go.tmpl b/go/arrow/math/type_simd_arm64.go.tmpl index 72d40ea386a8f..77dee758b05cd 100755 --- a/go/arrow/math/type_simd_arm64.go.tmpl +++ b/go/arrow/math/type_simd_arm64.go.tmpl @@ -21,7 +21,7 @@ package math import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow/array" + "github.com/apache/arrow/go/v15/arrow/array" ) {{$name := printf "%s_%s" .In.Type .D.arch}} diff --git a/go/arrow/math/type_test.go.tmpl b/go/arrow/math/type_test.go.tmpl index 160fc34e4d4d4..969377e454e52 100644 --- a/go/arrow/math/type_test.go.tmpl +++ b/go/arrow/math/type_test.go.tmpl @@ -19,9 +19,9 @@ package math_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/math" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/math" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/math/uint64.go b/go/arrow/math/uint64.go index 64ac63a3d1853..b9a70360224ea 100644 --- a/go/arrow/math/uint64.go +++ b/go/arrow/math/uint64.go @@ -19,7 +19,7 @@ package math import ( - "github.com/apache/arrow/go/v14/arrow/array" + "github.com/apache/arrow/go/v15/arrow/array" ) type Uint64Funcs struct { diff --git a/go/arrow/math/uint64_avx2_amd64.go b/go/arrow/math/uint64_avx2_amd64.go index 2a35bebc980b3..c1f48efefd8eb 100644 --- a/go/arrow/math/uint64_avx2_amd64.go +++ b/go/arrow/math/uint64_avx2_amd64.go @@ -24,7 +24,7 @@ package math import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow/array" + "github.com/apache/arrow/go/v15/arrow/array" ) //go:noescape diff --git a/go/arrow/math/uint64_neon_arm64.go b/go/arrow/math/uint64_neon_arm64.go index f6588694bb1fd..37ca4ecad04e7 100755 --- a/go/arrow/math/uint64_neon_arm64.go +++ b/go/arrow/math/uint64_neon_arm64.go @@ -24,7 +24,7 @@ package math import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow/array" + 
"github.com/apache/arrow/go/v15/arrow/array" ) //go:noescape diff --git a/go/arrow/math/uint64_sse4_amd64.go b/go/arrow/math/uint64_sse4_amd64.go index 8c6621464f118..7163c1de171de 100644 --- a/go/arrow/math/uint64_sse4_amd64.go +++ b/go/arrow/math/uint64_sse4_amd64.go @@ -24,7 +24,7 @@ package math import ( "unsafe" - "github.com/apache/arrow/go/v14/arrow/array" + "github.com/apache/arrow/go/v15/arrow/array" ) //go:noescape diff --git a/go/arrow/math/uint64_test.go b/go/arrow/math/uint64_test.go index aac768e599cf8..e8ba42b59aa10 100644 --- a/go/arrow/math/uint64_test.go +++ b/go/arrow/math/uint64_test.go @@ -21,9 +21,9 @@ package math_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/math" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/math" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/memory/buffer.go b/go/arrow/memory/buffer.go index 5a2b4297031f0..ffdb41e3dbe9e 100644 --- a/go/arrow/memory/buffer.go +++ b/go/arrow/memory/buffer.go @@ -19,7 +19,7 @@ package memory import ( "sync/atomic" - "github.com/apache/arrow/go/v14/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/internal/debug" ) // Buffer is a wrapper type for a buffer of bytes. 
diff --git a/go/arrow/memory/buffer_test.go b/go/arrow/memory/buffer_test.go index 25a0b39848519..92bb071d0abde 100644 --- a/go/arrow/memory/buffer_test.go +++ b/go/arrow/memory/buffer_test.go @@ -19,7 +19,7 @@ package memory_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/memory/cgo_allocator.go b/go/arrow/memory/cgo_allocator.go index 85ee445216f6a..af25d1899a6fe 100644 --- a/go/arrow/memory/cgo_allocator.go +++ b/go/arrow/memory/cgo_allocator.go @@ -22,7 +22,7 @@ package memory import ( "runtime" - cga "github.com/apache/arrow/go/v14/arrow/memory/internal/cgoalloc" + cga "github.com/apache/arrow/go/v15/arrow/memory/internal/cgoalloc" ) // CgoArrowAllocator is an allocator which exposes the C++ memory pool class diff --git a/go/arrow/memory/default_mallocator.go b/go/arrow/memory/default_mallocator.go index 12ad0846677b1..4a9ef942fd08d 100644 --- a/go/arrow/memory/default_mallocator.go +++ b/go/arrow/memory/default_mallocator.go @@ -19,7 +19,7 @@ package memory import ( - "github.com/apache/arrow/go/v14/arrow/memory/mallocator" + "github.com/apache/arrow/go/v15/arrow/memory/mallocator" ) // DefaultAllocator is a default implementation of Allocator and can be used anywhere diff --git a/go/arrow/memory/default_mallocator_test.go b/go/arrow/memory/default_mallocator_test.go index b209ced68a067..5a38e8b4e843c 100644 --- a/go/arrow/memory/default_mallocator_test.go +++ b/go/arrow/memory/default_mallocator_test.go @@ -21,8 +21,8 @@ package memory_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/memory/mallocator" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/memory/mallocator" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/memory/mallocator/mallocator_test.go b/go/arrow/memory/mallocator/mallocator_test.go index 
5b09355033f70..91b74383ed494 100644 --- a/go/arrow/memory/mallocator/mallocator_test.go +++ b/go/arrow/memory/mallocator/mallocator_test.go @@ -23,7 +23,7 @@ import ( "fmt" "testing" - "github.com/apache/arrow/go/v14/arrow/memory/mallocator" + "github.com/apache/arrow/go/v15/arrow/memory/mallocator" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/memory/memory_test.go b/go/arrow/memory/memory_test.go index dff2e0bfd0712..adaa8359369be 100644 --- a/go/arrow/memory/memory_test.go +++ b/go/arrow/memory/memory_test.go @@ -19,7 +19,7 @@ package memory_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/memory" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/record.go b/go/arrow/record.go index d98c7732ef798..885af38034938 100644 --- a/go/arrow/record.go +++ b/go/arrow/record.go @@ -16,7 +16,7 @@ package arrow -import "github.com/apache/arrow/go/v14/internal/json" +import "github.com/apache/arrow/go/v15/internal/json" // Record is a collection of equal-length arrays matching a particular Schema. // Also known as a RecordBatch in the spec and in some implementations. 
diff --git a/go/arrow/scalar/append.go b/go/arrow/scalar/append.go index 9bcfe3e22dc63..fe8dd32e9eae5 100644 --- a/go/arrow/scalar/append.go +++ b/go/arrow/scalar/append.go @@ -21,11 +21,11 @@ package scalar import ( "fmt" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/float16" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/float16" "golang.org/x/exp/constraints" ) diff --git a/go/arrow/scalar/append_test.go b/go/arrow/scalar/append_test.go index 497a6a2d02933..e509bca1fb7b0 100644 --- a/go/arrow/scalar/append_test.go +++ b/go/arrow/scalar/append_test.go @@ -23,11 +23,11 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/internal/testing/tools" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/internal/testing/tools" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/scalar" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" diff --git a/go/arrow/scalar/binary.go b/go/arrow/scalar/binary.go index 3c041210fde9c..b6abe9cba7b1c 100644 --- a/go/arrow/scalar/binary.go +++ b/go/arrow/scalar/binary.go @@ -21,8 +21,8 @@ import ( "fmt" "unicode/utf8" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/memory" ) type 
BinaryScalar interface { diff --git a/go/arrow/scalar/compare.go b/go/arrow/scalar/compare.go index be7fa4d01cc19..b4a3fe08d26fe 100644 --- a/go/arrow/scalar/compare.go +++ b/go/arrow/scalar/compare.go @@ -16,7 +16,7 @@ package scalar -import "github.com/apache/arrow/go/v14/arrow" +import "github.com/apache/arrow/go/v15/arrow" // Equals returns true if two scalars are equal, which means they have the same // datatype, validity and value. diff --git a/go/arrow/scalar/nested.go b/go/arrow/scalar/nested.go index 87e84210f58a1..8250beb5ed90d 100644 --- a/go/arrow/scalar/nested.go +++ b/go/arrow/scalar/nested.go @@ -21,10 +21,10 @@ import ( "errors" "fmt" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" "golang.org/x/xerrors" ) diff --git a/go/arrow/scalar/numeric.gen.go b/go/arrow/scalar/numeric.gen.go index 90ae2a7004ce3..da088162f4017 100644 --- a/go/arrow/scalar/numeric.gen.go +++ b/go/arrow/scalar/numeric.gen.go @@ -24,9 +24,9 @@ import ( "reflect" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" ) type Int8 struct { diff --git a/go/arrow/scalar/numeric.gen_test.go b/go/arrow/scalar/numeric.gen_test.go index 987cd66c75f3c..807857c1c98c2 100644 --- a/go/arrow/scalar/numeric.gen_test.go +++ b/go/arrow/scalar/numeric.gen_test.go @@ -21,8 +21,8 @@ package scalar_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/scalar" + 
"github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/scalar" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/scalar/numeric.gen_test.go.tmpl b/go/arrow/scalar/numeric.gen_test.go.tmpl index c975cc9db6e1c..7f45898a20d29 100644 --- a/go/arrow/scalar/numeric.gen_test.go.tmpl +++ b/go/arrow/scalar/numeric.gen_test.go.tmpl @@ -19,8 +19,8 @@ package scalar_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/scalar" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/scalar/parse.go b/go/arrow/scalar/parse.go index fcffe1ea6adf8..5002f98a65c42 100644 --- a/go/arrow/scalar/parse.go +++ b/go/arrow/scalar/parse.go @@ -25,12 +25,12 @@ import ( "strings" "time" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/float16" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/arrow/memory" ) type TypeToScalar interface { diff --git a/go/arrow/scalar/scalar.go b/go/arrow/scalar/scalar.go index 395771fa89c94..8b0d3ace2ad78 100644 --- a/go/arrow/scalar/scalar.go +++ b/go/arrow/scalar/scalar.go @@ -26,16 +26,16 @@ import ( "strconv" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/encoded" - 
"github.com/apache/arrow/go/v14/arrow/endian" - "github.com/apache/arrow/go/v14/arrow/float16" - "github.com/apache/arrow/go/v14/arrow/internal/debug" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/encoded" + "github.com/apache/arrow/go/v15/arrow/endian" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/memory" "golang.org/x/xerrors" ) diff --git a/go/arrow/scalar/scalar_test.go b/go/arrow/scalar/scalar_test.go index 405571a14880d..ce8170301b0a2 100644 --- a/go/arrow/scalar/scalar_test.go +++ b/go/arrow/scalar/scalar_test.go @@ -25,12 +25,12 @@ import ( "testing" "time" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/scalar" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/scalar" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" diff --git a/go/arrow/scalar/temporal.go b/go/arrow/scalar/temporal.go index 880416f7dd5a3..ee13c84429e8e 100644 --- a/go/arrow/scalar/temporal.go +++ b/go/arrow/scalar/temporal.go @@ -22,7 +22,7 @@ import ( "time" "unsafe" - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" ) func temporalToString(s TemporalScalar) string { 
diff --git a/go/arrow/schema.go b/go/arrow/schema.go index a7fa434131925..e84f350a53637 100644 --- a/go/arrow/schema.go +++ b/go/arrow/schema.go @@ -21,7 +21,7 @@ import ( "sort" "strings" - "github.com/apache/arrow/go/v14/arrow/endian" + "github.com/apache/arrow/go/v15/arrow/endian" ) type Metadata struct { diff --git a/go/arrow/schema_test.go b/go/arrow/schema_test.go index 5e79723f90352..fddf1d7f131ec 100644 --- a/go/arrow/schema_test.go +++ b/go/arrow/schema_test.go @@ -21,7 +21,7 @@ import ( "reflect" "testing" - "github.com/apache/arrow/go/v14/arrow/endian" + "github.com/apache/arrow/go/v15/arrow/endian" "github.com/stretchr/testify/assert" ) diff --git a/go/arrow/table.go b/go/arrow/table.go index 5a68085f8df93..82dc283706b65 100644 --- a/go/arrow/table.go +++ b/go/arrow/table.go @@ -20,7 +20,7 @@ import ( "fmt" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/internal/debug" ) // Table represents a logical sequence of chunked arrays of equal length. It is diff --git a/go/arrow/tensor/numeric.gen.go b/go/arrow/tensor/numeric.gen.go index 5b7bbde150fe2..d207f0bfa2c9c 100644 --- a/go/arrow/tensor/numeric.gen.go +++ b/go/arrow/tensor/numeric.gen.go @@ -19,7 +19,7 @@ package tensor import ( - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" ) // Int8 is an n-dim array of int8s. 
diff --git a/go/arrow/tensor/numeric.gen.go.tmpl b/go/arrow/tensor/numeric.gen.go.tmpl index c693082192247..e03f986da3f8f 100644 --- a/go/arrow/tensor/numeric.gen.go.tmpl +++ b/go/arrow/tensor/numeric.gen.go.tmpl @@ -17,8 +17,8 @@ package tensor import ( - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" ) {{range .In}} diff --git a/go/arrow/tensor/numeric.gen_test.go b/go/arrow/tensor/numeric.gen_test.go index 00922a84b6b03..3a7c3570c216b 100644 --- a/go/arrow/tensor/numeric.gen_test.go +++ b/go/arrow/tensor/numeric.gen_test.go @@ -23,10 +23,10 @@ import ( "reflect" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/tensor" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/tensor" ) func TestTensorInt8(t *testing.T) { diff --git a/go/arrow/tensor/numeric.gen_test.go.tmpl b/go/arrow/tensor/numeric.gen_test.go.tmpl index 665bb8dc68da9..34f01b40d8084 100644 --- a/go/arrow/tensor/numeric.gen_test.go.tmpl +++ b/go/arrow/tensor/numeric.gen_test.go.tmpl @@ -21,10 +21,10 @@ import ( "reflect" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/tensor" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/tensor" ) {{range .In}} diff --git a/go/arrow/tensor/tensor.go b/go/arrow/tensor/tensor.go index 27c380345a498..1f2ed7e82141b 100644 --- a/go/arrow/tensor/tensor.go +++ b/go/arrow/tensor/tensor.go @@ -21,8 +21,8 @@ import ( "fmt" 
"sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/internal/debug" ) // Interface represents an n-dimensional array of numerical data. diff --git a/go/arrow/tensor/tensor_test.go b/go/arrow/tensor/tensor_test.go index 4cfe69f780421..552c4c1c223e0 100644 --- a/go/arrow/tensor/tensor_test.go +++ b/go/arrow/tensor/tensor_test.go @@ -21,10 +21,10 @@ import ( "reflect" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/tensor" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/tensor" ) func TestTensor(t *testing.T) { diff --git a/go/arrow/type_traits_boolean.go b/go/arrow/type_traits_boolean.go index 6a46bdec702ff..c164d45954fc0 100644 --- a/go/arrow/type_traits_boolean.go +++ b/go/arrow/type_traits_boolean.go @@ -17,7 +17,7 @@ package arrow import ( - "github.com/apache/arrow/go/v14/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/bitutil" ) type booleanTraits struct{} diff --git a/go/arrow/type_traits_decimal128.go b/go/arrow/type_traits_decimal128.go index d2d3aae371663..f573ad3c65a4c 100644 --- a/go/arrow/type_traits_decimal128.go +++ b/go/arrow/type_traits_decimal128.go @@ -20,8 +20,8 @@ import ( "reflect" "unsafe" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/endian" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/endian" ) // Decimal128 traits diff --git a/go/arrow/type_traits_decimal256.go b/go/arrow/type_traits_decimal256.go index 256ed68fffc8d..adf3cc3e0bc31 100644 --- a/go/arrow/type_traits_decimal256.go +++ b/go/arrow/type_traits_decimal256.go @@ -20,8 +20,8 @@ import ( 
"reflect" "unsafe" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/endian" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/endian" ) // Decimal256 traits diff --git a/go/arrow/type_traits_float16.go b/go/arrow/type_traits_float16.go index c40363d3785da..e59efd4c248d8 100644 --- a/go/arrow/type_traits_float16.go +++ b/go/arrow/type_traits_float16.go @@ -20,8 +20,8 @@ import ( "reflect" "unsafe" - "github.com/apache/arrow/go/v14/arrow/endian" - "github.com/apache/arrow/go/v14/arrow/float16" + "github.com/apache/arrow/go/v15/arrow/endian" + "github.com/apache/arrow/go/v15/arrow/float16" ) // Float16 traits diff --git a/go/arrow/type_traits_interval.go b/go/arrow/type_traits_interval.go index 35e6057090f10..5fbd7a5248918 100644 --- a/go/arrow/type_traits_interval.go +++ b/go/arrow/type_traits_interval.go @@ -20,8 +20,8 @@ import ( "reflect" "unsafe" - "github.com/apache/arrow/go/v14/arrow/endian" - "github.com/apache/arrow/go/v14/arrow/internal/debug" + "github.com/apache/arrow/go/v15/arrow/endian" + "github.com/apache/arrow/go/v15/arrow/internal/debug" ) var ( diff --git a/go/arrow/type_traits_numeric.gen.go b/go/arrow/type_traits_numeric.gen.go index 6edd75291155c..57606c0fce6df 100644 --- a/go/arrow/type_traits_numeric.gen.go +++ b/go/arrow/type_traits_numeric.gen.go @@ -23,7 +23,7 @@ import ( "reflect" "unsafe" - "github.com/apache/arrow/go/v14/arrow/endian" + "github.com/apache/arrow/go/v15/arrow/endian" ) var ( diff --git a/go/arrow/type_traits_numeric.gen.go.tmpl b/go/arrow/type_traits_numeric.gen.go.tmpl index ffae975c1aa15..c491047b51429 100644 --- a/go/arrow/type_traits_numeric.gen.go.tmpl +++ b/go/arrow/type_traits_numeric.gen.go.tmpl @@ -21,7 +21,7 @@ import ( "reflect" "unsafe" - "github.com/apache/arrow/go/v14/arrow/endian" + "github.com/apache/arrow/go/v15/arrow/endian" ) var ( diff --git a/go/arrow/type_traits_numeric.gen_test.go 
b/go/arrow/type_traits_numeric.gen_test.go index 74395485736ff..ac2d0726ed6fa 100644 --- a/go/arrow/type_traits_numeric.gen_test.go +++ b/go/arrow/type_traits_numeric.gen_test.go @@ -22,7 +22,7 @@ import ( "reflect" "testing" - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" ) func TestInt64Traits(t *testing.T) { diff --git a/go/arrow/type_traits_numeric.gen_test.go.tmpl b/go/arrow/type_traits_numeric.gen_test.go.tmpl index 96685f31327bd..4948f42cf542e 100644 --- a/go/arrow/type_traits_numeric.gen_test.go.tmpl +++ b/go/arrow/type_traits_numeric.gen_test.go.tmpl @@ -20,7 +20,7 @@ import ( "reflect" "testing" - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" ) {{- range .In}} diff --git a/go/arrow/type_traits_test.go b/go/arrow/type_traits_test.go index 85ad4da9d041d..0ae88b4bd82b6 100644 --- a/go/arrow/type_traits_test.go +++ b/go/arrow/type_traits_test.go @@ -22,10 +22,10 @@ import ( "reflect" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/float16" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/float16" ) func TestBooleanTraits(t *testing.T) { diff --git a/go/arrow/type_traits_timestamp.go b/go/arrow/type_traits_timestamp.go index 7c393b3559f82..c1a9aba3db386 100644 --- a/go/arrow/type_traits_timestamp.go +++ b/go/arrow/type_traits_timestamp.go @@ -20,7 +20,7 @@ import ( "reflect" "unsafe" - "github.com/apache/arrow/go/v14/arrow/endian" + "github.com/apache/arrow/go/v15/arrow/endian" ) var TimestampTraits timestampTraits diff --git a/go/arrow/type_traits_view.go b/go/arrow/type_traits_view.go new file mode 100644 index 0000000000000..c3846db294681 --- /dev/null +++ b/go/arrow/type_traits_view.go @@ -0,0 +1,53 @@ +// 
Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package arrow + +import ( + "reflect" + "unsafe" + + "github.com/apache/arrow/go/v15/arrow/endian" +) + +var ViewHeaderTraits viewHeaderTraits + +const ( + ViewHeaderSizeBytes = int(unsafe.Sizeof(ViewHeader{})) +) + +type viewHeaderTraits struct{} + +func (viewHeaderTraits) BytesRequired(n int) int { return ViewHeaderSizeBytes * n } + +func (viewHeaderTraits) PutValue(b []byte, v ViewHeader) { + endian.Native.PutUint32(b, uint32(v.size)) + copy(b[4:], v.data[:]) +} + +func (viewHeaderTraits) CastFromBytes(b []byte) (res []ViewHeader) { + h := (*reflect.SliceHeader)(unsafe.Pointer(&b)) + + return unsafe.Slice((*ViewHeader)(unsafe.Pointer(h.Data)), cap(b)/ViewHeaderSizeBytes)[:len(b)/ViewHeaderSizeBytes] +} + +func (viewHeaderTraits) CastToBytes(b []ViewHeader) (res []byte) { + h := (*reflect.SliceHeader)(unsafe.Pointer(&b)) + + return unsafe.Slice((*byte)(unsafe.Pointer(h.Data)), cap(b)*ViewHeaderSizeBytes)[:len(b)*ViewHeaderSizeBytes] +} + +func (viewHeaderTraits) Copy(dst, src []ViewHeader) { copy(dst, src) } diff --git a/go/arrow/util/byte_size.go b/go/arrow/util/byte_size.go index 141b78c459119..6d6fc021f8005 100644 --- a/go/arrow/util/byte_size.go +++ 
b/go/arrow/util/byte_size.go @@ -17,9 +17,9 @@ package util import ( - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" ) func isArrayDataNil(arrayData arrow.ArrayData) bool { diff --git a/go/arrow/util/byte_size_test.go b/go/arrow/util/byte_size_test.go index 052bf3adee158..a218c69558fe9 100644 --- a/go/arrow/util/byte_size_test.go +++ b/go/arrow/util/byte_size_test.go @@ -20,10 +20,10 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/arrow/util" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/arrow/util" "github.com/stretchr/testify/assert" ) diff --git a/go/go.mod b/go/go.mod index bf685eef910ed..a6c2af7025d32 100644 --- a/go/go.mod +++ b/go/go.mod @@ -14,7 +14,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-module github.com/apache/arrow/go/v14 +module github.com/apache/arrow/go/v15 go 1.20 @@ -40,7 +40,7 @@ require ( golang.org/x/tools v0.14.0 golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 gonum.org/v1/gonum v0.12.0 - google.golang.org/grpc v1.58.2 + google.golang.org/grpc v1.58.3 google.golang.org/protobuf v1.31.0 modernc.org/sqlite v1.21.2 ) diff --git a/go/go.sum b/go/go.sum index d1e8884e7a2f6..bdd499c3f5190 100644 --- a/go/go.sum +++ b/go/go.sum @@ -107,8 +107,8 @@ gonum.org/v1/gonum v0.12.0 h1:xKuo6hzt+gMav00meVPUlXwSdoEJP46BR+wdxQEFK2o= gonum.org/v1/gonum v0.12.0/go.mod h1:73TDxJfAAHeA8Mk9mf8NlIppyhQNo5GLTcYeqgo2lvY= google.golang.org/genproto/googleapis/rpc v0.0.0-20231002182017-d307bd883b97 h1:6GQBEOdGkX6MMTLT9V+TjtIRZCw9VPD5Z+yHY9wMgS0= google.golang.org/genproto/googleapis/rpc v0.0.0-20231002182017-d307bd883b97/go.mod h1:v7nGkzlmW8P3n/bKmWBn2WpBjpOEx8Q6gMueudAmKfY= -google.golang.org/grpc v1.58.2 h1:SXUpjxeVF3FKrTYQI4f4KvbGD5u2xccdYdurwowix5I= -google.golang.org/grpc v1.58.2/go.mod h1:tgX3ZQDlNJGU96V6yHh1T/JeoBQ2TXdr43YbYSsCJk0= +google.golang.org/grpc v1.58.3 h1:BjnpXut1btbtgN/6sp+brB2Kbm2LjNXnidYujAVbSoQ= +google.golang.org/grpc v1.58.3/go.mod h1:tgX3ZQDlNJGU96V6yHh1T/JeoBQ2TXdr43YbYSsCJk0= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= diff --git a/go/internal/bitutils/bit_block_counter.go b/go/internal/bitutils/bit_block_counter.go index 86818bfd45ab7..99eece34cd068 100644 --- a/go/internal/bitutils/bit_block_counter.go +++ b/go/internal/bitutils/bit_block_counter.go @@ -21,8 +21,8 @@ import ( "math/bits" "unsafe" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/internal/utils" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/internal/utils" ) func 
loadWord(byt []byte) uint64 { diff --git a/go/internal/bitutils/bit_block_counter_test.go b/go/internal/bitutils/bit_block_counter_test.go index ad5e989d1fa65..790105c290182 100644 --- a/go/internal/bitutils/bit_block_counter_test.go +++ b/go/internal/bitutils/bit_block_counter_test.go @@ -19,9 +19,9 @@ package bitutils_test import ( "testing" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/bitutils" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/bitutils" "github.com/stretchr/testify/assert" "golang.org/x/exp/rand" ) diff --git a/go/internal/bitutils/bit_run_reader.go b/go/internal/bitutils/bit_run_reader.go index a1686a4909b8b..f09149d7ec5df 100644 --- a/go/internal/bitutils/bit_run_reader.go +++ b/go/internal/bitutils/bit_run_reader.go @@ -22,9 +22,9 @@ import ( "math/bits" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/internal/utils" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/internal/utils" ) // BitRun represents a run of bits with the same value of length Len diff --git a/go/internal/bitutils/bit_run_reader_test.go b/go/internal/bitutils/bit_run_reader_test.go index 0e1500bff811a..7db76768a9476 100644 --- a/go/internal/bitutils/bit_run_reader_test.go +++ b/go/internal/bitutils/bit_run_reader_test.go @@ -21,9 +21,9 @@ import ( "testing" "unsafe" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/endian" - "github.com/apache/arrow/go/v14/internal/bitutils" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/endian" + "github.com/apache/arrow/go/v15/internal/bitutils" "github.com/stretchr/testify/assert" ) diff --git 
a/go/internal/bitutils/bit_set_run_reader.go b/go/internal/bitutils/bit_set_run_reader.go index a2269ffecb870..6764ca7912679 100644 --- a/go/internal/bitutils/bit_set_run_reader.go +++ b/go/internal/bitutils/bit_set_run_reader.go @@ -20,8 +20,8 @@ import ( "encoding/binary" "math/bits" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/internal/utils" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/internal/utils" ) // IsMultipleOf64 returns whether v is a multiple of 64. diff --git a/go/internal/bitutils/bit_set_run_reader_test.go b/go/internal/bitutils/bit_set_run_reader_test.go index 205954b3800a8..832993671ef6d 100644 --- a/go/internal/bitutils/bit_set_run_reader_test.go +++ b/go/internal/bitutils/bit_set_run_reader_test.go @@ -20,9 +20,9 @@ import ( "reflect" "testing" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/internal/bitutils" - "github.com/apache/arrow/go/v14/internal/utils" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/internal/bitutils" + "github.com/apache/arrow/go/v15/internal/utils" "github.com/stretchr/testify/suite" ) diff --git a/go/internal/bitutils/bitmap_generate.go b/go/internal/bitutils/bitmap_generate.go index 78219d8120bcc..08b5fceab57d3 100644 --- a/go/internal/bitutils/bitmap_generate.go +++ b/go/internal/bitutils/bitmap_generate.go @@ -16,7 +16,7 @@ package bitutils -import "github.com/apache/arrow/go/v14/arrow/bitutil" +import "github.com/apache/arrow/go/v15/arrow/bitutil" // GenerateBits writes sequential bits to a bitmap. 
Bits preceding the // initial start offset are preserved, bits following the bitmap may diff --git a/go/internal/bitutils/bitmap_generate_test.go b/go/internal/bitutils/bitmap_generate_test.go index 788b2198c3a78..c9a6203864a20 100644 --- a/go/internal/bitutils/bitmap_generate_test.go +++ b/go/internal/bitutils/bitmap_generate_test.go @@ -19,7 +19,7 @@ package bitutils_test import ( "testing" - "github.com/apache/arrow/go/v14/internal/bitutils" + "github.com/apache/arrow/go/v15/internal/bitutils" "golang.org/x/exp/rand" ) diff --git a/go/internal/hashing/xxh3_memo_table.gen.go b/go/internal/hashing/xxh3_memo_table.gen.go index cc996552b49e5..39b82cdeff9a2 100644 --- a/go/internal/hashing/xxh3_memo_table.gen.go +++ b/go/internal/hashing/xxh3_memo_table.gen.go @@ -21,9 +21,9 @@ package hashing import ( "math" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/internal/utils" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/internal/utils" ) type payloadInt8 struct { diff --git a/go/internal/hashing/xxh3_memo_table.gen.go.tmpl b/go/internal/hashing/xxh3_memo_table.gen.go.tmpl index 25164341d227e..527008ad63c3c 100644 --- a/go/internal/hashing/xxh3_memo_table.gen.go.tmpl +++ b/go/internal/hashing/xxh3_memo_table.gen.go.tmpl @@ -17,8 +17,8 @@ package hashing import ( - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/internal/utils" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/internal/utils" ) {{range .In}} diff --git a/go/internal/types/extension_types.go b/go/internal/types/extension_types.go index 1a31014874bbe..e24c89efc7b8b 100644 --- a/go/internal/types/extension_types.go +++ b/go/internal/types/extension_types.go @@ -24,9 +24,9 @@ import ( "reflect" "strings" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - 
"github.com/apache/arrow/go/v14/internal/json" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/internal/json" "github.com/google/uuid" "golang.org/x/xerrors" ) diff --git a/go/internal/types/extension_types_test.go b/go/internal/types/extension_types_test.go index 16273890b4c9c..f93f1000c9e4a 100644 --- a/go/internal/types/extension_types_test.go +++ b/go/internal/types/extension_types_test.go @@ -20,11 +20,11 @@ import ( "bytes" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/json" - "github.com/apache/arrow/go/v14/internal/types" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/json" + "github.com/apache/arrow/go/v15/internal/types" "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/go/internal/utils/transpose_ints_def.go b/go/internal/utils/transpose_ints_def.go index cc3b0abb59363..c52598d7148ea 100644 --- a/go/internal/utils/transpose_ints_def.go +++ b/go/internal/utils/transpose_ints_def.go @@ -19,7 +19,7 @@ package utils import ( "errors" - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" ) //go:generate go run ../../arrow/_tools/tmpl -i -data=transpose_ints.tmpldata -d arch=avx2 transpose_ints_simd.go.tmpl=transpose_ints_avx2_amd64.go diff --git a/go/internal/utils/transpose_ints_test.go b/go/internal/utils/transpose_ints_test.go index e41b61156abdc..73b2bbce3fc14 100644 --- a/go/internal/utils/transpose_ints_test.go +++ b/go/internal/utils/transpose_ints_test.go @@ -22,7 +22,7 @@ import ( "math/rand" "testing" - "github.com/apache/arrow/go/v14/internal/utils" + "github.com/apache/arrow/go/v15/internal/utils" ) var ( diff --git 
a/go/parquet/cmd/parquet_reader/dumper.go b/go/parquet/cmd/parquet_reader/dumper.go index a04dcf23b2d8d..4cb2ea4a96fee 100644 --- a/go/parquet/cmd/parquet_reader/dumper.go +++ b/go/parquet/cmd/parquet_reader/dumper.go @@ -22,9 +22,9 @@ import ( "reflect" "time" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/schema" ) const defaultBatchSize = 128 diff --git a/go/parquet/cmd/parquet_reader/main.go b/go/parquet/cmd/parquet_reader/main.go index e7d1cd7fc5a7d..0d651d8c294c5 100644 --- a/go/parquet/cmd/parquet_reader/main.go +++ b/go/parquet/cmd/parquet_reader/main.go @@ -25,11 +25,11 @@ import ( "strconv" "strings" - "github.com/apache/arrow/go/v14/internal/json" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/metadata" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/internal/json" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/metadata" + "github.com/apache/arrow/go/v15/parquet/schema" "github.com/docopt/docopt-go" ) diff --git a/go/parquet/cmd/parquet_schema/main.go b/go/parquet/cmd/parquet_schema/main.go index 28d1288b54aeb..01d541e8ac925 100644 --- a/go/parquet/cmd/parquet_schema/main.go +++ b/go/parquet/cmd/parquet_schema/main.go @@ -20,8 +20,8 @@ import ( "fmt" "os" - "github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/schema" "github.com/docopt/docopt-go" ) diff --git a/go/parquet/compress/brotli.go b/go/parquet/compress/brotli.go index 1c1ae1dffaca5..a1199d9711435 100644 --- 
a/go/parquet/compress/brotli.go +++ b/go/parquet/compress/brotli.go @@ -21,7 +21,7 @@ import ( "io" "github.com/andybalholm/brotli" - "github.com/apache/arrow/go/v14/parquet/internal/debug" + "github.com/apache/arrow/go/v15/parquet/internal/debug" ) type brotliCodec struct{} diff --git a/go/parquet/compress/compress.go b/go/parquet/compress/compress.go index 4c4c792c2f614..f61147eb1ea10 100644 --- a/go/parquet/compress/compress.go +++ b/go/parquet/compress/compress.go @@ -23,7 +23,7 @@ import ( "fmt" "io" - "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" ) // Compression is an alias to the thrift compression codec enum type for easy use diff --git a/go/parquet/compress/compress_test.go b/go/parquet/compress/compress_test.go index e8041a64c0996..d1c55b15bc3cc 100644 --- a/go/parquet/compress/compress_test.go +++ b/go/parquet/compress/compress_test.go @@ -22,7 +22,7 @@ import ( "math/rand" "testing" - "github.com/apache/arrow/go/v14/parquet/compress" + "github.com/apache/arrow/go/v15/parquet/compress" "github.com/stretchr/testify/assert" ) diff --git a/go/parquet/compress/zstd.go b/go/parquet/compress/zstd.go index 129af62807175..fd8ec81953359 100644 --- a/go/parquet/compress/zstd.go +++ b/go/parquet/compress/zstd.go @@ -20,7 +20,7 @@ import ( "io" "sync" - "github.com/apache/arrow/go/v14/parquet/internal/debug" + "github.com/apache/arrow/go/v15/parquet/internal/debug" "github.com/klauspost/compress/zstd" ) diff --git a/go/parquet/doc.go b/go/parquet/doc.go index 99a605a284fae..afeee00587ef5 100644 --- a/go/parquet/doc.go +++ b/go/parquet/doc.go @@ -29,9 +29,9 @@ // Install // // You can download the library and cli utilities via: -// go get -u github.com/apache/arrow/go/v14/parquet -// go install github.com/apache/arrow/go/v14/parquet/cmd/parquet_reader@latest -// go install github.com/apache/arrow/go/v14/parquet/cmd/parquet_schema@latest +// go get -u 
github.com/apache/arrow/go/v15/parquet +// go install github.com/apache/arrow/go/v15/parquet/cmd/parquet_reader@latest +// go install github.com/apache/arrow/go/v15/parquet/cmd/parquet_schema@latest // // Modules // diff --git a/go/parquet/encryption_properties.go b/go/parquet/encryption_properties.go index dd85a856820fe..6eb6cf1fe5680 100644 --- a/go/parquet/encryption_properties.go +++ b/go/parquet/encryption_properties.go @@ -20,7 +20,7 @@ import ( "crypto/rand" "unicode/utf8" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" ) // Constants that will be used as the default values with encryption/decryption diff --git a/go/parquet/encryption_properties_test.go b/go/parquet/encryption_properties_test.go index 0b1118a5cfa66..ab028927c5ecb 100644 --- a/go/parquet/encryption_properties_test.go +++ b/go/parquet/encryption_properties_test.go @@ -19,8 +19,8 @@ package parquet_test import ( "testing" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/encryption" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/encryption" "github.com/stretchr/testify/assert" ) diff --git a/go/parquet/encryption_read_config_test.go b/go/parquet/encryption_read_config_test.go index 8a84c19900dd0..53b7ba3c621c0 100644 --- a/go/parquet/encryption_read_config_test.go +++ b/go/parquet/encryption_read_config_test.go @@ -23,10 +23,10 @@ import ( "path" "testing" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/internal/encryption" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/internal/encryption" "github.com/stretchr/testify/suite" ) diff --git 
a/go/parquet/encryption_write_config_test.go b/go/parquet/encryption_write_config_test.go index 6b2d049e1390c..558b89208c706 100644 --- a/go/parquet/encryption_write_config_test.go +++ b/go/parquet/encryption_write_config_test.go @@ -23,10 +23,10 @@ import ( "path/filepath" "testing" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/compress" - "github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/compress" + "github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/schema" "github.com/stretchr/testify/suite" ) diff --git a/go/parquet/file/column_reader.go b/go/parquet/file/column_reader.go index 1ebceaca234c1..766638d88f26c 100644 --- a/go/parquet/file/column_reader.go +++ b/go/parquet/file/column_reader.go @@ -17,16 +17,17 @@ package file import ( + "errors" "fmt" "sync" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" - "github.com/apache/arrow/go/v14/parquet/internal/encryption" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" + "github.com/apache/arrow/go/v15/parquet/internal/encryption" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/parquet/schema" "golang.org/x/xerrors" ) @@ -345,6 +346,11 @@ func (c *columnChunkReader) initDataDecoder(page Page, lvlByteLen int64) error { c.curDecoder = decoder } else { switch encoding { + case format.Encoding_RLE: + if c.descr.PhysicalType() != 
parquet.Types.Boolean { + return fmt.Errorf("parquet: only boolean supports RLE encoding, got %s", c.descr.PhysicalType()) + } + fallthrough case format.Encoding_PLAIN, format.Encoding_DELTA_BYTE_ARRAY, format.Encoding_DELTA_LENGTH_BYTE_ARRAY, @@ -352,7 +358,7 @@ func (c *columnChunkReader) initDataDecoder(page Page, lvlByteLen int64) error { c.curDecoder = c.decoderTraits.Decoder(parquet.Encoding(encoding), c.descr, false, c.mem) c.decoders[encoding] = c.curDecoder case format.Encoding_RLE_DICTIONARY: - return xerrors.New("parquet: dictionary page must be before data page") + return errors.New("parquet: dictionary page must be before data page") case format.Encoding_BYTE_STREAM_SPLIT: return fmt.Errorf("parquet: unsupported data encoding %s", encoding) default: diff --git a/go/parquet/file/column_reader_test.go b/go/parquet/file/column_reader_test.go index 3ce05c6cfe47e..21ea52e2b7bbc 100755 --- a/go/parquet/file/column_reader_test.go +++ b/go/parquet/file/column_reader_test.go @@ -24,12 +24,12 @@ import ( "sync" "testing" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/internal/testutils" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/internal/testutils" + "github.com/apache/arrow/go/v15/parquet/schema" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" ) diff --git a/go/parquet/file/column_reader_types.gen.go b/go/parquet/file/column_reader_types.gen.go index 31dccc157642f..3fb113780f811 100644 --- a/go/parquet/file/column_reader_types.gen.go +++ b/go/parquet/file/column_reader_types.gen.go @@ -21,9 +21,9 @@ package file import ( 
"unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" ) // Int32ColumnChunkReader is the Typed Column chunk reader instance for reading diff --git a/go/parquet/file/column_reader_types.gen.go.tmpl b/go/parquet/file/column_reader_types.gen.go.tmpl index 6557963e69f50..261b5f0bfacab 100644 --- a/go/parquet/file/column_reader_types.gen.go.tmpl +++ b/go/parquet/file/column_reader_types.gen.go.tmpl @@ -17,8 +17,8 @@ package file import ( - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" ) {{range .In}} diff --git a/go/parquet/file/column_writer.go b/go/parquet/file/column_writer.go index 9d9111042df28..0b0d1145d604e 100755 --- a/go/parquet/file/column_writer.go +++ b/go/parquet/file/column_writer.go @@ -21,14 +21,14 @@ import ( "encoding/binary" "io" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" - "github.com/apache/arrow/go/v14/parquet/metadata" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" + "github.com/apache/arrow/go/v15/parquet/metadata" + "github.com/apache/arrow/go/v15/parquet/schema" ) //go:generate go run ../../arrow/_tools/tmpl/main.go -i 
-data=../internal/encoding/physical_types.tmpldata column_writer_types.gen.go.tmpl diff --git a/go/parquet/file/column_writer_test.go b/go/parquet/file/column_writer_test.go index e2ae6b0f4a71e..134c290fa6894 100755 --- a/go/parquet/file/column_writer_test.go +++ b/go/parquet/file/column_writer_test.go @@ -24,19 +24,19 @@ import ( "sync" "testing" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/memory" - arrutils "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/compress" - "github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" - "github.com/apache/arrow/go/v14/parquet/internal/encryption" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/testutils" - "github.com/apache/arrow/go/v14/parquet/internal/utils" - "github.com/apache/arrow/go/v14/parquet/metadata" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/memory" + arrutils "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/compress" + "github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" + "github.com/apache/arrow/go/v15/parquet/internal/encryption" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/testutils" + "github.com/apache/arrow/go/v15/parquet/internal/utils" + "github.com/apache/arrow/go/v15/parquet/metadata" + "github.com/apache/arrow/go/v15/parquet/schema" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/suite" diff --git a/go/parquet/file/column_writer_types.gen.go b/go/parquet/file/column_writer_types.gen.go 
index 5594f63249fb8..b4d7954639319 100644 --- a/go/parquet/file/column_writer_types.gen.go +++ b/go/parquet/file/column_writer_types.gen.go @@ -22,12 +22,13 @@ import ( "errors" "fmt" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" - "github.com/apache/arrow/go/v14/parquet/metadata" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/parquet/metadata" + "github.com/apache/arrow/go/v15/parquet/schema" "golang.org/x/xerrors" ) @@ -1629,7 +1630,11 @@ func (w *FixedLenByteArrayColumnChunkWriter) WriteDictIndices(indices arrow.Arra func (w *FixedLenByteArrayColumnChunkWriter) writeValues(values []parquet.FixedLenByteArray, numNulls int64) { w.currentEncoder.(encoding.FixedLenByteArrayEncoder).Put(values) if w.pageStatistics != nil { - w.pageStatistics.(*metadata.FixedLenByteArrayStatistics).Update(values, numNulls) + if w.Descr().LogicalType().Equals(schema.Float16LogicalType{}) { + w.pageStatistics.(*metadata.Float16Statistics).Update(values, numNulls) + } else { + w.pageStatistics.(*metadata.FixedLenByteArrayStatistics).Update(values, numNulls) + } } } @@ -1641,7 +1646,11 @@ func (w *FixedLenByteArrayColumnChunkWriter) writeValuesSpaced(spacedValues []pa } if w.pageStatistics != nil { nulls := numValues - numRead - w.pageStatistics.(*metadata.FixedLenByteArrayStatistics).UpdateSpaced(spacedValues, validBits, validBitsOffset, nulls) + if w.Descr().LogicalType().Equals(schema.Float16LogicalType{}) { + w.pageStatistics.(*metadata.Float16Statistics).UpdateSpaced(spacedValues, validBits, validBitsOffset, nulls) + } 
else { + w.pageStatistics.(*metadata.FixedLenByteArrayStatistics).UpdateSpaced(spacedValues, validBits, validBitsOffset, nulls) + } } } diff --git a/go/parquet/file/column_writer_types.gen.go.tmpl b/go/parquet/file/column_writer_types.gen.go.tmpl index c00e1dabb5fe6..70bcfe679eb92 100644 --- a/go/parquet/file/column_writer_types.gen.go.tmpl +++ b/go/parquet/file/column_writer_types.gen.go.tmpl @@ -18,11 +18,11 @@ package file import ( "fmt" - - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/metadata" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" + + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/metadata" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" ) {{range .In}} @@ -83,7 +83,7 @@ func (w *{{.Name}}ColumnChunkWriter) WriteBatch(values []{{.name}}, defLevels, r // writes a large number of values, the DataPage size can be much above the limit. // The purpose of this chunking is to bound this. 
Even if a user writes large number // of values, the chunking will ensure the AddDataPage() is called at a reasonable - // pagesize limit + // pagesize limit var n int64 switch { case defLevels != nil: @@ -107,7 +107,7 @@ func (w *{{.Name}}ColumnChunkWriter) WriteBatch(values []{{.name}}, defLevels, r valueOffset += toWrite w.checkDictionarySizeLimit() }) - return + return } // WriteBatchSpaced writes a batch of repetition levels, definition levels, and values to the @@ -132,7 +132,7 @@ func (w *{{.Name}}ColumnChunkWriter) WriteBatchSpaced(values []{{.name}}, defLev length = len(values) } doBatches(int64(length), w.props.WriteBatchSize(), func(offset, batch int64) { - var vals []{{.name}} + var vals []{{.name}} info := w.maybeCalculateValidityBits(levelSliceOrNil(defLevels, offset, batch), batch) w.writeLevelsSpaced(batch, levelSliceOrNil(defLevels, offset, batch), levelSliceOrNil(repLevels, offset, batch)) @@ -165,7 +165,7 @@ func (w *{{.Name}}ColumnChunkWriter) WriteDictIndices(indices arrow.Array, defLe } } }() - + valueOffset := int64(0) length := len(defLevels) if defLevels == nil { @@ -193,14 +193,22 @@ func (w *{{.Name}}ColumnChunkWriter) WriteDictIndices(indices arrow.Array, defLe valueOffset += info.numSpaced() }) - + return } func (w *{{.Name}}ColumnChunkWriter) writeValues(values []{{.name}}, numNulls int64) { w.currentEncoder.(encoding.{{.Name}}Encoder).Put(values) if w.pageStatistics != nil { +{{- if ne .Name "FixedLenByteArray"}} w.pageStatistics.(*metadata.{{.Name}}Statistics).Update(values, numNulls) +{{- else}} + if w.Descr().LogicalType().Equals(schema.Float16LogicalType{}) { + w.pageStatistics.(*metadata.Float16Statistics).Update(values, numNulls) + } else { + w.pageStatistics.(*metadata.{{.Name}}Statistics).Update(values, numNulls) + } +{{- end}} } } @@ -212,7 +220,15 @@ func (w *{{.Name}}ColumnChunkWriter) writeValuesSpaced(spacedValues []{{.name}}, } if w.pageStatistics != nil { nulls := numValues - numRead +{{- if ne .Name 
"FixedLenByteArray"}} w.pageStatistics.(*metadata.{{.Name}}Statistics).UpdateSpaced(spacedValues, validBits, validBitsOffset, nulls) +{{- else}} + if w.Descr().LogicalType().Equals(schema.Float16LogicalType{}) { + w.pageStatistics.(*metadata.Float16Statistics).UpdateSpaced(spacedValues, validBits, validBitsOffset, nulls) + } else { + w.pageStatistics.(*metadata.{{.Name}}Statistics).UpdateSpaced(spacedValues, validBits, validBitsOffset, nulls) + } +{{- end}} } } diff --git a/go/parquet/file/file_reader.go b/go/parquet/file/file_reader.go index 5481d5c7738b0..afff579ded5b7 100644 --- a/go/parquet/file/file_reader.go +++ b/go/parquet/file/file_reader.go @@ -25,10 +25,10 @@ import ( "runtime" "sync" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/encryption" - "github.com/apache/arrow/go/v14/parquet/metadata" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/encryption" + "github.com/apache/arrow/go/v15/parquet/metadata" "golang.org/x/xerrors" ) diff --git a/go/parquet/file/file_reader_mmap.go b/go/parquet/file/file_reader_mmap.go index 89c5a84cee2e1..03e12adf08c8c 100644 --- a/go/parquet/file/file_reader_mmap.go +++ b/go/parquet/file/file_reader_mmap.go @@ -22,7 +22,7 @@ package file import ( "io" - "github.com/apache/arrow/go/v14/parquet" + "github.com/apache/arrow/go/v15/parquet" "golang.org/x/exp/mmap" "golang.org/x/xerrors" ) diff --git a/go/parquet/file/file_reader_mmap_windows.go b/go/parquet/file/file_reader_mmap_windows.go index 6b08422163fd1..06a9e97160fe0 100644 --- a/go/parquet/file/file_reader_mmap_windows.go +++ b/go/parquet/file/file_reader_mmap_windows.go @@ -22,7 +22,7 @@ package file import ( "errors" - "github.com/apache/arrow/go/v14/parquet" + "github.com/apache/arrow/go/v15/parquet" ) func mmapOpen(filename string) (parquet.ReaderAtSeeker, error) { diff --git 
a/go/parquet/file/file_reader_test.go b/go/parquet/file/file_reader_test.go index eccb572b30040..f3248925cf5b9 100644 --- a/go/parquet/file/file_reader_test.go +++ b/go/parquet/file/file_reader_test.go @@ -21,18 +21,20 @@ import ( "crypto/rand" "encoding/binary" "io" + "os" + "path" "testing" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/compress" - "github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/thrift" - "github.com/apache/arrow/go/v14/parquet/metadata" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/compress" + "github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/thrift" + "github.com/apache/arrow/go/v15/parquet/metadata" + "github.com/apache/arrow/go/v15/parquet/schema" libthrift "github.com/apache/thrift/lib/go/thrift" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -385,3 +387,62 @@ func TestDeltaLengthByteArrayPackingWithNulls(t *testing.T) { assert.NotNil(t, readData[0]) } } + +func TestRleBooleanEncodingFileRead(t *testing.T) { + dir := os.Getenv("PARQUET_TEST_DATA") + if dir == "" { + t.Skip("no path supplied with PARQUET_TEST_DATA") + } + assert.DirExists(t, dir) + + props := parquet.NewReaderProperties(memory.DefaultAllocator) + fileReader, err := file.OpenParquetFile(path.Join(dir, "rle_boolean_encoding.parquet"), + false, file.WithReadProps(props)) + 
require.NoError(t, err) + defer fileReader.Close() + + assert.Equal(t, 1, fileReader.NumRowGroups()) + rgr := fileReader.RowGroup(0) + assert.EqualValues(t, 68, rgr.NumRows()) + + rdr, err := rgr.Column(0) + require.NoError(t, err) + brdr := rdr.(*file.BooleanColumnChunkReader) + + values := make([]bool, 68) + defLvls, repLvls := make([]int16, 68), make([]int16, 68) + total, read, err := brdr.ReadBatch(68, values, defLvls, repLvls) + require.NoError(t, err) + + assert.EqualValues(t, 68, total) + md, err := rgr.MetaData().ColumnChunk(0) + require.NoError(t, err) + stats, err := md.Statistics() + require.NoError(t, err) + assert.EqualValues(t, total-stats.NullCount(), read) + + expected := []bool{ + true, false, true, true, false, false, + true, true, true, false, false, true, true, + false, true, true, false, false, true, true, + false, true, true, false, false, true, true, + true, false, false, false, false, true, true, + false, true, true, false, false, true, true, + true, false, false, true, true, false, false, + true, true, true, false, true, true, false, + true, true, false, false, true, true, true, + } + expectedNulls := []int{2, 15, 23, 38, 48, 60} + + expectedNullIdx := 0 + for i, v := range defLvls { + if expectedNullIdx < len(expectedNulls) && i == expectedNulls[expectedNullIdx] { + assert.Zero(t, v) + expectedNullIdx++ + } else { + assert.EqualValues(t, 1, v) + } + } + + assert.Equal(t, expected, values[:len(expected)]) +} diff --git a/go/parquet/file/file_writer.go b/go/parquet/file/file_writer.go index c6289434bbe6e..1d7f7840dac50 100644 --- a/go/parquet/file/file_writer.go +++ b/go/parquet/file/file_writer.go @@ -21,11 +21,11 @@ import ( "fmt" "io" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/encryption" - "github.com/apache/arrow/go/v14/parquet/internal/utils" - "github.com/apache/arrow/go/v14/parquet/metadata" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/parquet" 
+ "github.com/apache/arrow/go/v15/parquet/internal/encryption" + "github.com/apache/arrow/go/v15/parquet/internal/utils" + "github.com/apache/arrow/go/v15/parquet/metadata" + "github.com/apache/arrow/go/v15/parquet/schema" ) // Writer is the primary interface for writing a parquet file @@ -121,6 +121,7 @@ func (fw *Writer) AppendRowGroup() SerialRowGroupWriter { func (fw *Writer) appendRowGroup(buffered bool) *rowGroupWriter { if fw.rowGroupWriter != nil { + fw.nrows += fw.rowGroupWriter.nrows fw.rowGroupWriter.Close() } fw.rowGroups++ diff --git a/go/parquet/file/file_writer_test.go b/go/parquet/file/file_writer_test.go index af083ebe60e4f..f32e403a8d534 100644 --- a/go/parquet/file/file_writer_test.go +++ b/go/parquet/file/file_writer_test.go @@ -22,13 +22,13 @@ import ( "reflect" "testing" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/compress" - "github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" - "github.com/apache/arrow/go/v14/parquet/internal/testutils" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/compress" + "github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" + "github.com/apache/arrow/go/v15/parquet/internal/testutils" + "github.com/apache/arrow/go/v15/parquet/schema" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" @@ -97,6 +97,8 @@ func (t *SerializeTestSuite) fileSerializeTest(codec compress.Compression, expec writer.Close() nrows := t.numRowGroups * t.rowsPerRG + t.EqualValues(nrows, writer.NumRows()) + reader, err := file.NewParquetReader(bytes.NewReader(sink.Bytes())) t.NoError(err) t.Equal(t.numCols, reader.MetaData().Schema.NumColumns()) diff --git 
a/go/parquet/file/level_conversion.go b/go/parquet/file/level_conversion.go index ac53dcd18abae..c23bdda445963 100755 --- a/go/parquet/file/level_conversion.go +++ b/go/parquet/file/level_conversion.go @@ -22,11 +22,11 @@ import ( "math/bits" "unsafe" - shared_utils "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/bmi" - "github.com/apache/arrow/go/v14/parquet/internal/utils" - "github.com/apache/arrow/go/v14/parquet/schema" + shared_utils "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/bmi" + "github.com/apache/arrow/go/v15/parquet/internal/utils" + "github.com/apache/arrow/go/v15/parquet/schema" "golang.org/x/xerrors" ) diff --git a/go/parquet/file/level_conversion_test.go b/go/parquet/file/level_conversion_test.go index 343edb687980d..5d5bdde90dc7e 100644 --- a/go/parquet/file/level_conversion_test.go +++ b/go/parquet/file/level_conversion_test.go @@ -20,9 +20,9 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/parquet/internal/bmi" - "github.com/apache/arrow/go/v14/parquet/internal/utils" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/parquet/internal/bmi" + "github.com/apache/arrow/go/v15/parquet/internal/utils" "github.com/stretchr/testify/assert" ) diff --git a/go/parquet/file/page_reader.go b/go/parquet/file/page_reader.go index ab8679b127f1b..01f253aff8864 100644 --- a/go/parquet/file/page_reader.go +++ b/go/parquet/file/page_reader.go @@ -23,13 +23,13 @@ import ( "sync" "github.com/JohnCGriffin/overflow" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/compress" - "github.com/apache/arrow/go/v14/parquet/internal/encryption" - format 
"github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/thrift" - "github.com/apache/arrow/go/v14/parquet/metadata" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/compress" + "github.com/apache/arrow/go/v15/parquet/internal/encryption" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/thrift" + "github.com/apache/arrow/go/v15/parquet/metadata" "golang.org/x/xerrors" ) diff --git a/go/parquet/file/page_writer.go b/go/parquet/file/page_writer.go index b2b6d5061b0fc..c16476fbb232c 100644 --- a/go/parquet/file/page_writer.go +++ b/go/parquet/file/page_writer.go @@ -20,15 +20,15 @@ import ( "bytes" "sync" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/compress" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" - "github.com/apache/arrow/go/v14/parquet/internal/encryption" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/thrift" - "github.com/apache/arrow/go/v14/parquet/internal/utils" - "github.com/apache/arrow/go/v14/parquet/metadata" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/compress" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" + "github.com/apache/arrow/go/v15/parquet/internal/encryption" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/thrift" + "github.com/apache/arrow/go/v15/parquet/internal/utils" + "github.com/apache/arrow/go/v15/parquet/metadata" libthrift "github.com/apache/thrift/lib/go/thrift" "golang.org/x/xerrors" ) diff --git a/go/parquet/file/record_reader.go 
b/go/parquet/file/record_reader.go index 3311a6d77da39..ad836d29ef483 100755 --- a/go/parquet/file/record_reader.go +++ b/go/parquet/file/record_reader.go @@ -23,14 +23,14 @@ import ( "unsafe" "github.com/JohnCGriffin/overflow" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" + "github.com/apache/arrow/go/v15/parquet/schema" "golang.org/x/xerrors" ) diff --git a/go/parquet/file/row_group_reader.go b/go/parquet/file/row_group_reader.go index f1aebf1085a23..3c1c1edb0b484 100644 --- a/go/parquet/file/row_group_reader.go +++ b/go/parquet/file/row_group_reader.go @@ -20,10 +20,10 @@ import ( "fmt" "sync" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/encryption" - "github.com/apache/arrow/go/v14/parquet/metadata" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/encryption" + "github.com/apache/arrow/go/v15/parquet/metadata" "golang.org/x/xerrors" ) diff --git a/go/parquet/file/row_group_writer.go b/go/parquet/file/row_group_writer.go index 75f0dede5246f..935c13d13a67f 100644 --- a/go/parquet/file/row_group_writer.go +++ b/go/parquet/file/row_group_writer.go @@ -17,10 +17,10 @@ package file import ( - 
"github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/encryption" - "github.com/apache/arrow/go/v14/parquet/internal/utils" - "github.com/apache/arrow/go/v14/parquet/metadata" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/encryption" + "github.com/apache/arrow/go/v15/parquet/internal/utils" + "github.com/apache/arrow/go/v15/parquet/metadata" "golang.org/x/xerrors" ) diff --git a/go/parquet/file/row_group_writer_test.go b/go/parquet/file/row_group_writer_test.go index bc95e99d91582..2ea317cb91001 100644 --- a/go/parquet/file/row_group_writer_test.go +++ b/go/parquet/file/row_group_writer_test.go @@ -20,10 +20,10 @@ import ( "bytes" "testing" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" + "github.com/apache/arrow/go/v15/parquet/schema" "github.com/apache/thrift/lib/go/thrift" "github.com/stretchr/testify/assert" ) diff --git a/go/parquet/internal/bmi/bmi_test.go b/go/parquet/internal/bmi/bmi_test.go index 460c6ec4e24c6..a5278dfef2211 100644 --- a/go/parquet/internal/bmi/bmi_test.go +++ b/go/parquet/internal/bmi/bmi_test.go @@ -20,7 +20,7 @@ import ( "fmt" "testing" - "github.com/apache/arrow/go/v14/parquet/internal/bmi" + "github.com/apache/arrow/go/v15/parquet/internal/bmi" "github.com/stretchr/testify/assert" ) diff --git a/go/parquet/internal/encoding/boolean_decoder.go b/go/parquet/internal/encoding/boolean_decoder.go index dd213395d6324..3782dc85ea814 100644 --- a/go/parquet/internal/encoding/boolean_decoder.go +++ b/go/parquet/internal/encoding/boolean_decoder.go @@ -17,11 +17,16 @@ package encoding import ( - "github.com/apache/arrow/go/v14/arrow/bitutil" - 
shared_utils "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/utils" - "golang.org/x/xerrors" + "bytes" + "encoding/binary" + "errors" + "fmt" + "io" + + "github.com/apache/arrow/go/v15/arrow/bitutil" + shared_utils "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/utils" ) // PlainBooleanDecoder is for the Plain Encoding type, there is no @@ -103,7 +108,80 @@ func (dec *PlainBooleanDecoder) DecodeSpaced(out []bool, nullCount int, validBit return 0, err } if valuesRead != toRead { - return valuesRead, xerrors.New("parquet: boolean decoder: number of values / definition levels read did not match") + return valuesRead, errors.New("parquet: boolean decoder: number of values / definition levels read did not match") + } + return spacedExpand(out, nullCount, validBits, validBitsOffset), nil + } + return dec.Decode(out) +} + +type RleBooleanDecoder struct { + decoder + + rleDec *utils.RleDecoder +} + +func (RleBooleanDecoder) Type() parquet.Type { + return parquet.Types.Boolean +} + +func (dec *RleBooleanDecoder) SetData(nvals int, data []byte) error { + dec.nvals = nvals + + if len(data) < 4 { + return fmt.Errorf("invalid length - %d (corrupt data page?)", len(data)) + } + + // load the first 4 bytes in little-endian which indicates the length + nbytes := binary.LittleEndian.Uint32(data[:4]) + if nbytes > uint32(len(data)-4) { + return fmt.Errorf("received invalid number of bytes - %d (corrupt data page?)", nbytes) + } + + dec.data = data[4:] + if dec.rleDec == nil { + dec.rleDec = utils.NewRleDecoder(bytes.NewReader(dec.data), 1) + } else { + dec.rleDec.Reset(bytes.NewReader(dec.data), 1) + } + return nil +} + +func (dec *RleBooleanDecoder) Decode(out []bool) (int, error) { + max := shared_utils.MinInt(len(out), dec.nvals) + + var ( + buf [1024]uint64 + n = max + ) + + for n > 0 { + batch 
:= shared_utils.MinInt(len(buf), n) + decoded := dec.rleDec.GetBatch(buf[:batch]) + if decoded != batch { + return max - n, io.ErrUnexpectedEOF + } + + for i := 0; i < batch; i++ { + out[i] = buf[i] != 0 + } + n -= batch + out = out[batch:] + } + + dec.nvals -= max + return max, nil +} + +func (dec *RleBooleanDecoder) DecodeSpaced(out []bool, nullCount int, validBits []byte, validBitsOffset int64) (int, error) { + if nullCount > 0 { + toRead := len(out) - nullCount + valuesRead, err := dec.Decode(out[:toRead]) + if err != nil { + return 0, err + } + if valuesRead != toRead { + return valuesRead, errors.New("parquet: rle boolean decoder: number of values / definition levels read did not match") } return spacedExpand(out, nullCount, validBits, validBitsOffset), nil } diff --git a/go/parquet/internal/encoding/boolean_encoder.go b/go/parquet/internal/encoding/boolean_encoder.go index 65ba2658b0637..3e01bde369d8b 100644 --- a/go/parquet/internal/encoding/boolean_encoder.go +++ b/go/parquet/internal/encoding/boolean_encoder.go @@ -17,9 +17,12 @@ package encoding import ( - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/utils" + "encoding/binary" + + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/debug" + "github.com/apache/arrow/go/v15/parquet/internal/utils" ) const ( @@ -87,3 +90,55 @@ func (enc *PlainBooleanEncoder) FlushValues() (Buffer, error) { return enc.sink.Finish(), nil } + +const rleLengthInBytes = 4 + +type RleBooleanEncoder struct { + encoder + + bufferedValues []bool +} + +func (RleBooleanEncoder) Type() parquet.Type { + return parquet.Types.Boolean +} + +func (enc *RleBooleanEncoder) Put(in []bool) { + enc.bufferedValues = append(enc.bufferedValues, in...) 
+} + +func (enc *RleBooleanEncoder) PutSpaced(in []bool, validBits []byte, validBitsOffset int64) { + bufferOut := make([]bool, len(in)) + nvalid := spacedCompress(in, bufferOut, validBits, validBitsOffset) + enc.Put(bufferOut[:nvalid]) +} + +func (enc *RleBooleanEncoder) EstimatedDataEncodedSize() int64 { + return rleLengthInBytes + int64(enc.maxRleBufferSize()) +} + +func (enc *RleBooleanEncoder) maxRleBufferSize() int { + return utils.MaxRLEBufferSize(1, len(enc.bufferedValues)) + + utils.MinRLEBufferSize(1) +} + +func (enc *RleBooleanEncoder) FlushValues() (Buffer, error) { + rleBufferSizeMax := enc.maxRleBufferSize() + enc.sink.SetOffset(rleLengthInBytes) + enc.sink.Reserve(rleBufferSizeMax) + + rleEncoder := utils.NewRleEncoder(enc.sink, 1) + for _, v := range enc.bufferedValues { + if v { + rleEncoder.Put(1) + } else { + rleEncoder.Put(0) + } + } + n := rleEncoder.Flush() + debug.Assert(n <= rleBufferSizeMax, "num encoded bytes larger than expected max") + buf := enc.sink.Finish() + binary.LittleEndian.PutUint32(buf.Bytes(), uint32(n)) + + return buf, nil +} diff --git a/go/parquet/internal/encoding/byte_array_decoder.go b/go/parquet/internal/encoding/byte_array_decoder.go index c3ca1de5888f7..82ce9f84265c5 100644 --- a/go/parquet/internal/encoding/byte_array_decoder.go +++ b/go/parquet/internal/encoding/byte_array_decoder.go @@ -19,12 +19,12 @@ package encoding import ( "encoding/binary" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" - pqutils "github.com/apache/arrow/go/v14/parquet/internal/utils" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + pqutils 
"github.com/apache/arrow/go/v15/parquet/internal/utils" "golang.org/x/xerrors" ) diff --git a/go/parquet/internal/encoding/byte_array_encoder.go b/go/parquet/internal/encoding/byte_array_encoder.go index cb49178435a6f..9270b5531768c 100644 --- a/go/parquet/internal/encoding/byte_array_encoder.go +++ b/go/parquet/internal/encoding/byte_array_encoder.go @@ -21,11 +21,11 @@ import ( "fmt" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/internal/bitutils" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/internal/bitutils" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" ) // PlainByteArrayEncoder encodes byte arrays according to the spec for Plain encoding diff --git a/go/parquet/internal/encoding/decoder.go b/go/parquet/internal/encoding/decoder.go index b441868e1df85..cee624730e993 100644 --- a/go/parquet/internal/encoding/decoder.go +++ b/go/parquet/internal/encoding/decoder.go @@ -20,16 +20,16 @@ import ( "bytes" "reflect" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/bitutils" - shared_utils "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/debug" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/utils" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/bitutils" + shared_utils 
"github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/debug" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/utils" + "github.com/apache/arrow/go/v15/parquet/schema" "golang.org/x/xerrors" ) diff --git a/go/parquet/internal/encoding/delta_bit_packing.go b/go/parquet/internal/encoding/delta_bit_packing.go index 1fb91634e977b..a00f3457cac7a 100644 --- a/go/parquet/internal/encoding/delta_bit_packing.go +++ b/go/parquet/internal/encoding/delta_bit_packing.go @@ -23,11 +23,11 @@ import ( "math/bits" "reflect" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/memory" - shared_utils "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/utils" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/memory" + shared_utils "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/utils" ) // see the deltaBitPack encoder for a description of the encoding format that is diff --git a/go/parquet/internal/encoding/delta_byte_array.go b/go/parquet/internal/encoding/delta_byte_array.go index 60a20a3ff0d72..b35d022fd5deb 100644 --- a/go/parquet/internal/encoding/delta_byte_array.go +++ b/go/parquet/internal/encoding/delta_byte_array.go @@ -17,9 +17,9 @@ package encoding import ( - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" "golang.org/x/xerrors" ) diff --git a/go/parquet/internal/encoding/delta_byte_array_test.go 
b/go/parquet/internal/encoding/delta_byte_array_test.go index 69c632bf69971..0a206796f742a 100644 --- a/go/parquet/internal/encoding/delta_byte_array_test.go +++ b/go/parquet/internal/encoding/delta_byte_array_test.go @@ -18,8 +18,8 @@ package encoding import ( "fmt" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet" "github.com/stretchr/testify/assert" "testing" ) diff --git a/go/parquet/internal/encoding/delta_length_byte_array.go b/go/parquet/internal/encoding/delta_length_byte_array.go index 981a067b07e08..d5a99c187d11e 100644 --- a/go/parquet/internal/encoding/delta_length_byte_array.go +++ b/go/parquet/internal/encoding/delta_length_byte_array.go @@ -17,9 +17,9 @@ package encoding import ( - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" "golang.org/x/xerrors" ) diff --git a/go/parquet/internal/encoding/encoder.go b/go/parquet/internal/encoding/encoder.go index 9626e4e9ff97f..7023309397a3b 100644 --- a/go/parquet/internal/encoding/encoder.go +++ b/go/parquet/internal/encoding/encoder.go @@ -21,14 +21,14 @@ import ( "math/bits" "reflect" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/bitutils" - "github.com/apache/arrow/go/v14/parquet" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/utils" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/memory" + 
"github.com/apache/arrow/go/v15/internal/bitutils" + "github.com/apache/arrow/go/v15/parquet" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/utils" + "github.com/apache/arrow/go/v15/parquet/schema" ) //go:generate go run ../../../arrow/_tools/tmpl/main.go -i -data=physical_types.tmpldata plain_encoder_types.gen.go.tmpl typed_encoder.gen.go.tmpl @@ -244,7 +244,7 @@ func (d *dictEncoder) FlushValues() (Buffer, error) { // EstimatedDataEncodedSize returns the maximum number of bytes needed to store the RLE encoded indexes, not including the // dictionary index in the computation. func (d *dictEncoder) EstimatedDataEncodedSize() int64 { - return 1 + int64(utils.MaxBufferSize(d.BitWidth(), len(d.idxValues))+utils.MinBufferSize(d.BitWidth())) + return 1 + int64(utils.MaxRLEBufferSize(d.BitWidth(), len(d.idxValues))+utils.MinRLEBufferSize(d.BitWidth())) } // NumEntries returns the number of entires in the dictionary index for this encoder. 
diff --git a/go/parquet/internal/encoding/encoding_benchmarks_test.go b/go/parquet/internal/encoding/encoding_benchmarks_test.go index 0bb4e895f316d..e0645e9de54e4 100644 --- a/go/parquet/internal/encoding/encoding_benchmarks_test.go +++ b/go/parquet/internal/encoding/encoding_benchmarks_test.go @@ -21,14 +21,14 @@ import ( "math" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/hashing" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" - "github.com/apache/arrow/go/v14/parquet/internal/testutils" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/hashing" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" + "github.com/apache/arrow/go/v15/parquet/internal/testutils" + "github.com/apache/arrow/go/v15/parquet/schema" ) const ( diff --git a/go/parquet/internal/encoding/encoding_test.go b/go/parquet/internal/encoding/encoding_test.go index 50e72de004e19..48e2316b0f897 100644 --- a/go/parquet/internal/encoding/encoding_test.go +++ b/go/parquet/internal/encoding/encoding_test.go @@ -26,13 +26,13 @@ import ( "testing" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" - "github.com/apache/arrow/go/v14/parquet/internal/testutils" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet" + 
"github.com/apache/arrow/go/v15/parquet/internal/encoding" + "github.com/apache/arrow/go/v15/parquet/internal/testutils" + "github.com/apache/arrow/go/v15/parquet/schema" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" @@ -363,6 +363,16 @@ func (b *BaseEncodingTestSuite) TestBasicRoundTrip() { b.checkRoundTrip(parquet.Encodings.Plain) } +func (b *BaseEncodingTestSuite) TestRleBooleanEncodingRoundTrip() { + switch b.typ { + case reflect.TypeOf(true): + b.initData(2000, 200) + b.checkRoundTrip(parquet.Encodings.RLE) + default: + b.T().SkipNow() + } +} + func (b *BaseEncodingTestSuite) TestDeltaEncodingRoundTrip() { b.initData(10000, 1) @@ -408,6 +418,8 @@ func (b *BaseEncodingTestSuite) TestSpacedRoundTrip() { if validBits != nil { b.checkRoundTripSpaced(parquet.Encodings.Plain, validBits, validBitsOffset) switch b.typ { + case reflect.TypeOf(false): + b.checkRoundTripSpaced(parquet.Encodings.RLE, validBits, validBitsOffset) case reflect.TypeOf(int32(0)), reflect.TypeOf(int64(0)): b.checkRoundTripSpaced(parquet.Encodings.DeltaBinaryPacked, validBits, validBitsOffset) case reflect.TypeOf(parquet.ByteArray{}): diff --git a/go/parquet/internal/encoding/fixed_len_byte_array_decoder.go b/go/parquet/internal/encoding/fixed_len_byte_array_decoder.go index e4516bf4a1ac1..1e589fc2e7be1 100644 --- a/go/parquet/internal/encoding/fixed_len_byte_array_decoder.go +++ b/go/parquet/internal/encoding/fixed_len_byte_array_decoder.go @@ -19,8 +19,8 @@ package encoding import ( "math" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" "golang.org/x/xerrors" ) diff --git a/go/parquet/internal/encoding/fixed_len_byte_array_encoder.go b/go/parquet/internal/encoding/fixed_len_byte_array_encoder.go index b5aee337d5bf0..39202c8e25d9f 100644 --- 
a/go/parquet/internal/encoding/fixed_len_byte_array_encoder.go +++ b/go/parquet/internal/encoding/fixed_len_byte_array_encoder.go @@ -19,9 +19,9 @@ package encoding import ( "fmt" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/internal/bitutils" - "github.com/apache/arrow/go/v14/parquet" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/internal/bitutils" + "github.com/apache/arrow/go/v15/parquet" ) // PlainFixedLenByteArrayEncoder writes the raw bytes of the byte array diff --git a/go/parquet/internal/encoding/levels.go b/go/parquet/internal/encoding/levels.go index c5622519b0ce4..2a6dc24933714 100644 --- a/go/parquet/internal/encoding/levels.go +++ b/go/parquet/internal/encoding/levels.go @@ -19,16 +19,16 @@ package encoding import ( "bytes" "encoding/binary" + "errors" "fmt" "math/bits" "github.com/JohnCGriffin/overflow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - shared_utils "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/utils" - "golang.org/x/xerrors" + "github.com/apache/arrow/go/v15/arrow/bitutil" + shared_utils "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/utils" ) // LevelEncoder is for handling the encoding of Definition and Repetition levels @@ -48,7 +48,7 @@ func LevelEncodingMaxBufferSize(encoding parquet.Encoding, maxLvl int16, nbuffer nbytes := 0 switch encoding { case parquet.Encodings.RLE: - nbytes = utils.MaxBufferSize(bitWidth, nbuffered) + utils.MinBufferSize(bitWidth) + nbytes = utils.MaxRLEBufferSize(bitWidth, nbuffered) + utils.MinRLEBufferSize(bitWidth) case parquet.Encodings.BitPacked: nbytes = int(bitutil.BytesForBits(int64(nbuffered * bitWidth))) 
default: @@ -194,12 +194,12 @@ func (l *LevelDecoder) SetData(encoding parquet.Encoding, maxLvl int16, nbuffere switch encoding { case parquet.Encodings.RLE: if len(data) < 4 { - return 0, xerrors.New("parquet: received invalid levels (corrupt data page?)") + return 0, errors.New("parquet: received invalid levels (corrupt data page?)") } nbytes := int32(binary.LittleEndian.Uint32(data[:4])) if nbytes < 0 || nbytes > int32(len(data)-4) { - return 0, xerrors.New("parquet: received invalid number of bytes (corrupt data page?)") + return 0, errors.New("parquet: received invalid number of bytes (corrupt data page?)") } buf := data[4:] @@ -212,12 +212,12 @@ func (l *LevelDecoder) SetData(encoding parquet.Encoding, maxLvl int16, nbuffere case parquet.Encodings.BitPacked: nbits, ok := overflow.Mul(nbuffered, l.bitWidth) if !ok { - return 0, xerrors.New("parquet: number of buffered values too large (corrupt data page?)") + return 0, errors.New("parquet: number of buffered values too large (corrupt data page?)") } nbytes := bitutil.BytesForBits(int64(nbits)) if nbytes < 0 || nbytes > int64(len(data)) { - return 0, xerrors.New("parquet: recieved invalid number of bytes (corrupt data page?)") + return 0, errors.New("parquet: received invalid number of bytes (corrupt data page?)") } if l.bit == nil { l.bit = utils.NewBitReader(bytes.NewReader(data)) @@ -234,7 +234,7 @@ func (l *LevelDecoder) SetData(encoding parquet.Encoding, maxLvl int16, nbuffere // run length encoding. 
func (l *LevelDecoder) SetDataV2(nbytes int32, maxLvl int16, nbuffered int, data []byte) error { if nbytes < 0 { - return xerrors.New("parquet: invalid page header (corrupt data page?)") + return errors.New("parquet: invalid page header (corrupt data page?)") } l.maxLvl = maxLvl diff --git a/go/parquet/internal/encoding/levels_test.go b/go/parquet/internal/encoding/levels_test.go index ae3a3a8c497fc..304ce32b3106d 100644 --- a/go/parquet/internal/encoding/levels_test.go +++ b/go/parquet/internal/encoding/levels_test.go @@ -21,11 +21,11 @@ import ( "strconv" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" "github.com/stretchr/testify/assert" ) diff --git a/go/parquet/internal/encoding/memo_table.go b/go/parquet/internal/encoding/memo_table.go index 170eab47d6160..810e8633b886e 100644 --- a/go/parquet/internal/encoding/memo_table.go +++ b/go/parquet/internal/encoding/memo_table.go @@ -20,11 +20,11 @@ import ( "math" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/hashing" - "github.com/apache/arrow/go/v14/parquet" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/hashing" + "github.com/apache/arrow/go/v15/parquet" ) //go:generate go run ../../../arrow/_tools/tmpl/main.go -i -data=physical_types.tmpldata memo_table_types.gen.go.tmpl diff --git 
a/go/parquet/internal/encoding/memo_table_test.go b/go/parquet/internal/encoding/memo_table_test.go index 1eb61cb2559a3..1b9337010f855 100644 --- a/go/parquet/internal/encoding/memo_table_test.go +++ b/go/parquet/internal/encoding/memo_table_test.go @@ -20,11 +20,11 @@ import ( "math" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/hashing" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/hashing" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" "github.com/stretchr/testify/suite" ) diff --git a/go/parquet/internal/encoding/memo_table_types.gen.go b/go/parquet/internal/encoding/memo_table_types.gen.go index c9af6178bbb98..4da2721437814 100644 --- a/go/parquet/internal/encoding/memo_table_types.gen.go +++ b/go/parquet/internal/encoding/memo_table_types.gen.go @@ -19,8 +19,8 @@ package encoding import ( - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet" ) // standard map based implementation of memo tables which can be more efficient diff --git a/go/parquet/internal/encoding/memo_table_types.gen.go.tmpl b/go/parquet/internal/encoding/memo_table_types.gen.go.tmpl index 7e88238d67b71..75335f25ff1f7 100644 --- a/go/parquet/internal/encoding/memo_table_types.gen.go.tmpl +++ b/go/parquet/internal/encoding/memo_table_types.gen.go.tmpl @@ -17,7 +17,7 @@ package encoding import ( - "github.com/apache/arrow/go/v14/parquet" + "github.com/apache/arrow/go/v15/parquet" ) // standard map based implementation of memo tables which can be more efficient diff --git 
a/go/parquet/internal/encoding/plain_encoder_types.gen.go b/go/parquet/internal/encoding/plain_encoder_types.gen.go index 23db13b7a4121..09403d74cb06f 100644 --- a/go/parquet/internal/encoding/plain_encoder_types.gen.go +++ b/go/parquet/internal/encoding/plain_encoder_types.gen.go @@ -24,11 +24,11 @@ import ( "fmt" "math" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/endian" - "github.com/apache/arrow/go/v14/internal/bitutils" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/endian" + "github.com/apache/arrow/go/v15/internal/bitutils" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" "golang.org/x/xerrors" ) diff --git a/go/parquet/internal/encoding/plain_encoder_types.gen.go.tmpl b/go/parquet/internal/encoding/plain_encoder_types.gen.go.tmpl index e45419050302d..2838c63a41857 100644 --- a/go/parquet/internal/encoding/plain_encoder_types.gen.go.tmpl +++ b/go/parquet/internal/encoding/plain_encoder_types.gen.go.tmpl @@ -20,10 +20,10 @@ import ( "encoding/binary" "fmt" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/internal/bitutils" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/internal/bitutils" ) var ( diff --git a/go/parquet/internal/encoding/typed_encoder.gen.go b/go/parquet/internal/encoding/typed_encoder.gen.go index 7c1f954632f06..25fa309e0a38f 100644 --- a/go/parquet/internal/encoding/typed_encoder.gen.go +++ b/go/parquet/internal/encoding/typed_encoder.gen.go @@ -22,15 +22,15 @@ import ( "fmt" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - 
"github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/bitutils" - shared_utils "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/utils" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/bitutils" + shared_utils "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/utils" + "github.com/apache/arrow/go/v15/parquet/schema" "golang.org/x/xerrors" ) @@ -1225,6 +1225,8 @@ func (boolEncoderTraits) Encoder(e format.Encoding, useDict bool, descr *schema. switch e { case format.Encoding_PLAIN: return &PlainBooleanEncoder{encoder: newEncoderBase(e, descr, mem)} + case format.Encoding_RLE: + return &RleBooleanEncoder{encoder: newEncoderBase(e, descr, mem)} default: panic("unimplemented encoding type") } @@ -1248,6 +1250,8 @@ func (boolDecoderTraits) Decoder(e parquet.Encoding, descr *schema.Column, useDi switch e { case parquet.Encodings.Plain: return &PlainBooleanDecoder{decoder: newDecoderBase(format.Encoding(e), descr)} + case parquet.Encodings.RLE: + return &RleBooleanDecoder{decoder: newDecoderBase(format.Encoding(e), descr)} default: panic("unimplemented encoding type") } diff --git a/go/parquet/internal/encoding/typed_encoder.gen.go.tmpl b/go/parquet/internal/encoding/typed_encoder.gen.go.tmpl index f0d4fb50ae2fc..66c2649d599c6 100644 --- a/go/parquet/internal/encoding/typed_encoder.gen.go.tmpl +++ b/go/parquet/internal/encoding/typed_encoder.gen.go.tmpl @@ -17,13 +17,13 @@ package encoding import ( - 
"github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/schema" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/parquet/internal/utils" - shared_utils "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/internal/bitutils" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/schema" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/parquet/internal/utils" + shared_utils "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/internal/bitutils" ) // fully typed encoder interfaces to enable writing against encoder/decoders @@ -73,6 +73,10 @@ func ({{.lower}}EncoderTraits) Encoder(e format.Encoding, useDict bool, descr *s switch e { case format.Encoding_PLAIN: return &Plain{{.Name}}Encoder{encoder: newEncoderBase(e, descr, mem)} +{{- if eq .Name "Boolean" }} + case format.Encoding_RLE: + return &RleBooleanEncoder{encoder: newEncoderBase(e, descr, mem)} +{{- end}} {{- if or (eq .Name "Int32") (eq .Name "Int64")}} case format.Encoding_DELTA_BINARY_PACKED: return DeltaBitPack{{.Name}}Encoder{&deltaBitPackEncoder{ @@ -117,6 +121,10 @@ func ({{.lower}}DecoderTraits) Decoder(e parquet.Encoding, descr *schema.Column, switch e { case parquet.Encodings.Plain: return &Plain{{.Name}}Decoder{decoder: newDecoderBase(format.Encoding(e), descr)} +{{- if eq .Name "Boolean" }} + case parquet.Encodings.RLE: + return &RleBooleanDecoder{decoder: newDecoderBase(format.Encoding(e), descr)} +{{- end}} {{- if or (eq .Name "Int32") (eq .Name "Int64")}} case parquet.Encodings.DeltaBinaryPacked: if mem == nil { diff --git a/go/parquet/internal/encoding/types.go b/go/parquet/internal/encoding/types.go index 2d13a9c8f877a..4ab3ab1a1c954 100644 --- a/go/parquet/internal/encoding/types.go 
+++ b/go/parquet/internal/encoding/types.go @@ -20,11 +20,11 @@ import ( "io" "sync" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" "golang.org/x/xerrors" ) diff --git a/go/parquet/internal/encryption/aes.go b/go/parquet/internal/encryption/aes.go index cc0cea0112fee..b6e9130ef81cd 100644 --- a/go/parquet/internal/encryption/aes.go +++ b/go/parquet/internal/encryption/aes.go @@ -29,7 +29,7 @@ import ( "fmt" "io" - "github.com/apache/arrow/go/v14/parquet" + "github.com/apache/arrow/go/v15/parquet" ) // important constants for handling the aes encryption diff --git a/go/parquet/internal/encryption/decryptor.go b/go/parquet/internal/encryption/decryptor.go index 62a4f2ee9e56e..658559e6082c4 100644 --- a/go/parquet/internal/encryption/decryptor.go +++ b/go/parquet/internal/encryption/decryptor.go @@ -19,8 +19,8 @@ package encryption import ( "io" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet" ) // FileDecryptor is an interface used by the filereader for decrypting an diff --git a/go/parquet/internal/encryption/encryptor.go b/go/parquet/internal/encryption/encryptor.go index 2ef09e558a23f..bdbae4740a44f 100644 --- a/go/parquet/internal/encryption/encryptor.go +++ b/go/parquet/internal/encryption/encryptor.go @@ -19,8 +19,8 @@ package encryption import ( "io" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" + "github.com/apache/arrow/go/v15/arrow/memory" + 
"github.com/apache/arrow/go/v15/parquet" ) // FileEncryptor is the interface for constructing encryptors for the different diff --git a/go/parquet/internal/gen-go/parquet/GoUnusedProtection__.go b/go/parquet/internal/gen-go/parquet/GoUnusedProtection__.go index 1de0c8dee4cbd..01f1eb5aa99e3 100644 --- a/go/parquet/internal/gen-go/parquet/GoUnusedProtection__.go +++ b/go/parquet/internal/gen-go/parquet/GoUnusedProtection__.go @@ -1,4 +1,4 @@ -// Code generated by Thrift Compiler (0.16.0). DO NOT EDIT. +// Code generated by Thrift Compiler (0.18.1). DO NOT EDIT. package parquet diff --git a/go/parquet/internal/gen-go/parquet/parquet-consts.go b/go/parquet/internal/gen-go/parquet/parquet-consts.go index d4a63b22b890a..ab0a73c596e7d 100644 --- a/go/parquet/internal/gen-go/parquet/parquet-consts.go +++ b/go/parquet/internal/gen-go/parquet/parquet-consts.go @@ -1,21 +1,28 @@ -// Code generated by Thrift Compiler (0.16.0). DO NOT EDIT. +// Code generated by Thrift Compiler (0.18.1). DO NOT EDIT. package parquet import ( "bytes" "context" + "errors" "fmt" "time" thrift "github.com/apache/thrift/lib/go/thrift" + "strings" + "regexp" ) // (needed to ensure safety because of naive import list construction.) var _ = thrift.ZERO var _ = fmt.Printf +var _ = errors.New var _ = context.Background var _ = time.Now var _ = bytes.Equal +// (needed by validator.) +var _ = strings.Contains +var _ = regexp.MatchString func init() { diff --git a/go/parquet/internal/gen-go/parquet/parquet.go b/go/parquet/internal/gen-go/parquet/parquet.go index d4508f8e4529f..9dcedae8888d3 100644 --- a/go/parquet/internal/gen-go/parquet/parquet.go +++ b/go/parquet/internal/gen-go/parquet/parquet.go @@ -1,4 +1,4 @@ -// Code generated by Thrift Compiler (0.16.0). DO NOT EDIT. +// Code generated by Thrift Compiler (0.18.1). DO NOT EDIT. 
package parquet @@ -10,14 +10,20 @@ import ( "fmt" "time" thrift "github.com/apache/thrift/lib/go/thrift" + "strings" + "regexp" ) // (needed to ensure safety because of naive import list construction.) var _ = thrift.ZERO var _ = fmt.Printf +var _ = errors.New var _ = context.Background var _ = time.Now var _ = bytes.Equal +// (needed by validator.) +var _ = strings.Contains +var _ = regexp.MatchString //Types supported by Parquet. These types are intended to be used in combination //with the encodings to control the on disk storage format. @@ -94,9 +100,10 @@ func (p * Type) Value() (driver.Value, error) { } return int64(*p), nil } -//Common types used by frameworks(e.g. hive, pig) using parquet. This helps map -//between types in those frameworks to the base types in parquet. This is only -//metadata and not needed to read or write the data. +//DEPRECATED: Common types used by frameworks(e.g. hive, pig) using parquet. +//ConvertedType is superseded by LogicalType. This enum should not be extended. +// +//See LogicalTypes.md for conversion between ConvertedType and LogicalType. 
type ConvertedType int64 const ( ConvertedType_UTF8 ConvertedType = 0 @@ -897,6 +904,9 @@ func (p *Statistics) String() string { return fmt.Sprintf("Statistics(%+v)", *p) } +func (p *Statistics) Validate() error { + return nil +} // Empty structs to use as logical type annotations type StringType struct { } @@ -958,6 +968,9 @@ func (p *StringType) String() string { return fmt.Sprintf("StringType(%+v)", *p) } +func (p *StringType) Validate() error { + return nil +} type UUIDType struct { } @@ -1018,6 +1031,9 @@ func (p *UUIDType) String() string { return fmt.Sprintf("UUIDType(%+v)", *p) } +func (p *UUIDType) Validate() error { + return nil +} type MapType struct { } @@ -1078,6 +1094,9 @@ func (p *MapType) String() string { return fmt.Sprintf("MapType(%+v)", *p) } +func (p *MapType) Validate() error { + return nil +} type ListType struct { } @@ -1138,6 +1157,9 @@ func (p *ListType) String() string { return fmt.Sprintf("ListType(%+v)", *p) } +func (p *ListType) Validate() error { + return nil +} type EnumType struct { } @@ -1198,6 +1220,9 @@ func (p *EnumType) String() string { return fmt.Sprintf("EnumType(%+v)", *p) } +func (p *EnumType) Validate() error { + return nil +} type DateType struct { } @@ -1258,6 +1283,72 @@ func (p *DateType) String() string { return fmt.Sprintf("DateType(%+v)", *p) } +func (p *DateType) Validate() error { + return nil +} +type Float16Type struct { +} + +func NewFloat16Type() *Float16Type { + return &Float16Type{} +} + +func (p *Float16Type) Read(ctx context.Context, iprot thrift.TProtocol) error { + if _, err := iprot.ReadStructBegin(ctx); err != nil { + return thrift.PrependError(fmt.Sprintf("%T read error: ", p), err) + } + + + for { + _, fieldTypeId, fieldId, err := iprot.ReadFieldBegin(ctx) + if err != nil { + return thrift.PrependError(fmt.Sprintf("%T field %d read error: ", p, fieldId), err) + } + if fieldTypeId == thrift.STOP { break; } + if err := iprot.Skip(ctx, fieldTypeId); err != nil { + return err + } + if err := 
iprot.ReadFieldEnd(ctx); err != nil { + return err + } + } + if err := iprot.ReadStructEnd(ctx); err != nil { + return thrift.PrependError(fmt.Sprintf("%T read struct end error: ", p), err) + } + return nil +} + +func (p *Float16Type) Write(ctx context.Context, oprot thrift.TProtocol) error { + if err := oprot.WriteStructBegin(ctx, "Float16Type"); err != nil { + return thrift.PrependError(fmt.Sprintf("%T write struct begin error: ", p), err) } + if p != nil { + } + if err := oprot.WriteFieldStop(ctx); err != nil { + return thrift.PrependError("write field stop error: ", err) } + if err := oprot.WriteStructEnd(ctx); err != nil { + return thrift.PrependError("write struct stop error: ", err) } + return nil +} + +func (p *Float16Type) Equals(other *Float16Type) bool { + if p == other { + return true + } else if p == nil || other == nil { + return false + } + return true +} + +func (p *Float16Type) String() string { + if p == nil { + return "" + } + return fmt.Sprintf("Float16Type(%+v)", *p) +} + +func (p *Float16Type) Validate() error { + return nil +} // Logical type to annotate a column that is always null. 
// // Sometimes when discovering the schema of existing data, values are always @@ -1323,6 +1414,9 @@ func (p *NullType) String() string { return fmt.Sprintf("NullType(%+v)", *p) } +func (p *NullType) Validate() error { + return nil +} // Decimal logical type annotation // // To maintain forward-compatibility in v1, implementations using this logical @@ -1478,6 +1572,9 @@ func (p *DecimalType) String() string { return fmt.Sprintf("DecimalType(%+v)", *p) } +func (p *DecimalType) Validate() error { + return nil +} // Time units for logical types type MilliSeconds struct { } @@ -1539,6 +1636,9 @@ func (p *MilliSeconds) String() string { return fmt.Sprintf("MilliSeconds(%+v)", *p) } +func (p *MilliSeconds) Validate() error { + return nil +} type MicroSeconds struct { } @@ -1599,6 +1699,9 @@ func (p *MicroSeconds) String() string { return fmt.Sprintf("MicroSeconds(%+v)", *p) } +func (p *MicroSeconds) Validate() error { + return nil +} type NanoSeconds struct { } @@ -1659,6 +1762,9 @@ func (p *NanoSeconds) String() string { return fmt.Sprintf("NanoSeconds(%+v)", *p) } +func (p *NanoSeconds) Validate() error { + return nil +} // Attributes: // - MILLIS // - MICROS @@ -1879,6 +1985,9 @@ func (p *TimeUnit) String() string { return fmt.Sprintf("TimeUnit(%+v)", *p) } +func (p *TimeUnit) Validate() error { + return nil +} // Timestamp logical type annotation // // Allowed for physical types: INT64 @@ -2038,6 +2147,9 @@ func (p *TimestampType) String() string { return fmt.Sprintf("TimestampType(%+v)", *p) } +func (p *TimestampType) Validate() error { + return nil +} // Time logical type annotation // // Allowed for physical types: INT32 (millis), INT64 (micros, nanos) @@ -2197,6 +2309,9 @@ func (p *TimeType) String() string { return fmt.Sprintf("TimeType(%+v)", *p) } +func (p *TimeType) Validate() error { + return nil +} // Integer logical type annotation // // bitWidth must be 8, 16, 32, or 64. 
@@ -2352,6 +2467,9 @@ func (p *IntType) String() string { return fmt.Sprintf("IntType(%+v)", *p) } +func (p *IntType) Validate() error { + return nil +} // Embedded JSON logical type annotation // // Allowed for physical types: BINARY @@ -2415,6 +2533,9 @@ func (p *JsonType) String() string { return fmt.Sprintf("JsonType(%+v)", *p) } +func (p *JsonType) Validate() error { + return nil +} // Embedded BSON logical type annotation // // Allowed for physical types: BINARY @@ -2478,11 +2599,14 @@ func (p *BsonType) String() string { return fmt.Sprintf("BsonType(%+v)", *p) } +func (p *BsonType) Validate() error { + return nil +} // LogicalType annotations to replace ConvertedType. // // To maintain compatibility, implementations using LogicalType for a -// SchemaElement must also set the corresponding ConvertedType from the -// following table. +// SchemaElement must also set the corresponding ConvertedType (if any) +// from the following table. // // Attributes: // - STRING @@ -2498,6 +2622,7 @@ func (p *BsonType) String() string { // - JSON // - BSON // - UUID +// - FLOAT16 type LogicalType struct { STRING *StringType `thrift:"STRING,1" db:"STRING" json:"STRING,omitempty"` MAP *MapType `thrift:"MAP,2" db:"MAP" json:"MAP,omitempty"` @@ -2513,6 +2638,7 @@ type LogicalType struct { JSON *JsonType `thrift:"JSON,12" db:"JSON" json:"JSON,omitempty"` BSON *BsonType `thrift:"BSON,13" db:"BSON" json:"BSON,omitempty"` UUID *UUIDType `thrift:"UUID,14" db:"UUID" json:"UUID,omitempty"` + FLOAT16 *Float16Type `thrift:"FLOAT16,15" db:"FLOAT16" json:"FLOAT16,omitempty"` } func NewLogicalType() *LogicalType { @@ -2610,6 +2736,13 @@ func (p *LogicalType) GetUUID() *UUIDType { } return p.UUID } +var LogicalType_FLOAT16_DEFAULT *Float16Type +func (p *LogicalType) GetFLOAT16() *Float16Type { + if !p.IsSetFLOAT16() { + return LogicalType_FLOAT16_DEFAULT + } +return p.FLOAT16 +} func (p *LogicalType) CountSetFieldsLogicalType() int { count := 0 if (p.IsSetSTRING()) { @@ -2651,6 +2784,9 @@ 
func (p *LogicalType) CountSetFieldsLogicalType() int { if (p.IsSetUUID()) { count++ } + if (p.IsSetFLOAT16()) { + count++ + } return count } @@ -2707,6 +2843,10 @@ func (p *LogicalType) IsSetUUID() bool { return p.UUID != nil } +func (p *LogicalType) IsSetFLOAT16() bool { + return p.FLOAT16 != nil +} + func (p *LogicalType) Read(ctx context.Context, iprot thrift.TProtocol) error { if _, err := iprot.ReadStructBegin(ctx); err != nil { return thrift.PrependError(fmt.Sprintf("%T read error: ", p), err) @@ -2850,6 +2990,16 @@ func (p *LogicalType) Read(ctx context.Context, iprot thrift.TProtocol) error { return err } } + case 15: + if fieldTypeId == thrift.STRUCT { + if err := p.ReadField15(ctx, iprot); err != nil { + return err + } + } else { + if err := iprot.Skip(ctx, fieldTypeId); err != nil { + return err + } + } default: if err := iprot.Skip(ctx, fieldTypeId); err != nil { return err @@ -2969,6 +3119,14 @@ func (p *LogicalType) ReadField14(ctx context.Context, iprot thrift.TProtocol) return nil } +func (p *LogicalType) ReadField15(ctx context.Context, iprot thrift.TProtocol) error { + p.FLOAT16 = &Float16Type{} + if err := p.FLOAT16.Read(ctx, iprot); err != nil { + return thrift.PrependError(fmt.Sprintf("%T error reading struct: ", p.FLOAT16), err) + } + return nil +} + func (p *LogicalType) Write(ctx context.Context, oprot thrift.TProtocol) error { if c := p.CountSetFieldsLogicalType(); c != 1 { return fmt.Errorf("%T write union: exactly one field must be set (%d set)", p, c) @@ -2989,6 +3147,7 @@ func (p *LogicalType) Write(ctx context.Context, oprot thrift.TProtocol) error { if err := p.writeField12(ctx, oprot); err != nil { return err } if err := p.writeField13(ctx, oprot); err != nil { return err } if err := p.writeField14(ctx, oprot); err != nil { return err } + if err := p.writeField15(ctx, oprot); err != nil { return err } } if err := oprot.WriteFieldStop(ctx); err != nil { return thrift.PrependError("write field stop error: ", err) } @@ -3166,6 +3325,19 
@@ func (p *LogicalType) writeField14(ctx context.Context, oprot thrift.TProtocol) return err } +func (p *LogicalType) writeField15(ctx context.Context, oprot thrift.TProtocol) (err error) { + if p.IsSetFLOAT16() { + if err := oprot.WriteFieldBegin(ctx, "FLOAT16", thrift.STRUCT, 15); err != nil { + return thrift.PrependError(fmt.Sprintf("%T write field begin error 15:FLOAT16: ", p), err) } + if err := p.FLOAT16.Write(ctx, oprot); err != nil { + return thrift.PrependError(fmt.Sprintf("%T error writing struct: ", p.FLOAT16), err) + } + if err := oprot.WriteFieldEnd(ctx); err != nil { + return thrift.PrependError(fmt.Sprintf("%T write field end error 15:FLOAT16: ", p), err) } + } + return err +} + func (p *LogicalType) Equals(other *LogicalType) bool { if p == other { return true @@ -3185,6 +3357,7 @@ func (p *LogicalType) Equals(other *LogicalType) bool { if !p.JSON.Equals(other.JSON) { return false } if !p.BSON.Equals(other.BSON) { return false } if !p.UUID.Equals(other.UUID) { return false } + if !p.FLOAT16.Equals(other.FLOAT16) { return false } return true } @@ -3195,6 +3368,9 @@ func (p *LogicalType) String() string { return fmt.Sprintf("LogicalType(%+v)", *p) } +func (p *LogicalType) Validate() error { + return nil +} // Represents a element inside a schema definition. // - if it is a group (inner node) then type is undefined and num_children is defined // - if it is a primitive type (leaf) then type is defined and num_children is undefined @@ -3202,7 +3378,7 @@ func (p *LogicalType) String() string { // // Attributes: // - Type: Data type for this field. Not set if the current element is a non-leaf node -// - TypeLength: If type is FIXED_LEN_BYTE_ARRAY, this is the byte length of the vales. +// - TypeLength: If type is FIXED_LEN_BYTE_ARRAY, this is the byte length of the values. // Otherwise, if specified, this is the maximum bit length to store any of the values. // (e.g. a low cardinality INT col could have this set to 3). 
Note that this is // in the schema, and therefore fixed for the entire file. @@ -3213,10 +3389,14 @@ func (p *LogicalType) String() string { // the nesting is flattened to a single list by a depth-first traversal. // The children count is used to construct the nested relationship. // This field is not set when the element is a primitive type -// - ConvertedType: When the schema is the result of a conversion from another model +// - ConvertedType: DEPRECATED: When the schema is the result of a conversion from another model. // Used to record the original type to help with cross conversion. -// - Scale: Used when this column contains decimal data. +// +// This is superseded by logicalType. +// - Scale: DEPRECATED: Used when this column contains decimal data. // See the DECIMAL converted type for more details. +// +// This is superseded by using the DecimalType annotation in logicalType. // - Precision // - FieldID: When the original schema supports field ids, this will save the // original field id in the parquet schema @@ -3776,6 +3956,9 @@ func (p *SchemaElement) String() string { return fmt.Sprintf("SchemaElement(%+v)", *p) } +func (p *SchemaElement) Validate() error { + return nil +} // Data page header // // Attributes: @@ -4059,6 +4242,9 @@ func (p *DataPageHeader) String() string { return fmt.Sprintf("DataPageHeader(%+v)", *p) } +func (p *DataPageHeader) Validate() error { + return nil +} type IndexPageHeader struct { } @@ -4119,6 +4305,14 @@ func (p *IndexPageHeader) String() string { return fmt.Sprintf("IndexPageHeader(%+v)", *p) } +func (p *IndexPageHeader) Validate() error { + return nil +} +// The dictionary page must be placed at the first position of the column chunk +// if it is partly or completely dictionary encoded. At most one dictionary page +// can be placed in a column chunk. 
+// +// // Attributes: // - NumValues: Number of values in the dictionary * // - Encoding: Encoding using this dictionary page * @@ -4319,6 +4513,9 @@ func (p *DictionaryPageHeader) String() string { return fmt.Sprintf("DictionaryPageHeader(%+v)", *p) } +func (p *DictionaryPageHeader) Validate() error { + return nil +} // New page format allowing reading levels without decompressing the data // Repetition and definition levels are uncompressed // The remaining section containing the data is compressed if is_compressed is true @@ -4738,6 +4935,9 @@ func (p *DataPageHeaderV2) String() string { return fmt.Sprintf("DataPageHeaderV2(%+v)", *p) } +func (p *DataPageHeaderV2) Validate() error { + return nil +} // Block-based algorithm type annotation. * type SplitBlockAlgorithm struct { } @@ -4799,6 +4999,9 @@ func (p *SplitBlockAlgorithm) String() string { return fmt.Sprintf("SplitBlockAlgorithm(%+v)", *p) } +func (p *SplitBlockAlgorithm) Validate() error { + return nil +} // The algorithm used in Bloom filter. * // // Attributes: @@ -4923,6 +5126,9 @@ func (p *BloomFilterAlgorithm) String() string { return fmt.Sprintf("BloomFilterAlgorithm(%+v)", *p) } +func (p *BloomFilterAlgorithm) Validate() error { + return nil +} // Hash strategy type annotation. xxHash is an extremely fast non-cryptographic hash // algorithm. It uses 64 bits version of xxHash. // @@ -4986,6 +5192,9 @@ func (p *XxHash) String() string { return fmt.Sprintf("XxHash(%+v)", *p) } +func (p *XxHash) Validate() error { + return nil +} // The hash function used in Bloom filter. This function takes the hash of a column value // using plain encoding. // @@ -5112,6 +5321,9 @@ func (p *BloomFilterHash) String() string { return fmt.Sprintf("BloomFilterHash(%+v)", *p) } +func (p *BloomFilterHash) Validate() error { + return nil +} // The compression used in the Bloom filter. 
// type Uncompressed struct { @@ -5174,6 +5386,9 @@ func (p *Uncompressed) String() string { return fmt.Sprintf("Uncompressed(%+v)", *p) } +func (p *Uncompressed) Validate() error { + return nil +} // Attributes: // - UNCOMPRESSED type BloomFilterCompression struct { @@ -5296,6 +5511,9 @@ func (p *BloomFilterCompression) String() string { return fmt.Sprintf("BloomFilterCompression(%+v)", *p) } +func (p *BloomFilterCompression) Validate() error { + return nil +} // Bloom filter header is stored at beginning of Bloom filter data of each column // and followed by its bitset. // @@ -5553,36 +5771,29 @@ func (p *BloomFilterHeader) String() string { return fmt.Sprintf("BloomFilterHeader(%+v)", *p) } +func (p *BloomFilterHeader) Validate() error { + return nil +} // Attributes: // - Type: the type of the page: indicates which of the *_header fields is set * // - UncompressedPageSize: Uncompressed page size in bytes (not including this header) * // - CompressedPageSize: Compressed (and potentially encrypted) page size in bytes, not including this header * -// - Crc: The 32bit CRC for the page, to be be calculated as follows: -// - Using the standard CRC32 algorithm -// - On the data only, i.e. this header should not be included. 'Data' -// hereby refers to the concatenation of the repetition levels, the -// definition levels and the column value, in this exact order. -// - On the encoded versions of the repetition levels, definition levels and -// column values -// - On the compressed versions of the repetition levels, definition levels -// and column values where possible; -// - For v1 data pages, the repetition levels, definition levels and column -// values are always compressed together. If a compression scheme is -// specified, the CRC shall be calculated on the compressed version of -// this concatenation. If no compression scheme is specified, the CRC -// shall be calculated on the uncompressed version of this concatenation. 
-// - For v2 data pages, the repetition levels and definition levels are -// handled separately from the data and are never compressed (only -// encoded). If a compression scheme is specified, the CRC shall be -// calculated on the concatenation of the uncompressed repetition levels, -// uncompressed definition levels and the compressed column values. -// If no compression scheme is specified, the CRC shall be calculated on -// the uncompressed concatenation. -// - In encrypted columns, CRC is calculated after page encryption; the -// encryption itself is performed after page compression (if compressed) +// - Crc: The 32-bit CRC checksum for the page, to be be calculated as follows: +// +// - The standard CRC32 algorithm is used (with polynomial 0x04C11DB7, +// the same as in e.g. GZip). +// - All page types can have a CRC (v1 and v2 data pages, dictionary pages, +// etc.). +// - The CRC is computed on the serialization binary representation of the page +// (as written to disk), excluding the page header. For example, for v1 +// data pages, the CRC is computed on the concatenation of repetition levels, +// definition levels and column values (optionally compressed, optionally +// encrypted). +// - The CRC computation therefore takes place after any compression +// and encryption steps, if any. +// // If enabled, this allows for disabling checksumming in HDFS if only a few // pages need to be read. 
-// // - DataPageHeader // - IndexPageHeader // - DictionaryPageHeader @@ -6006,6 +6217,9 @@ func (p *PageHeader) String() string { return fmt.Sprintf("PageHeader(%+v)", *p) } +func (p *PageHeader) Validate() error { + return nil +} // Wrapper struct to store key values // // Attributes: @@ -6165,6 +6379,9 @@ func (p *KeyValue) String() string { return fmt.Sprintf("KeyValue(%+v)", *p) } +func (p *KeyValue) Validate() error { + return nil +} // Wrapper struct to specify sort order // // Attributes: @@ -6358,6 +6575,9 @@ func (p *SortingColumn) String() string { return fmt.Sprintf("SortingColumn(%+v)", *p) } +func (p *SortingColumn) Validate() error { + return nil +} // statistics of a given page type and encoding // // Attributes: @@ -6552,6 +6772,9 @@ func (p *PageEncodingStats) String() string { return fmt.Sprintf("PageEncodingStats(%+v)", *p) } +func (p *PageEncodingStats) Validate() error { + return nil +} // Description for column metadata // // Attributes: @@ -7346,6 +7569,9 @@ func (p *ColumnMetaData) String() string { return fmt.Sprintf("ColumnMetaData(%+v)", *p) } +func (p *ColumnMetaData) Validate() error { + return nil +} type EncryptionWithFooterKey struct { } @@ -7406,6 +7632,9 @@ func (p *EncryptionWithFooterKey) String() string { return fmt.Sprintf("EncryptionWithFooterKey(%+v)", *p) } +func (p *EncryptionWithFooterKey) Validate() error { + return nil +} // Attributes: // - PathInSchema: Column path in schema * // - KeyMetadata: Retrieval metadata of column encryption key * @@ -7581,6 +7810,9 @@ func (p *EncryptionWithColumnKey) String() string { return fmt.Sprintf("EncryptionWithColumnKey(%+v)", *p) } +func (p *EncryptionWithColumnKey) Validate() error { + return nil +} // Attributes: // - ENCRYPTION_WITH_FOOTER_KEY // - ENCRYPTION_WITH_COLUMN_KEY @@ -7752,6 +7984,9 @@ func (p *ColumnCryptoMetaData) String() string { return fmt.Sprintf("ColumnCryptoMetaData(%+v)", *p) } +func (p *ColumnCryptoMetaData) Validate() error { + return nil +} // Attributes: 
// - FilePath: File where column data is stored. If not set, assumed to be same file as // metadata. This path is relative to the current file. @@ -8254,6 +8489,9 @@ func (p *ColumnChunk) String() string { return fmt.Sprintf("ColumnChunk(%+v)", *p) } +func (p *ColumnChunk) Validate() error { + return nil +} // Attributes: // - Columns: Metadata for each column chunk in this row group. // This list must have the same order as the SchemaElement list in FileMetaData. @@ -8694,6 +8932,9 @@ func (p *RowGroup) String() string { return fmt.Sprintf("RowGroup(%+v)", *p) } +func (p *RowGroup) Validate() error { + return nil +} // Empty struct to signal the order defined by the physical or logical type type TypeDefinedOrder struct { } @@ -8755,6 +8996,9 @@ func (p *TypeDefinedOrder) String() string { return fmt.Sprintf("TypeDefinedOrder(%+v)", *p) } +func (p *TypeDefinedOrder) Validate() error { + return nil +} // Union to specify the order used for the min_value and max_value fields for a // column. This union takes the role of an enhanced enum that allows rich // elements (which will be needed for a collation-based ordering in the future). @@ -8808,6 +9052,13 @@ func (p *TypeDefinedOrder) String() string { // - If the min is +0, the row group may contain -0 values as well. // - If the max is -0, the row group may contain +0 values as well. // - When looking for NaN values, min and max should be ignored. +// +// When writing statistics the following rules should be followed: +// - NaNs should not be written to min or max statistics fields. +// - If the computed max value is zero (whether negative or positive), +// `+0.0` should be written into the max statistics field. +// - If the computed min value is zero (whether negative or positive), +// `-0.0` should be written into the min statistics field. 
type ColumnOrder struct { TYPE_ORDER *TypeDefinedOrder `thrift:"TYPE_ORDER,1" db:"TYPE_ORDER" json:"TYPE_ORDER,omitempty"` } @@ -8928,6 +9179,9 @@ func (p *ColumnOrder) String() string { return fmt.Sprintf("ColumnOrder(%+v)", *p) } +func (p *ColumnOrder) Validate() error { + return nil +} // Attributes: // - Offset: Offset of the page in the file * // - CompressedPageSize: Size of the page, including header. Sum of compressed_page_size and header @@ -9120,6 +9374,9 @@ func (p *PageLocation) String() string { return fmt.Sprintf("PageLocation(%+v)", *p) } +func (p *PageLocation) Validate() error { + return nil +} // Attributes: // - PageLocations: PageLocations, ordered by increasing PageLocation.offset. It is required // that page_locations[i].first_row_index < page_locations[i+1].first_row_index. @@ -9251,6 +9508,9 @@ func (p *OffsetIndex) String() string { return fmt.Sprintf("OffsetIndex(%+v)", *p) } +func (p *OffsetIndex) Validate() error { + return nil +} // Description for ColumnIndex. // Each [i] refers to the page at OffsetIndex.page_locations[i] // @@ -9260,15 +9520,16 @@ func (p *OffsetIndex) String() string { // have to set the corresponding entries in min_values and max_values to // byte[0], so that all lists have the same length. If false, the // corresponding entries in min_values and max_values must be valid. -// - MinValues: Two lists containing lower and upper bounds for the values of each page. -// These may be the actual minimum and maximum values found on a page, but -// can also be (more compact) values that do not exist on a page. For -// example, instead of storing ""Blart Versenwald III", a writer may set -// min_values[i]="B", max_values[i]="C". Such more compact values must still -// be valid values within the column's logical type. Readers must make sure -// that list entries are populated before using them by inspecting null_pages. 
+// - MinValues: Two lists containing lower and upper bounds for the values of each page +// determined by the ColumnOrder of the column. These may be the actual +// minimum and maximum values found on a page, but can also be (more compact) +// values that do not exist on a page. For example, instead of storing ""Blart +// Versenwald III", a writer may set min_values[i]="B", max_values[i]="C". +// Such more compact values must still be valid values within the column's +// logical type. Readers must make sure that list entries are populated before +// using them by inspecting null_pages. // - MaxValues -// - BoundaryOrder: Stores whether both min_values and max_values are orderd and if so, in +// - BoundaryOrder: Stores whether both min_values and max_values are ordered and if so, in // which direction. This allows readers to perform binary searches in both // lists. Readers cannot assume that max_values[i] <= min_values[i+1], even // if the lists are ordered. @@ -9644,6 +9905,9 @@ func (p *ColumnIndex) String() string { return fmt.Sprintf("ColumnIndex(%+v)", *p) } +func (p *ColumnIndex) Validate() error { + return nil +} // Attributes: // - AadPrefix: AAD prefix * // - AadFileUnique: Unique file identifier part of AAD suffix * @@ -9848,6 +10112,9 @@ func (p *AesGcmV1) String() string { return fmt.Sprintf("AesGcmV1(%+v)", *p) } +func (p *AesGcmV1) Validate() error { + return nil +} // Attributes: // - AadPrefix: AAD prefix * // - AadFileUnique: Unique file identifier part of AAD suffix * @@ -10052,6 +10319,9 @@ func (p *AesGcmCtrV1) String() string { return fmt.Sprintf("AesGcmCtrV1(%+v)", *p) } +func (p *AesGcmCtrV1) Validate() error { + return nil +} // Attributes: // - AES_GCM_V1 // - AES_GCM_CTR_V1 @@ -10223,6 +10493,9 @@ func (p *EncryptionAlgorithm) String() string { return fmt.Sprintf("EncryptionAlgorithm(%+v)", *p) } +func (p *EncryptionAlgorithm) Validate() error { + return nil +} // Description for file metadata // // Attributes: @@ -10240,17 +10513,20 @@ 
func (p *EncryptionAlgorithm) String() string { // version (build ). // e.g. impala version 1.0 (build 6cf94d29b2b7115df4de2c06e2ab4326d721eb55) // -// - ColumnOrders: Sort order used for the min_value and max_value fields of each column in -// this file. Sort orders are listed in the order matching the columns in the -// schema. The indexes are not necessary the same though, because only leaf -// nodes of the schema are represented in the list of sort orders. +// - ColumnOrders: Sort order used for the min_value and max_value fields in the Statistics +// objects and the min_values and max_values fields in the ColumnIndex +// objects of each column in this file. Sort orders are listed in the order +// matching the columns in the schema. The indexes are not necessary the same +// though, because only leaf nodes of the schema are represented in the list +// of sort orders. // -// Without column_orders, the meaning of the min_value and max_value fields is -// undefined. To ensure well-defined behaviour, if min_value and max_value are -// written to a Parquet file, column_orders must be written as well. +// Without column_orders, the meaning of the min_value and max_value fields +// in the Statistics object and the ColumnIndex object is undefined. To ensure +// well-defined behaviour, if these fields are written to a Parquet file, +// column_orders must be written as well. // -// The obsolete min and max fields are always sorted by signed comparison -// regardless of column_orders. +// The obsolete min and max fields in the Statistics object are always sorted +// by signed comparison regardless of column_orders. // - EncryptionAlgorithm: Encryption algorithm. This field is set only in encrypted files // with plaintext footer. Files with encrypted footer store algorithm id // in FileCryptoMetaData structure. 
@@ -10803,6 +11079,9 @@ func (p *FileMetaData) String() string { return fmt.Sprintf("FileMetaData(%+v)", *p) } +func (p *FileMetaData) Validate() error { + return nil +} // Crypto metadata for files with encrypted footer * // // Attributes: @@ -10965,3 +11244,6 @@ func (p *FileCryptoMetaData) String() string { return fmt.Sprintf("FileCryptoMetaData(%+v)", *p) } +func (p *FileCryptoMetaData) Validate() error { + return nil +} diff --git a/go/parquet/internal/testutils/pagebuilder.go b/go/parquet/internal/testutils/pagebuilder.go index 9c1fd72a1bdf0..48ac331640087 100644 --- a/go/parquet/internal/testutils/pagebuilder.go +++ b/go/parquet/internal/testutils/pagebuilder.go @@ -22,13 +22,13 @@ import ( "io" "reflect" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/compress" - "github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/compress" + "github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" + "github.com/apache/arrow/go/v15/parquet/schema" "github.com/stretchr/testify/mock" ) diff --git a/go/parquet/internal/testutils/primitive_typed.go b/go/parquet/internal/testutils/primitive_typed.go index 2bd0a29dd2970..daab0d427ba10 100644 --- a/go/parquet/internal/testutils/primitive_typed.go +++ b/go/parquet/internal/testutils/primitive_typed.go @@ -20,11 +20,11 @@ import ( "fmt" "reflect" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/metadata" - 
"github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/metadata" + "github.com/apache/arrow/go/v15/parquet/schema" ) type PrimitiveTypedTest struct { diff --git a/go/parquet/internal/testutils/random.go b/go/parquet/internal/testutils/random.go index 2c8a2809dc784..bb9ee0cdf2bba 100644 --- a/go/parquet/internal/testutils/random.go +++ b/go/parquet/internal/testutils/random.go @@ -24,13 +24,14 @@ import ( "time" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/endian" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/pqarrow" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/endian" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/pqarrow" "golang.org/x/exp/rand" "gonum.org/v1/gonum/stat/distuv" @@ -369,6 +370,17 @@ func randFloat64(r *rand.Rand) float64 { } } +// randFloat16 creates a random float value with a normal distribution +// to better spread the values out and ensure we do not return any NaN or Inf values. +func randFloat16(r *rand.Rand) float16.Num { + for { + f := float16.FromBits(uint16(r.Uint64n(math.MaxUint16 + 1))) + if !f.IsNaN() { + return f + } + } +} + // FillRandomFloat32 populates out with random float32 values using seed as the random // seed for the generator to allow consistency for testing. 
func FillRandomFloat32(seed uint64, out []float32) { @@ -387,6 +399,15 @@ func FillRandomFloat64(seed uint64, out []float64) { } } +// FillRandomFloat16 populates out with random float16 values using seed as the random +// seed for the generator to allow consistency for testing. +func FillRandomFloat16(seed uint64, out []float16.Num) { + r := rand.New(rand.NewSource(seed)) + for idx := range out { + out[idx] = randFloat16(r) + } +} + // FillRandomByteArray populates out with random ByteArray values with lengths between 2 and 12 // using heap as the actual memory storage used for the bytes generated. Each element of // out will be some slice of the bytes in heap, and as such heap must outlive the byte array slices. @@ -438,15 +459,16 @@ func fillRandomIsValid(seed uint64, pctNull float64, out []bool) { // If the type is parquet.ByteArray or parquet.FixedLenByteArray, heap must not be null. // // The default values are: -// []bool uses the current time as the seed with only values of 1 being false, for use -// of creating validity boolean slices. -// all other types use 0 as the seed -// a []parquet.ByteArray is populated with lengths between 2 and 12 -// a []parquet.FixedLenByteArray is populated with fixed size random byte arrays of length 12. +// +// []bool uses the current time as the seed with only values of 1 being false, for use +// of creating validity boolean slices. +// all other types use 0 as the seed +// a []parquet.ByteArray is populated with lengths between 2 and 12 +// a []parquet.FixedLenByteArray is populated with fixed size random byte arrays of length 12.
func InitValues(values interface{}, heap *memory.Buffer) { switch arr := values.(type) { case []bool: - fillRandomIsValid(uint64(time.Now().Unix()), 1.0, arr) + fillRandomIsValid(uint64(time.Now().Unix()), 0.5, arr) case []int32: FillRandomInt32(0, arr) case []int64: @@ -455,6 +477,8 @@ func InitValues(values interface{}, heap *memory.Buffer) { FillRandomFloat32(0, arr) case []float64: FillRandomFloat64(0, arr) + case []float16.Num: + FillRandomFloat16(0, arr) case []parquet.Int96: FillRandomInt96(0, arr) case []parquet.ByteArray: diff --git a/go/parquet/internal/testutils/random_arrow.go b/go/parquet/internal/testutils/random_arrow.go index 360b8e7476430..7dd2a3e8b77e3 100644 --- a/go/parquet/internal/testutils/random_arrow.go +++ b/go/parquet/internal/testutils/random_arrow.go @@ -17,9 +17,10 @@ package testutils import ( - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/arrow/memory" "golang.org/x/exp/rand" ) @@ -49,6 +50,13 @@ func RandomNonNull(mem memory.Allocator, dt arrow.DataType, size int) arrow.Arra FillRandomFloat64(0, values) bldr.AppendValues(values, nil) return bldr.NewArray() + case arrow.FLOAT16: + bldr := array.NewFloat16Builder(mem) + defer bldr.Release() + values := make([]float16.Num, size) + FillRandomFloat16(0, values) + bldr.AppendValues(values, nil) + return bldr.NewArray() case arrow.INT64: bldr := array.NewInt64Builder(mem) defer bldr.Release() @@ -212,6 +220,21 @@ func RandomNullable(dt arrow.DataType, size int, numNulls int) arrow.Array { values := make([]float64, size) FillRandomFloat64(0, values) + valid := make([]bool, size) + for idx := range valid { + valid[idx] = true + } + for i := 0; i < numNulls; i++ { + valid[i*2] = false + } + bldr.AppendValues(values, valid) + return 
bldr.NewArray() + case arrow.FLOAT16: + bldr := array.NewFloat16Builder(memory.DefaultAllocator) + defer bldr.Release() + values := make([]float16.Num, size) + FillRandomFloat16(0, values) + valid := make([]bool, size) for idx := range valid { valid[idx] = true diff --git a/go/parquet/internal/testutils/utils.go b/go/parquet/internal/testutils/utils.go index 1122f4286f386..3da76c17ddc32 100644 --- a/go/parquet/internal/testutils/utils.go +++ b/go/parquet/internal/testutils/utils.go @@ -19,7 +19,7 @@ package testutils import ( "reflect" - "github.com/apache/arrow/go/v14/parquet" + "github.com/apache/arrow/go/v15/parquet" ) var typeToParquetTypeMap = map[reflect.Type]parquet.Type{ diff --git a/go/parquet/internal/thrift/helpers.go b/go/parquet/internal/thrift/helpers.go index 357d9020989d3..3835830ac6c2d 100644 --- a/go/parquet/internal/thrift/helpers.go +++ b/go/parquet/internal/thrift/helpers.go @@ -23,7 +23,7 @@ import ( "context" "io" - "github.com/apache/arrow/go/v14/parquet/internal/encryption" + "github.com/apache/arrow/go/v15/parquet/internal/encryption" "github.com/apache/thrift/lib/go/thrift" ) diff --git a/go/parquet/internal/utils/bit_benchmark_test.go b/go/parquet/internal/utils/bit_benchmark_test.go index 2518e48a4baa8..14353380a5694 100644 --- a/go/parquet/internal/utils/bit_benchmark_test.go +++ b/go/parquet/internal/utils/bit_benchmark_test.go @@ -20,9 +20,9 @@ import ( "strconv" "testing" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/internal/bitutils" - "github.com/apache/arrow/go/v14/parquet/internal/testutils" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/internal/bitutils" + "github.com/apache/arrow/go/v15/parquet/internal/testutils" ) type linearBitRunReader struct { diff --git a/go/parquet/internal/utils/bit_packing_arm64.go b/go/parquet/internal/utils/bit_packing_arm64.go index 92fe0f97b77c3..e9fb2ef1d810b 100644 --- a/go/parquet/internal/utils/bit_packing_arm64.go +++ 
b/go/parquet/internal/utils/bit_packing_arm64.go @@ -23,7 +23,7 @@ import ( "github.com/klauspost/cpuid/v2" // import for side effect of initializing feature flags // based on ARM_ENABLE_EXT env var - _ "github.com/apache/arrow/go/v14/parquet/internal/bmi" + _ "github.com/apache/arrow/go/v15/parquet/internal/bmi" ) func init() { diff --git a/go/parquet/internal/utils/bit_reader.go b/go/parquet/internal/utils/bit_reader.go index cf61395c0b16c..bf9741c79878b 100644 --- a/go/parquet/internal/utils/bit_reader.go +++ b/go/parquet/internal/utils/bit_reader.go @@ -24,10 +24,10 @@ import ( "reflect" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/utils" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/utils" ) // masks for grabbing the trailing bits based on the number of trailing bits desired diff --git a/go/parquet/internal/utils/bit_reader_test.go b/go/parquet/internal/utils/bit_reader_test.go index 317cc4960afe2..5ce1b799b463b 100644 --- a/go/parquet/internal/utils/bit_reader_test.go +++ b/go/parquet/internal/utils/bit_reader_test.go @@ -25,11 +25,11 @@ import ( "strconv" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet/internal/utils" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet/internal/utils" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" "golang.org/x/exp/rand" @@ -494,7 +494,7 @@ func (r *RLERandomSuite) 
checkRoundTrip(vals []uint64, width int) bool { func (r *RLERandomSuite) checkRoundTripSpaced(vals arrow.Array, width int) { nvalues := vals.Len() - bufsize := utils.MaxBufferSize(width, nvalues) + bufsize := utils.MaxRLEBufferSize(width, nvalues) buffer := make([]byte, bufsize) encoder := utils.NewRleEncoder(utils.NewWriterAtBuffer(buffer), width) diff --git a/go/parquet/internal/utils/bit_writer.go b/go/parquet/internal/utils/bit_writer.go index 99a1db065590d..6cb255f5b0473 100644 --- a/go/parquet/internal/utils/bit_writer.go +++ b/go/parquet/internal/utils/bit_writer.go @@ -21,7 +21,7 @@ import ( "io" "log" - "github.com/apache/arrow/go/v14/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/bitutil" ) // WriterAtBuffer is a convenience struct for providing a WriteAt function diff --git a/go/parquet/internal/utils/bitmap_writer.go b/go/parquet/internal/utils/bitmap_writer.go index 447222139b773..3ef99291e3748 100644 --- a/go/parquet/internal/utils/bitmap_writer.go +++ b/go/parquet/internal/utils/bitmap_writer.go @@ -20,7 +20,7 @@ import ( "encoding/binary" "math/bits" - "github.com/apache/arrow/go/v14/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/bitutil" ) // BitmapWriter is an interface for bitmap writers so that we can use multiple diff --git a/go/parquet/internal/utils/bitmap_writer_test.go b/go/parquet/internal/utils/bitmap_writer_test.go index 9f38f1ef291ea..3dddc7567903e 100644 --- a/go/parquet/internal/utils/bitmap_writer_test.go +++ b/go/parquet/internal/utils/bitmap_writer_test.go @@ -22,8 +22,8 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/parquet/internal/utils" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/parquet/internal/utils" "github.com/stretchr/testify/suite" ) diff --git a/go/parquet/internal/utils/rle.go b/go/parquet/internal/utils/rle.go index 866d7c61b4099..21ac2ce6b5db5 100644 --- a/go/parquet/internal/utils/rle.go +++ 
b/go/parquet/internal/utils/rle.go @@ -24,10 +24,10 @@ import ( "encoding/binary" "math" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/internal/bitutils" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/internal/bitutils" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" "golang.org/x/xerrors" ) @@ -37,13 +37,13 @@ const ( MaxValuesPerLiteralRun = (1 << 6) * 8 ) -func MinBufferSize(bitWidth int) int { +func MinRLEBufferSize(bitWidth int) int { maxLiteralRunSize := 1 + bitutil.BytesForBits(int64(MaxValuesPerLiteralRun*bitWidth)) maxRepeatedRunSize := binary.MaxVarintLen32 + bitutil.BytesForBits(int64(bitWidth)) return int(utils.Max(maxLiteralRunSize, maxRepeatedRunSize)) } -func MaxBufferSize(width, numValues int) int { +func MaxRLEBufferSize(width, numValues int) int { bytesPerRun := width numRuns := int(bitutil.BytesForBits(int64(numValues))) literalMaxSize := numRuns + (numRuns * bytesPerRun) diff --git a/go/parquet/internal/utils/typed_rle_dict.gen.go b/go/parquet/internal/utils/typed_rle_dict.gen.go index dff22711b120f..886d24564db4b 100644 --- a/go/parquet/internal/utils/typed_rle_dict.gen.go +++ b/go/parquet/internal/utils/typed_rle_dict.gen.go @@ -19,9 +19,9 @@ package utils import ( - "github.com/apache/arrow/go/v14/internal/bitutils" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" + "github.com/apache/arrow/go/v15/internal/bitutils" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" "golang.org/x/xerrors" ) diff --git a/go/parquet/internal/utils/typed_rle_dict.gen.go.tmpl b/go/parquet/internal/utils/typed_rle_dict.gen.go.tmpl index 090275745ff59..abcb419055a92 100644 --- a/go/parquet/internal/utils/typed_rle_dict.gen.go.tmpl +++ 
b/go/parquet/internal/utils/typed_rle_dict.gen.go.tmpl @@ -17,9 +17,9 @@ package utils import ( - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/internal/bitutils" - "github.com/apache/arrow/go/v14/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/internal/bitutils" + "github.com/apache/arrow/go/v15/internal/utils" ) {{range .In}} diff --git a/go/parquet/metadata/app_version.go b/go/parquet/metadata/app_version.go index 85ace57365bd3..9966827026106 100644 --- a/go/parquet/metadata/app_version.go +++ b/go/parquet/metadata/app_version.go @@ -21,8 +21,8 @@ import ( "strconv" "strings" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/schema" ) var ( diff --git a/go/parquet/metadata/column_chunk.go b/go/parquet/metadata/column_chunk.go index e6ea6ff1ea183..729f741e1b4f9 100644 --- a/go/parquet/metadata/column_chunk.go +++ b/go/parquet/metadata/column_chunk.go @@ -22,13 +22,13 @@ import ( "io" "reflect" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/compress" - "github.com/apache/arrow/go/v14/parquet/internal/encryption" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/thrift" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/compress" + "github.com/apache/arrow/go/v15/parquet/internal/encryption" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/thrift" + "github.com/apache/arrow/go/v15/parquet/schema" "golang.org/x/xerrors" ) diff --git a/go/parquet/metadata/file.go b/go/parquet/metadata/file.go index 
dddd95c5df670..3335140c2e1c8 100644 --- a/go/parquet/metadata/file.go +++ b/go/parquet/metadata/file.go @@ -24,12 +24,12 @@ import ( "reflect" "unicode/utf8" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/compress" - "github.com/apache/arrow/go/v14/parquet/internal/encryption" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/thrift" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/compress" + "github.com/apache/arrow/go/v15/parquet/internal/encryption" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/thrift" + "github.com/apache/arrow/go/v15/parquet/schema" "golang.org/x/xerrors" ) diff --git a/go/parquet/metadata/metadata_test.go b/go/parquet/metadata/metadata_test.go index b685dd2223274..8caa319f83e63 100644 --- a/go/parquet/metadata/metadata_test.go +++ b/go/parquet/metadata/metadata_test.go @@ -21,9 +21,9 @@ import ( "testing" "unsafe" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/metadata" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/metadata" + "github.com/apache/arrow/go/v15/parquet/schema" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/go/parquet/metadata/row_group.go b/go/parquet/metadata/row_group.go index 16af67241e959..2923720371abe 100644 --- a/go/parquet/metadata/row_group.go +++ b/go/parquet/metadata/row_group.go @@ -20,10 +20,10 @@ import ( "fmt" "reflect" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/encryption" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" - "github.com/apache/arrow/go/v14/parquet/schema" + 
"github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/encryption" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/parquet/schema" ) // RowGroupMetaData is a proxy around the thrift RowGroup meta data object diff --git a/go/parquet/metadata/stat_compare_test.go b/go/parquet/metadata/stat_compare_test.go index d02123cbef039..041696d84d17a 100644 --- a/go/parquet/metadata/stat_compare_test.go +++ b/go/parquet/metadata/stat_compare_test.go @@ -20,8 +20,8 @@ import ( "encoding/binary" "testing" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/schema" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/go/parquet/metadata/statistics.go b/go/parquet/metadata/statistics.go index 245b06a7348c4..43294272dec35 100644 --- a/go/parquet/metadata/statistics.go +++ b/go/parquet/metadata/statistics.go @@ -22,17 +22,18 @@ import ( "math" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/debug" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/debug" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" + 
"github.com/apache/arrow/go/v15/parquet/schema" ) -//go:generate go run ../../arrow/_tools/tmpl/main.go -i -data=../internal/encoding/physical_types.tmpldata statistics_types.gen.go.tmpl +//go:generate go run ../../arrow/_tools/tmpl/main.go -i -data=statistics_types.tmpldata statistics_types.gen.go.tmpl type StatProvider interface { GetMin() []byte @@ -373,6 +374,9 @@ var ( defaultMinUInt96 parquet.Int96 defaultMaxInt96 parquet.Int96 defaultMaxUInt96 parquet.Int96 + + defaultMinFloat16 parquet.FixedLenByteArray = float16.MaxNum.ToLEBytes() + defaultMaxFloat16 parquet.FixedLenByteArray = float16.MinNum.ToLEBytes() ) func init() { @@ -407,6 +411,14 @@ func (s *Int96Statistics) defaultMax() parquet.Int96 { return defaultMaxInt96 } +func (Float16Statistics) defaultMin() parquet.FixedLenByteArray { + return defaultMinFloat16 +} + +func (Float16Statistics) defaultMax() parquet.FixedLenByteArray { + return defaultMaxFloat16 +} + func (Float32Statistics) defaultMin() float32 { return math.MaxFloat32 } func (Float32Statistics) defaultMax() float32 { return -math.MaxFloat32 } func (Float64Statistics) defaultMin() float64 { return math.MaxFloat64 } @@ -427,6 +439,10 @@ func (FixedLenByteArrayStatistics) equal(a, b parquet.FixedLenByteArray) bool { return bytes.Equal(a, b) } +func (Float16Statistics) equal(a, b parquet.FixedLenByteArray) bool { + return float16.FromLEBytes(a).Equal(float16.FromLEBytes(b)) +} + func (BooleanStatistics) less(a, b bool) bool { return !a && b } @@ -481,6 +497,10 @@ func (s *FixedLenByteArrayStatistics) less(a, b parquet.FixedLenByteArray) bool return signedByteLess([]byte(a), []byte(b)) } +func (Float16Statistics) less(a, b parquet.FixedLenByteArray) bool { + return float16.FromLEBytes(a).Less(float16.FromLEBytes(b)) +} + func (BooleanStatistics) cleanStat(minMax minmaxPairBoolean) *minmaxPairBoolean { return &minMax } func (Int32Statistics) cleanStat(minMax minmaxPairInt32) *minmaxPairInt32 { return &minMax } func (Int64Statistics) 
cleanStat(minMax minmaxPairInt64) *minmaxPairInt64 { return &minMax } @@ -535,6 +555,29 @@ func (Float64Statistics) cleanStat(minMax minmaxPairFloat64) *minmaxPairFloat64 return &minMax } +func (Float16Statistics) cleanStat(minMax minmaxPairFloat16) *minmaxPairFloat16 { + min := float16.FromLEBytes(minMax[0][:]) + max := float16.FromLEBytes(minMax[1][:]) + + if min.IsNaN() || max.IsNaN() { + return nil + } + + if min.Equal(float16.MaxNum) && max.Equal(float16.MinNum) { + return nil + } + + zero := float16.New(0) + if min.Equal(zero) && !min.Signbit() { + minMax[0] = min.Negate().ToLEBytes() + } + if max.Equal(zero) && max.Signbit() { + minMax[1] = max.Negate().ToLEBytes() + } + + return &minMax +} + func (ByteArrayStatistics) cleanStat(minMax minmaxPairByteArray) *minmaxPairByteArray { if minMax[0] == nil || minMax[1] == nil { return nil diff --git a/go/parquet/metadata/statistics_test.go b/go/parquet/metadata/statistics_test.go index 35d8b7821c51a..19311dc8955d3 100644 --- a/go/parquet/metadata/statistics_test.go +++ b/go/parquet/metadata/statistics_test.go @@ -21,35 +21,48 @@ import ( "reflect" "testing" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/metadata" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/metadata" + "github.com/apache/arrow/go/v15/parquet/schema" "github.com/stretchr/testify/assert" ) // NOTE(zeroshade): tests will be added and updated after merging the "file" package // since the tests that I wrote relied on the file writer/reader for ease of use. 
+func newFloat16Node(name string, rep parquet.Repetition, fieldID int32) *schema.PrimitiveNode { + return schema.MustPrimitive(schema.NewPrimitiveNodeLogical(name, rep, schema.Float16LogicalType{}, parquet.Types.FixedLenByteArray, 2, fieldID)) +} + func TestCheckNaNs(t *testing.T) { const ( numvals = 8 min = -4.0 max = 3.0 ) - nan := math.NaN() + var ( + nan = math.NaN() + f16Min parquet.FixedLenByteArray = float16.New(float32(min)).ToLEBytes() + f16Max parquet.FixedLenByteArray = float16.New(float32(max)).ToLEBytes() + ) allNans := []float64{nan, nan, nan, nan, nan, nan, nan, nan} allNansf32 := make([]float32, numvals) + allNansf16 := make([]parquet.FixedLenByteArray, numvals) for idx, v := range allNans { allNansf32[idx] = float32(v) + allNansf16[idx] = float16.New(float32(v)).ToLEBytes() } someNans := []float64{nan, max, -3.0, -1.0, nan, 2.0, min, nan} someNansf32 := make([]float32, numvals) + someNansf16 := make([]parquet.FixedLenByteArray, numvals) for idx, v := range someNans { someNansf32[idx] = float32(v) + someNansf16[idx] = float16.New(float32(v)).ToLEBytes() } validBitmap := []byte{0x7F} // 0b01111111 @@ -62,6 +75,8 @@ func TestCheckNaNs(t *testing.T) { s.Update(values.([]float32), 0) case *metadata.Float64Statistics: s.Update(values.([]float64), 0) + case *metadata.Float16Statistics: + s.Update(values.([]parquet.FixedLenByteArray), 0) } assert.False(t, stats.HasMinMax()) } else { @@ -72,6 +87,8 @@ func TestCheckNaNs(t *testing.T) { s.UpdateSpaced(values.([]float32), bitmap, 0, int64(nullCount)) case *metadata.Float64Statistics: s.UpdateSpaced(values.([]float64), bitmap, 0, int64(nullCount)) + case *metadata.Float16Statistics: + s.UpdateSpaced(values.([]parquet.FixedLenByteArray), bitmap, 0, int64(nullCount)) } assert.False(t, stats.HasMinMax()) } @@ -89,6 +106,11 @@ func TestCheckNaNs(t *testing.T) { assert.True(t, stats.HasMinMax()) assert.Equal(t, expectedMin, s.Min()) assert.Equal(t, expectedMax, s.Max()) + case *metadata.Float16Statistics: + 
s.Update(values.([]parquet.FixedLenByteArray), 0) + assert.True(t, stats.HasMinMax()) + assert.Equal(t, expectedMin, s.Min()) + assert.Equal(t, expectedMax, s.Max()) } } @@ -106,34 +128,48 @@ func TestCheckNaNs(t *testing.T) { assert.True(t, s.HasMinMax()) assert.Equal(t, expectedMin, s.Min()) assert.Equal(t, expectedMax, s.Max()) + case *metadata.Float16Statistics: + s.UpdateSpaced(values.([]parquet.FixedLenByteArray), bitmap, 0, int64(nullCount)) + assert.True(t, s.HasMinMax()) + assert.Equal(t, expectedMin, s.Min()) + assert.Equal(t, expectedMax, s.Max()) } } f32Col := schema.NewColumn(schema.NewFloat32Node("f", parquet.Repetitions.Optional, -1), 1, 1) f64Col := schema.NewColumn(schema.NewFloat64Node("f", parquet.Repetitions.Optional, -1), 1, 1) + f16Col := schema.NewColumn(newFloat16Node("f", parquet.Repetitions.Required, -1), 1, 1) // test values someNanStats := metadata.NewStatistics(f64Col, memory.DefaultAllocator) someNanStatsf32 := metadata.NewStatistics(f32Col, memory.DefaultAllocator) + someNanStatsf16 := metadata.NewStatistics(f16Col, memory.DefaultAllocator) // ingesting only nans should not yield a min or max assertUnsetMinMax(someNanStats, allNans, nil) assertUnsetMinMax(someNanStatsf32, allNansf32, nil) + assertUnsetMinMax(someNanStatsf16, allNansf16, nil) // ingesting a mix should yield a valid min/max assertMinMaxAre(someNanStats, someNans, min, max) assertMinMaxAre(someNanStatsf32, someNansf32, float32(min), float32(max)) + assertMinMaxAre(someNanStatsf16, someNansf16, f16Min, f16Max) // ingesting only nans after a valid min/max should have no effect assertMinMaxAre(someNanStats, allNans, min, max) assertMinMaxAre(someNanStatsf32, allNansf32, float32(min), float32(max)) + assertMinMaxAre(someNanStatsf16, allNansf16, f16Min, f16Max) someNanStats = metadata.NewStatistics(f64Col, memory.DefaultAllocator) someNanStatsf32 = metadata.NewStatistics(f32Col, memory.DefaultAllocator) + someNanStatsf16 = metadata.NewStatistics(f16Col, 
memory.DefaultAllocator) assertUnsetMinMax(someNanStats, allNans, validBitmap) assertUnsetMinMax(someNanStatsf32, allNansf32, validBitmap) + assertUnsetMinMax(someNanStatsf16, allNansf16, validBitmap) // nans should not pollute min/max when excluded via null bitmap assertMinMaxAreSpaced(someNanStats, someNans, validBitmapNoNaNs, min, max) assertMinMaxAreSpaced(someNanStatsf32, someNansf32, validBitmapNoNaNs, float32(min), float32(max)) + assertMinMaxAreSpaced(someNanStatsf16, someNansf16, validBitmapNoNaNs, f16Min, f16Max) // ingesting nans with a null bitmap should not change the result assertMinMaxAreSpaced(someNanStats, someNans, validBitmap, min, max) assertMinMaxAreSpaced(someNanStatsf32, someNansf32, validBitmap, float32(min), float32(max)) + assertMinMaxAreSpaced(someNanStatsf16, someNansf16, validBitmap, f16Min, f16Max) } func TestCheckNegativeZeroStats(t *testing.T) { @@ -155,37 +191,61 @@ func TestCheckNegativeZeroStats(t *testing.T) { assert.True(t, math.Signbit(s.Min())) assert.Equal(t, zero, s.Max()) assert.False(t, math.Signbit(s.Max())) + case *metadata.Float16Statistics: + s.Update(values.([]parquet.FixedLenByteArray), 0) + assert.True(t, s.HasMinMax()) + var zero float64 + min := float64(float16.FromLEBytes(s.Min()).Float32()) + max := float64(float16.FromLEBytes(s.Max()).Float32()) + assert.Equal(t, zero, min) + assert.True(t, math.Signbit(min)) + assert.Equal(t, zero, max) + assert.False(t, math.Signbit(max)) } } fcol := schema.NewColumn(schema.NewFloat32Node("f", parquet.Repetitions.Optional, -1), 1, 1) dcol := schema.NewColumn(schema.NewFloat64Node("d", parquet.Repetitions.Optional, -1), 1, 1) + hcol := schema.NewColumn(newFloat16Node("h", parquet.Repetitions.Optional, -1), 1, 1) var f32zero float32 var f64zero float64 + var f16PosZero parquet.FixedLenByteArray = float16.New(+f32zero).ToLEBytes() + var f16NegZero parquet.FixedLenByteArray = float16.New(-f32zero).ToLEBytes() + + assert.False(t, float16.FromLEBytes(f16PosZero).Signbit()) + 
assert.True(t, float16.FromLEBytes(f16NegZero).Signbit()) { fstats := metadata.NewStatistics(fcol, memory.DefaultAllocator) dstats := metadata.NewStatistics(dcol, memory.DefaultAllocator) + hstats := metadata.NewStatistics(hcol, memory.DefaultAllocator) assertMinMaxZeroesSign(fstats, []float32{-f32zero, f32zero}) assertMinMaxZeroesSign(dstats, []float64{-f64zero, f64zero}) + assertMinMaxZeroesSign(hstats, []parquet.FixedLenByteArray{f16NegZero, f16PosZero}) } { fstats := metadata.NewStatistics(fcol, memory.DefaultAllocator) dstats := metadata.NewStatistics(dcol, memory.DefaultAllocator) + hstats := metadata.NewStatistics(hcol, memory.DefaultAllocator) assertMinMaxZeroesSign(fstats, []float32{f32zero, -f32zero}) assertMinMaxZeroesSign(dstats, []float64{f64zero, -f64zero}) + assertMinMaxZeroesSign(hstats, []parquet.FixedLenByteArray{f16PosZero, f16NegZero}) } { fstats := metadata.NewStatistics(fcol, memory.DefaultAllocator) dstats := metadata.NewStatistics(dcol, memory.DefaultAllocator) + hstats := metadata.NewStatistics(hcol, memory.DefaultAllocator) assertMinMaxZeroesSign(fstats, []float32{-f32zero, -f32zero}) assertMinMaxZeroesSign(dstats, []float64{-f64zero, -f64zero}) + assertMinMaxZeroesSign(hstats, []parquet.FixedLenByteArray{f16NegZero, f16NegZero}) } { fstats := metadata.NewStatistics(fcol, memory.DefaultAllocator) dstats := metadata.NewStatistics(dcol, memory.DefaultAllocator) + hstats := metadata.NewStatistics(hcol, memory.DefaultAllocator) assertMinMaxZeroesSign(fstats, []float32{f32zero, f32zero}) assertMinMaxZeroesSign(dstats, []float64{f64zero, f64zero}) + assertMinMaxZeroesSign(hstats, []parquet.FixedLenByteArray{f16PosZero, f16PosZero}) } } diff --git a/go/parquet/metadata/statistics_types.gen.go b/go/parquet/metadata/statistics_types.gen.go index e6aa7f1801a0f..baecd185d14fc 100644 --- a/go/parquet/metadata/statistics_types.gen.go +++ b/go/parquet/metadata/statistics_types.gen.go @@ -22,14 +22,15 @@ import ( "fmt" "math" - 
"github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/bitutils" - shared_utils "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/bitutils" + shared_utils "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" + "github.com/apache/arrow/go/v15/parquet/schema" "golang.org/x/xerrors" ) @@ -2432,6 +2433,314 @@ func (s *FixedLenByteArrayStatistics) Encode() (enc EncodedStatistics, err error return } +type minmaxPairFloat16 [2]parquet.FixedLenByteArray + +// Float16Statistics is the typed interface for managing stats for a column +// of Float16 type. +type Float16Statistics struct { + statistics + min parquet.FixedLenByteArray + max parquet.FixedLenByteArray + + bitSetReader bitutils.SetBitRunReader +} + +// NewFloat16Statistics constructs an appropriate stat object type using the +// given column descriptor and allocator. 
+// +// Panics if the physical type of descr is not parquet.Type.FixedLenByteArray +// Panics if the logical type of descr is not schema.Float16LogicalType +func NewFloat16Statistics(descr *schema.Column, mem memory.Allocator) *Float16Statistics { + if descr.PhysicalType() != parquet.Types.FixedLenByteArray { + panic(fmt.Errorf("parquet: invalid type %s for constructing a Float16 stat object", descr.PhysicalType())) + } + if !descr.LogicalType().Equals(schema.Float16LogicalType{}) { + panic(fmt.Errorf("parquet: invalid logical type %s for constructing a Float16 stat object", descr.LogicalType().String())) + } + + return &Float16Statistics{ + statistics: statistics{ + descr: descr, + hasNullCount: true, + hasDistinctCount: true, + order: descr.SortOrder(), + encoder: encoding.NewEncoder(descr.PhysicalType(), parquet.Encodings.Plain, false, descr, mem), + mem: mem, + }, + } +} + +// NewFloat16StatisticsFromEncoded will construct a propertly typed statistics object +// initializing it with the provided information. 
+func NewFloat16StatisticsFromEncoded(descr *schema.Column, mem memory.Allocator, nvalues int64, encoded StatProvider) *Float16Statistics { + ret := NewFloat16Statistics(descr, mem) + ret.nvalues += nvalues + if encoded.IsSetNullCount() { + ret.IncNulls(encoded.GetNullCount()) + } + if encoded.IsSetDistinctCount() { + ret.IncDistinct(encoded.GetDistinctCount()) + } + + encodedMin := encoded.GetMin() + if encodedMin != nil && len(encodedMin) > 0 { + ret.min = ret.plainDecode(encodedMin) + } + encodedMax := encoded.GetMax() + if encodedMax != nil && len(encodedMax) > 0 { + ret.max = ret.plainDecode(encodedMax) + } + ret.hasMinMax = encoded.IsSetMax() || encoded.IsSetMin() + return ret +} + +func (s *Float16Statistics) plainEncode(src parquet.FixedLenByteArray) []byte { + s.encoder.(encoding.FixedLenByteArrayEncoder).Put([]parquet.FixedLenByteArray{src}) + buf, err := s.encoder.FlushValues() + if err != nil { + panic(err) // recovered by Encode + } + defer buf.Release() + + out := make([]byte, buf.Len()) + copy(out, buf.Bytes()) + return out +} + +func (s *Float16Statistics) plainDecode(src []byte) parquet.FixedLenByteArray { + var buf [1]parquet.FixedLenByteArray + + decoder := encoding.NewDecoder(s.descr.PhysicalType(), parquet.Encodings.Plain, s.descr, s.mem) + decoder.SetData(1, src) + decoder.(encoding.FixedLenByteArrayDecoder).Decode(buf[:]) + return buf[0] +} + +func (s *Float16Statistics) minval(a, b parquet.FixedLenByteArray) parquet.FixedLenByteArray { + switch { + case a == nil: + return b + case b == nil: + return a + case s.less(a, b): + return a + default: + return b + } +} + +func (s *Float16Statistics) maxval(a, b parquet.FixedLenByteArray) parquet.FixedLenByteArray { + switch { + case a == nil: + return b + case b == nil: + return a + case s.less(a, b): + return b + default: + return a + } +} + +// MinMaxEqual returns true if both stat objects have the same Min and Max values +func (s *Float16Statistics) MinMaxEqual(rhs *Float16Statistics) bool { + 
return s.equal(s.min, rhs.min) && s.equal(s.max, rhs.max) +} + +// Equals returns true only if both objects are the same type, have the same min and +// max values, null count, distinct count and number of values. +func (s *Float16Statistics) Equals(other TypedStatistics) bool { + if s.Type() != other.Type() || !s.descr.LogicalType().Equals(other.Descr().LogicalType()) { + return false + } + rhs, ok := other.(*Float16Statistics) + if !ok { + return false + } + + if s.HasMinMax() != rhs.HasMinMax() { + return false + } + return (s.hasMinMax && s.MinMaxEqual(rhs)) && + s.NullCount() == rhs.NullCount() && + s.DistinctCount() == rhs.DistinctCount() && + s.NumValues() == rhs.NumValues() +} + +func (s *Float16Statistics) coalesce(val, fallback parquet.FixedLenByteArray) parquet.FixedLenByteArray { + if float16.FromLEBytes(val).IsNaN() { + return fallback + } + return val +} + +func (s *Float16Statistics) getMinMax(values []parquet.FixedLenByteArray) (min, max parquet.FixedLenByteArray) { + defMin := s.defaultMin() + defMax := s.defaultMax() + + min = defMin + max = defMax + + for _, v := range values { + min = s.minval(min, s.coalesce(v, defMin)) + max = s.maxval(max, s.coalesce(v, defMax)) + } + return +} + +func (s *Float16Statistics) getMinMaxSpaced(values []parquet.FixedLenByteArray, validBits []byte, validBitsOffset int64) (min, max parquet.FixedLenByteArray) { + min = s.defaultMin() + max = s.defaultMax() + + if s.bitSetReader == nil { + s.bitSetReader = bitutils.NewSetBitRunReader(validBits, validBitsOffset, int64(len(values))) + } else { + s.bitSetReader.Reset(validBits, validBitsOffset, int64(len(values))) + } + + for { + run := s.bitSetReader.NextRun() + if run.Length == 0 { + break + } + for _, v := range values[int(run.Pos):int(run.Pos+run.Length)] { + min = s.minval(min, coalesce(v, s.defaultMin()).(parquet.FixedLenByteArray)) + max = s.maxval(max, coalesce(v, s.defaultMax()).(parquet.FixedLenByteArray)) + } + } + return +} + +func (s *Float16Statistics) 
Min() parquet.FixedLenByteArray { return s.min } +func (s *Float16Statistics) Max() parquet.FixedLenByteArray { return s.max } + +// Merge merges the stats from other into this stat object, updating +// the null count, distinct count, number of values and the min/max if +// appropriate. +func (s *Float16Statistics) Merge(other TypedStatistics) { + rhs, ok := other.(*Float16Statistics) + if !ok { + panic("incompatible stat type merge") + } + + s.statistics.merge(rhs) + if rhs.HasMinMax() { + s.SetMinMax(rhs.Min(), rhs.Max()) + } +} + +// Update is used to add more values to the current stat object, finding the +// min and max values etc. +func (s *Float16Statistics) Update(values []parquet.FixedLenByteArray, numNull int64) { + s.IncNulls(numNull) + s.nvalues += int64(len(values)) + + if len(values) == 0 { + return + } + + s.SetMinMax(s.getMinMax(values)) +} + +// UpdateSpaced is just like Update, but for spaced values using validBits to determine +// and skip null values. +func (s *Float16Statistics) UpdateSpaced(values []parquet.FixedLenByteArray, validBits []byte, validBitsOffset, numNull int64) { + s.IncNulls(numNull) + notnull := int64(len(values)) - numNull + s.nvalues += notnull + + if notnull == 0 { + return + } + + s.SetMinMax(s.getMinMaxSpaced(values, validBits, validBitsOffset)) +} + +func (s *Float16Statistics) UpdateFromArrow(values arrow.Array, updateCounts bool) error { + if updateCounts { + s.IncNulls(int64(values.NullN())) + s.nvalues += int64(values.Len() - values.NullN()) + } + + if values.NullN() == values.Len() { + return nil + } + + return fmt.Errorf("%w: update float16 stats from Arrow", arrow.ErrNotImplemented) +} + +// SetMinMax updates the min and max values only if they are not currently set +// or if argMin is less than the current min / argMax is greater than the current max +func (s *Float16Statistics) SetMinMax(argMin, argMax parquet.FixedLenByteArray) { + maybeMinMax := s.cleanStat([2]parquet.FixedLenByteArray{argMin, argMax}) + if 
maybeMinMax == nil { + return + } + + min := (*maybeMinMax)[0] + max := (*maybeMinMax)[1] + + if !s.hasMinMax { + s.hasMinMax = true + s.min = min + s.max = max + } else { + if !s.less(s.min, min) { + s.min = min + } + if s.less(s.max, max) { + s.max = max + } + } +} + +// EncodeMin returns the encoded min value with plain encoding. +// +// ByteArray stats do not include the length in the encoding. +func (s *Float16Statistics) EncodeMin() []byte { + if s.HasMinMax() { + return s.plainEncode(s.min) + } + return nil +} + +// EncodeMax returns the current encoded max value with plain encoding +// +// ByteArray stats do not include the length in the encoding +func (s *Float16Statistics) EncodeMax() []byte { + if s.HasMinMax() { + return s.plainEncode(s.max) + } + return nil +} + +// Encode returns a populated EncodedStatistics object +func (s *Float16Statistics) Encode() (enc EncodedStatistics, err error) { + defer func() { + if r := recover(); r != nil { + switch r := r.(type) { + case error: + err = r + case string: + err = xerrors.New(r) + default: + err = fmt.Errorf("unknown error type thrown from panic: %v", r) + } + } + }() + if s.HasMinMax() { + enc.SetMax(s.EncodeMax()) + enc.SetMin(s.EncodeMin()) + } + if s.HasNullCount() { + enc.SetNullCount(s.NullCount()) + } + if s.HasDistinctCount() { + enc.SetDistinctCount(s.DistinctCount()) + } + return +} + // NewStatistics uses the type in the column descriptor to construct the appropriate // typed stats object. If mem is nil, then memory.DefaultAllocator will be used. 
func NewStatistics(descr *schema.Column, mem memory.Allocator) TypedStatistics { @@ -2454,6 +2763,9 @@ func NewStatistics(descr *schema.Column, mem memory.Allocator) TypedStatistics { case parquet.Types.ByteArray: return NewByteArrayStatistics(descr, mem) case parquet.Types.FixedLenByteArray: + if descr.LogicalType().Equals(schema.Float16LogicalType{}) { + return NewFloat16Statistics(descr, mem) + } return NewFixedLenByteArrayStatistics(descr, mem) default: panic("not implemented") @@ -2484,6 +2796,9 @@ func NewStatisticsFromEncoded(descr *schema.Column, mem memory.Allocator, nvalue case parquet.Types.ByteArray: return NewByteArrayStatisticsFromEncoded(descr, mem, nvalues, encoded) case parquet.Types.FixedLenByteArray: + if descr.LogicalType().Equals(schema.Float16LogicalType{}) { + return NewFloat16StatisticsFromEncoded(descr, mem, nvalues, encoded) + } return NewFixedLenByteArrayStatisticsFromEncoded(descr, mem, nvalues, encoded) default: panic("not implemented") diff --git a/go/parquet/metadata/statistics_types.gen.go.tmpl b/go/parquet/metadata/statistics_types.gen.go.tmpl index 35470f06046bf..93495527c7e54 100644 --- a/go/parquet/metadata/statistics_types.gen.go.tmpl +++ b/go/parquet/metadata/statistics_types.gen.go.tmpl @@ -19,13 +19,13 @@ package metadata import ( "fmt" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/schema" - "github.com/apache/arrow/go/v14/parquet/internal/utils" - shared_utils "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" - "github.com/apache/arrow/go/v14/internal/bitutils" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/schema" + "github.com/apache/arrow/go/v15/parquet/internal/utils" + shared_utils "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" + 
"github.com/apache/arrow/go/v15/internal/bitutils" ) {{range .In}} @@ -45,10 +45,18 @@ type {{.Name}}Statistics struct { // given column descriptor and allocator. // // Panics if the physical type of descr is not parquet.Type.{{if .physical}}{{.physical}}{{else}}{{.Name}}{{end}} +{{- if eq .Name "Float16"}} +// Panics if the logical type of descr is not schema.Float16LogicalType +{{- end}} func New{{.Name}}Statistics(descr *schema.Column, mem memory.Allocator) *{{.Name}}Statistics { if descr.PhysicalType() != parquet.Types.{{if .physical}}{{.physical}}{{else}}{{.Name}}{{end}} { panic(fmt.Errorf("parquet: invalid type %s for constructing a {{.Name}} stat object", descr.PhysicalType())) } +{{- if eq .Name "Float16"}} + if !descr.LogicalType().Equals(schema.Float16LogicalType{}) { + panic(fmt.Errorf("parquet: invalid logical type %s for constructing a {{.Name}} stat object", descr.LogicalType().String())) + } +{{- end}} return &{{.Name}}Statistics{ statistics: statistics{ @@ -96,7 +104,7 @@ func (s *{{.Name}}Statistics) plainEncode(src {{.name}}) []byte { copy(out, src) return out {{- else}} - s.encoder.(encoding.{{.Name}}Encoder).Put([]{{.name}}{src}) + s.encoder.(encoding.{{if .logical}}{{.physical}}{{else}}{{.Name}}{{end}}Encoder).Put([]{{.name}}{src}) buf, err := s.encoder.FlushValues() if err != nil { panic(err) // recovered by Encode @@ -117,12 +125,12 @@ func (s *{{.Name}}Statistics) plainDecode(src []byte) {{.name}} { decoder := encoding.NewDecoder(s.descr.PhysicalType(), parquet.Encodings.Plain, s.descr, s.mem) decoder.SetData(1, src) - decoder.(encoding.{{.Name}}Decoder).Decode(buf[:]) + decoder.(encoding.{{if .logical}}{{.physical}}{{else}}{{.Name}}{{end}}Decoder).Decode(buf[:]) return buf[0] {{- end}} } -{{if and (ne .Name "ByteArray") (ne .Name "FixedLenByteArray")}} +{{if and (ne .Name "ByteArray") (ne .Name "FixedLenByteArray") (ne .Name "Float16")}} func (s *{{.Name}}Statistics) minval(a, b {{.name}}) {{.name}} { if s.less(a, b) { return a @@ -172,7 
+180,11 @@ func (s *{{.Name}}Statistics) MinMaxEqual(rhs *{{.Name}}Statistics) bool { // Equals returns true only if both objects are the same type, have the same min and // max values, null count, distinct count and number of values. func (s *{{.Name}}Statistics) Equals(other TypedStatistics) bool { +{{- if .logical}} + if s.Type() != other.Type() || !s.descr.LogicalType().Equals(other.Descr().LogicalType()) { +{{- else}} if s.Type() != other.Type() { +{{- end}} return false } rhs, ok := other.(*{{.Name}}Statistics) @@ -194,6 +206,13 @@ func (s *{{.Name}}Statistics) coalesce(val, fallback {{.name}}) {{.name}} { } return val } +{{else if eq .Name "Float16"}} +func (s *{{.Name}}Statistics) coalesce(val, fallback {{.name}}) {{.name}} { + if float16.FromLEBytes(val).IsNaN() { + return fallback + } + return val +} {{end}} func (s *{{.Name}}Statistics) getMinMax(values []{{.name}}) (min, max {{.name}}) { @@ -212,7 +231,7 @@ func (s *{{.Name}}Statistics) getMinMax(values []{{.name}}) (min, max {{.name}}) max = defMax for _, v := range values { -{{- if or (eq .name "float32") (eq .name "float64") }} +{{- if or (eq .name "float32") (eq .name "float64") (eq .Name "Float16") }} min = s.minval(min, s.coalesce(v, defMin)) max = s.maxval(max, s.coalesce(v, defMax)) {{- else}} @@ -261,7 +280,7 @@ func (s *{{.Name}}Statistics) getMinMaxSpaced(values []{{.name}}, validBits []by } {{- else}} for _, v := range values[int(run.Pos):int(run.Pos+run.Length)] { -{{- if or (eq .name "float32") (eq .name "float64") }} +{{- if or (eq .name "float32") (eq .name "float64") (eq .Name "Float16") }} min = s.minval(min, coalesce(v, s.defaultMin()).({{.name}})) max = s.maxval(max, coalesce(v, s.defaultMax()).({{.name}})) {{- else}} @@ -381,7 +400,9 @@ func (s *{{.Name}}Statistics) UpdateFromArrow(values arrow.Array, updateCounts b s.SetMinMax(min, max) return nil {{else if eq .Name "Boolean"}} - return fmt.Errorf("%w: update boolean stats from Arrow", arrow.ErrNotImplemented) + return 
fmt.Errorf("%w: update boolean stats from Arrow", arrow.ErrNotImplemented) +{{else if eq .Name "Float16"}} + return fmt.Errorf("%w: update float16 stats from Arrow", arrow.ErrNotImplemented) {{else}} if values.DataType().(arrow.FixedWidthDataType).Bytes() != arrow.{{.Name}}SizeBytes { return fmt.Errorf("%w: cannot update {{.name}} stats with %s arrow array", @@ -475,8 +496,15 @@ func NewStatistics(descr *schema.Column, mem memory.Allocator) TypedStatistics { } switch descr.PhysicalType() { {{- range .In}} + {{- if not .logical}} case parquet.Types.{{if .physical}}{{.physical}}{{else}}{{.Name}}{{end}}: + {{- if eq .Name "FixedLenByteArray"}} + if descr.LogicalType().Equals(schema.Float16LogicalType{}) { + return NewFloat16Statistics(descr, mem) + } + {{- end}} return New{{.Name}}Statistics(descr, mem) + {{- end}} {{- end}} default: panic("not implemented") @@ -493,8 +521,15 @@ func NewStatisticsFromEncoded(descr *schema.Column, mem memory.Allocator, nvalue } switch descr.PhysicalType() { {{- range .In}} + {{- if not .logical}} case parquet.Types.{{if .physical}}{{.physical}}{{else}}{{.Name}}{{end}}: + {{- if eq .Name "FixedLenByteArray"}} + if descr.LogicalType().Equals(schema.Float16LogicalType{}) { + return NewFloat16StatisticsFromEncoded(descr, mem, nvalues, encoded) + } + {{- end}} return New{{.Name}}StatisticsFromEncoded(descr, mem, nvalues, encoded) + {{- end}} {{- end}} default: panic("not implemented") diff --git a/go/parquet/metadata/statistics_types.tmpldata b/go/parquet/metadata/statistics_types.tmpldata new file mode 100644 index 0000000000000..400c0a3ca515d --- /dev/null +++ b/go/parquet/metadata/statistics_types.tmpldata @@ -0,0 +1,60 @@ +[ + { + "Name": "Int32", + "name": "int32", + "lower": "int32", + "prefix": "arrow" + }, + { + "Name": "Int64", + "name": "int64", + "lower": "int64", + "prefix": "arrow" + }, + { + "Name": "Int96", + "name": "parquet.Int96", + "lower": "int96", + "prefix": "parquet" + }, + { + "Name": "Float32", + "name": "float32", 
+ "lower": "float32", + "prefix": "arrow", + "physical": "Float" + }, + { + "Name": "Float64", + "name": "float64", + "lower": "float64", + "prefix": "arrow", + "physical": "Double" + }, + { + "Name": "Boolean", + "name": "bool", + "lower": "bool", + "prefix": "arrow" + }, + { + "Name": "ByteArray", + "name": "parquet.ByteArray", + "lower": "byteArray", + "prefix": "parquet" + }, + { + "Name": "FixedLenByteArray", + "name": "parquet.FixedLenByteArray", + "lower": "fixedLenByteArray", + "prefix": "parquet" + }, + { + "Name": "Float16", + "name": "parquet.FixedLenByteArray", + "lower": "float16", + "prefix": "parquet", + "physical": "FixedLenByteArray", + "logical": "Float16LogicalType" + } +] diff --git a/go/parquet/pqarrow/column_readers.go b/go/parquet/pqarrow/column_readers.go index 759a3d8675927..49f3fac0a3b7c 100644 --- a/go/parquet/pqarrow/column_readers.go +++ b/go/parquet/pqarrow/column_readers.go @@ -26,16 +26,16 @@ import ( "time" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/schema" "golang.org/x/sync/errgroup" ) @@ -517,6 +517,14 @@ func 
transferColumnData(rdr file.RecordReader, valueType arrow.DataType, descr * default: return nil, errors.New("time unit not supported") } + case arrow.FLOAT16: + if descr.PhysicalType() != parquet.Types.FixedLenByteArray { + return nil, errors.New("physical type for float16 must be fixed len byte array") + } + if len := arrow.Float16SizeBytes; descr.TypeLength() != len { + return nil, fmt.Errorf("fixed len byte array length for float16 must be %d", len) + } + return transferBinary(rdr, valueType), nil default: return nil, fmt.Errorf("no support for reading columns of type: %s", valueType.Name()) } @@ -563,6 +571,14 @@ func transferBinary(rdr file.RecordReader, dt arrow.DataType) *arrow.Chunked { chunks[idx] = array.MakeFromData(chunk.Data()) chunk.Release() } + case *arrow.Float16Type: + for idx, chunk := range chunks { + data := chunk.Data() + f16_data := array.NewData(dt, data.Len(), data.Buffers(), nil, data.NullN(), data.Offset()) + defer f16_data.Release() + chunks[idx] = array.NewFloat16Data(f16_data) + chunk.Release() + } } return arrow.NewChunked(dt, chunks) } diff --git a/go/parquet/pqarrow/encode_arrow.go b/go/parquet/pqarrow/encode_arrow.go index c3a0a50c43f45..8926d0ba51a07 100644 --- a/go/parquet/pqarrow/encode_arrow.go +++ b/go/parquet/pqarrow/encode_arrow.go @@ -25,16 +25,16 @@ import ( "time" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/internal/debug" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + 
"github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/internal/debug" ) // get the count of the number of leaf arrays for the type @@ -65,25 +65,25 @@ func nullableRoot(manifest *SchemaManifest, field *SchemaField) bool { return nullable } -// ArrowColumnWriter is a convenience object for easily writing arrow data to a specific +// arrowColumnWriter is a convenience object for easily writing arrow data to a specific // set of columns in a parquet file. Since a single arrow array can itself be a nested type // consisting of multiple columns of data, this will write to all of the appropriate leaves in // the parquet file, allowing easy writing of nested columns. -type ArrowColumnWriter struct { +type arrowColumnWriter struct { builders []*multipathLevelBuilder leafCount int colIdx int rgw file.RowGroupWriter } -// NewArrowColumnWriter returns a new writer using the chunked array to determine the number of leaf columns, +// newArrowColumnWriter returns a new writer using the chunked array to determine the number of leaf columns, // and the provided schema manifest to determine the paths for writing the columns. // // Using an arrow column writer is a convenience to avoid having to process the arrow array yourself // and determine the correct definition and repetition levels manually. 
-func NewArrowColumnWriter(data *arrow.Chunked, offset, size int64, manifest *SchemaManifest, rgw file.RowGroupWriter, col int) (ArrowColumnWriter, error) { +func newArrowColumnWriter(data *arrow.Chunked, offset, size int64, manifest *SchemaManifest, rgw file.RowGroupWriter, leafColIdx int) (arrowColumnWriter, error) { if data.Len() == 0 { - return ArrowColumnWriter{leafCount: calcLeafCount(data.DataType()), rgw: rgw}, nil + return arrowColumnWriter{leafCount: calcLeafCount(data.DataType()), rgw: rgw}, nil } var ( @@ -109,7 +109,7 @@ func NewArrowColumnWriter(data *arrow.Chunked, offset, size int64, manifest *Sch } if absPos >= int64(data.Len()) { - return ArrowColumnWriter{}, errors.New("cannot write data at offset past end of chunked array") + return arrowColumnWriter{}, errors.New("cannot write data at offset past end of chunked array") } leafCount := calcLeafCount(data.DataType()) @@ -118,9 +118,9 @@ func NewArrowColumnWriter(data *arrow.Chunked, offset, size int64, manifest *Sch // which is the one this instance will start writing for // colIdx := rgw.CurrentColumn() + 1 - schemaField, err := manifest.GetColumnField(col) + schemaField, err := manifest.GetColumnField(leafColIdx) if err != nil { - return ArrowColumnWriter{}, err + return arrowColumnWriter{}, err } isNullable = nullableRoot(manifest, schemaField) @@ -138,10 +138,10 @@ func NewArrowColumnWriter(data *arrow.Chunked, offset, size int64, manifest *Sch if arrToWrite.Len() > 0 { bldr, err := newMultipathLevelBuilder(arrToWrite, isNullable) if err != nil { - return ArrowColumnWriter{}, nil + return arrowColumnWriter{}, nil } if leafCount != bldr.leafCount() { - return ArrowColumnWriter{}, fmt.Errorf("data type leaf_count != builder leafcount: %d - %d", leafCount, bldr.leafCount()) + return arrowColumnWriter{}, fmt.Errorf("data type leaf_count != builder leafcount: %d - %d", leafCount, bldr.leafCount()) } builders = append(builders, bldr) } @@ -153,10 +153,10 @@ func NewArrowColumnWriter(data 
*arrow.Chunked, offset, size int64, manifest *Sch values += chunkWriteSize } - return ArrowColumnWriter{builders: builders, leafCount: leafCount, rgw: rgw, colIdx: col}, nil + return arrowColumnWriter{builders: builders, leafCount: leafCount, rgw: rgw, colIdx: leafColIdx}, nil } -func (acw *ArrowColumnWriter) Write(ctx context.Context) error { +func (acw *arrowColumnWriter) Write(ctx context.Context) error { arrCtx := arrowCtxFromContext(ctx) for leafIdx := 0; leafIdx < acw.leafCount; leafIdx++ { var ( @@ -582,6 +582,31 @@ func writeDenseArrow(ctx *arrowWriteContext, cw file.ColumnChunkWriter, leafArr } wr.WriteBatchSpaced(data, defLevels, repLevels, arr.NullBitmapBytes(), int64(arr.Data().Offset())) } + case *arrow.Float16Type: + typeLen := wr.Descr().TypeLength() + if typeLen != arrow.Float16SizeBytes { + return fmt.Errorf("%w: invalid FixedLenByteArray length to write from float16 column: %d", arrow.ErrInvalid, typeLen) + } + + arr := leafArr.(*array.Float16) + rawValues := arrow.Float16Traits.CastToBytes(arr.Values()) + data := make([]parquet.FixedLenByteArray, arr.Len()) + + if arr.NullN() == 0 { + for idx := range data { + offset := idx * typeLen + data[idx] = rawValues[offset : offset+typeLen] + } + _, err = wr.WriteBatch(data, defLevels, repLevels) + } else { + for idx := range data { + if arr.IsValid(idx) { + offset := idx * typeLen + data[idx] = rawValues[offset : offset+typeLen] + } + } + wr.WriteBatchSpaced(data, defLevels, repLevels, arr.NullBitmapBytes(), int64(arr.Data().Offset())) + } default: return fmt.Errorf("%w: invalid column type to write to FixedLenByteArray: %s", arrow.ErrInvalid, leafArr.DataType().Name()) } diff --git a/go/parquet/pqarrow/encode_arrow_test.go b/go/parquet/pqarrow/encode_arrow_test.go index 3c20cf2d4757b..95ea644dd8013 100644 --- a/go/parquet/pqarrow/encode_arrow_test.go +++ b/go/parquet/pqarrow/encode_arrow_test.go @@ -25,22 +25,22 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - 
"github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/bitutil" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/decimal256" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/types" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/compress" - "github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" - "github.com/apache/arrow/go/v14/parquet/internal/testutils" - "github.com/apache/arrow/go/v14/parquet/pqarrow" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/bitutil" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/decimal256" + "github.com/apache/arrow/go/v15/arrow/ipc" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/types" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/compress" + "github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" + "github.com/apache/arrow/go/v15/parquet/internal/testutils" + "github.com/apache/arrow/go/v15/parquet/pqarrow" + "github.com/apache/arrow/go/v15/parquet/schema" "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -132,26 +132,24 @@ func TestWriteArrowCols(t *testing.T) { tbl := makeDateTimeTypesTable(mem, false, false) defer tbl.Release() - psc, err := pqarrow.ToParquet(tbl.Schema(), nil, pqarrow.NewArrowWriterProperties(pqarrow.WithAllocator(mem))) - require.NoError(t, err) - - manifest, err := pqarrow.NewSchemaManifest(psc, nil, nil) - 
require.NoError(t, err) - sink := encoding.NewBufferWriter(0, mem) defer sink.Release() - writer := file.NewParquetWriter(sink, psc.Root(), file.WithWriterProps(parquet.NewWriterProperties(parquet.WithVersion(parquet.V2_4)))) - srgw := writer.AppendRowGroup() - ctx := pqarrow.NewArrowWriteContext(context.TODO(), nil) + fileWriter, err := pqarrow.NewFileWriter( + tbl.Schema(), + sink, + parquet.NewWriterProperties(parquet.WithVersion(parquet.V2_4)), + pqarrow.NewArrowWriterProperties(pqarrow.WithAllocator(mem)), + ) + require.NoError(t, err) + fileWriter.NewRowGroup() for i := int64(0); i < tbl.NumCols(); i++ { - acw, err := pqarrow.NewArrowColumnWriter(tbl.Column(int(i)).Data(), 0, tbl.NumRows(), manifest, srgw, int(i)) + colChunk := tbl.Column(int(i)).Data() + err := fileWriter.WriteColumnChunked(colChunk, 0, int64(colChunk.Len())) require.NoError(t, err) - require.NoError(t, acw.Write(ctx)) } - require.NoError(t, srgw.Close()) - require.NoError(t, writer.Close()) + require.NoError(t, fileWriter.Close()) expected := makeDateTimeTypesTable(mem, true, false) defer expected.Release() @@ -233,29 +231,24 @@ func TestWriteArrowInt96(t *testing.T) { tbl := makeDateTimeTypesTable(mem, false, false) defer tbl.Release() - props := pqarrow.NewArrowWriterProperties(pqarrow.WithDeprecatedInt96Timestamps(true), pqarrow.WithAllocator(mem)) - - psc, err := pqarrow.ToParquet(tbl.Schema(), nil, props) - require.NoError(t, err) - - manifest, err := pqarrow.NewSchemaManifest(psc, nil, nil) - require.NoError(t, err) - sink := encoding.NewBufferWriter(0, mem) defer sink.Release() - writer := file.NewParquetWriter(sink, psc.Root(), file.WithWriterProps(parquet.NewWriterProperties(parquet.WithAllocator(mem)))) - - srgw := writer.AppendRowGroup() - ctx := pqarrow.NewArrowWriteContext(context.TODO(), &props) + fileWriter, err := pqarrow.NewFileWriter( + tbl.Schema(), + sink, + parquet.NewWriterProperties(parquet.WithAllocator(mem)), + 
pqarrow.NewArrowWriterProperties(pqarrow.WithDeprecatedInt96Timestamps(true), pqarrow.WithAllocator(mem)), + ) + require.NoError(t, err) + fileWriter.NewRowGroup() for i := int64(0); i < tbl.NumCols(); i++ { - acw, err := pqarrow.NewArrowColumnWriter(tbl.Column(int(i)).Data(), 0, tbl.NumRows(), manifest, srgw, int(i)) + colChunk := tbl.Column(int(i)).Data() + err := fileWriter.WriteColumnChunked(colChunk, 0, int64(colChunk.Len())) require.NoError(t, err) - require.NoError(t, acw.Write(ctx)) } - require.NoError(t, srgw.Close()) - require.NoError(t, writer.Close()) + require.NoError(t, fileWriter.Close()) expected := makeDateTimeTypesTable(mem, false, false) defer expected.Release() @@ -292,31 +285,28 @@ func TestWriteArrowInt96(t *testing.T) { func writeTableToBuffer(t *testing.T, mem memory.Allocator, tbl arrow.Table, rowGroupSize int64, props pqarrow.ArrowWriterProperties) *memory.Buffer { sink := encoding.NewBufferWriter(0, mem) defer sink.Release() - wrprops := parquet.NewWriterProperties(parquet.WithVersion(parquet.V1_0)) - psc, err := pqarrow.ToParquet(tbl.Schema(), wrprops, props) - require.NoError(t, err) - manifest, err := pqarrow.NewSchemaManifest(psc, nil, nil) + fileWriter, err := pqarrow.NewFileWriter( + tbl.Schema(), + sink, + parquet.NewWriterProperties(parquet.WithVersion(parquet.V1_0)), + props, + ) require.NoError(t, err) - writer := file.NewParquetWriter(sink, psc.Root(), file.WithWriterProps(wrprops)) - ctx := pqarrow.NewArrowWriteContext(context.TODO(), &props) - offset := int64(0) for offset < tbl.NumRows() { sz := utils.Min(rowGroupSize, tbl.NumRows()-offset) - srgw := writer.AppendRowGroup() + fileWriter.NewRowGroup() for i := 0; i < int(tbl.NumCols()); i++ { - col := tbl.Column(i) - acw, err := pqarrow.NewArrowColumnWriter(col.Data(), offset, sz, manifest, srgw, i) + colChunk := tbl.Column(i).Data() + err := fileWriter.WriteColumnChunked(colChunk, offset, sz) require.NoError(t, err) - require.NoError(t, acw.Write(ctx)) } - 
srgw.Close() offset += sz } - writer.Close() + require.NoError(t, fileWriter.Close()) return sink.Finish() } @@ -495,6 +485,8 @@ func getLogicalType(typ arrow.DataType) schema.LogicalType { return schema.DateLogicalType{} case arrow.DATE64: return schema.DateLogicalType{} + case arrow.FLOAT16: + return schema.Float16LogicalType{} case arrow.TIMESTAMP: ts := typ.(*arrow.TimestampType) adjustedUTC := len(ts.TimeZone) == 0 @@ -541,6 +533,8 @@ func getPhysicalType(typ arrow.DataType) parquet.Type { return parquet.Types.Float case arrow.FLOAT64: return parquet.Types.Double + case arrow.FLOAT16: + return parquet.Types.FixedLenByteArray case arrow.BINARY, arrow.LARGE_BINARY, arrow.STRING, arrow.LARGE_STRING: return parquet.Types.ByteArray case arrow.FIXED_SIZE_BINARY, arrow.DECIMAL: @@ -600,6 +594,8 @@ func (ps *ParquetIOTestSuite) makeSimpleSchema(typ arrow.DataType, rep parquet.R byteWidth = int32(typ.ByteWidth) case arrow.DecimalType: byteWidth = pqarrow.DecimalSize(typ.GetPrecision()) + case *arrow.Float16Type: + byteWidth = int32(typ.Bytes()) case *arrow.DictionaryType: valuesType := typ.ValueType switch dt := valuesType.(type) { @@ -607,6 +603,8 @@ func (ps *ParquetIOTestSuite) makeSimpleSchema(typ arrow.DataType, rep parquet.R byteWidth = int32(dt.ByteWidth) case arrow.DecimalType: byteWidth = pqarrow.DecimalSize(dt.GetPrecision()) + case *arrow.Float16Type: + byteWidth = int32(typ.Bytes()) } } @@ -1113,6 +1111,7 @@ var fullTypeList = []arrow.DataType{ arrow.FixedWidthTypes.Date32, arrow.PrimitiveTypes.Float32, arrow.PrimitiveTypes.Float64, + arrow.FixedWidthTypes.Float16, arrow.BinaryTypes.String, arrow.BinaryTypes.Binary, &arrow.FixedSizeBinaryType{ByteWidth: 10}, diff --git a/go/parquet/pqarrow/encode_dict_compute.go b/go/parquet/pqarrow/encode_dict_compute.go index 59e25cec19132..b43b4002ed0af 100644 --- a/go/parquet/pqarrow/encode_dict_compute.go +++ b/go/parquet/pqarrow/encode_dict_compute.go @@ -21,14 +21,14 @@ package pqarrow import ( "context" - 
"github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/compute" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/internal/debug" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/compute" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/internal/debug" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" ) func isDictEncoding(enc parquet.Encoding) bool { diff --git a/go/parquet/pqarrow/encode_dict_nocompute.go b/go/parquet/pqarrow/encode_dict_nocompute.go index 31d0f1bc1ce27..73ec5cfc46682 100644 --- a/go/parquet/pqarrow/encode_dict_nocompute.go +++ b/go/parquet/pqarrow/encode_dict_nocompute.go @@ -21,8 +21,8 @@ package pqarrow import ( "errors" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/parquet/file" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/parquet/file" ) func writeDictionaryArrow(*arrowWriteContext, file.ColumnChunkWriter, arrow.Array, []int16, []int16, bool) (err error) { diff --git a/go/parquet/pqarrow/encode_dictionary_test.go b/go/parquet/pqarrow/encode_dictionary_test.go index 995f3fb329534..28ebee53e1b83 100644 --- a/go/parquet/pqarrow/encode_dictionary_test.go +++ b/go/parquet/pqarrow/encode_dictionary_test.go @@ -26,14 +26,14 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/compute" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" - 
"github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/internal/testutils" - "github.com/apache/arrow/go/v14/parquet/pqarrow" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/compute" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/internal/testutils" + "github.com/apache/arrow/go/v15/parquet/pqarrow" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" diff --git a/go/parquet/pqarrow/file_reader.go b/go/parquet/pqarrow/file_reader.go index d91010c62c19d..3534cc87b78b5 100755 --- a/go/parquet/pqarrow/file_reader.go +++ b/go/parquet/pqarrow/file_reader.go @@ -23,13 +23,13 @@ import ( "sync" "sync/atomic" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/arrio" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/arrio" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/schema" "golang.org/x/sync/errgroup" "golang.org/x/xerrors" ) diff --git a/go/parquet/pqarrow/file_reader_test.go b/go/parquet/pqarrow/file_reader_test.go index d1f3ae1c984a2..9c1b4252f5fc6 100644 --- a/go/parquet/pqarrow/file_reader_test.go +++ b/go/parquet/pqarrow/file_reader_test.go @@ -26,13 +26,13 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - 
"github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/pqarrow" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/pqarrow" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/go/parquet/pqarrow/file_writer.go b/go/parquet/pqarrow/file_writer.go index aa0bae7b1fdfb..bc484ba243f87 100644 --- a/go/parquet/pqarrow/file_writer.go +++ b/go/parquet/pqarrow/file_writer.go @@ -22,12 +22,12 @@ import ( "fmt" "io" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/flight" - "github.com/apache/arrow/go/v14/internal/utils" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/metadata" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/flight" + "github.com/apache/arrow/go/v15/internal/utils" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/metadata" "golang.org/x/xerrors" ) @@ -305,7 +305,7 @@ func (fw *FileWriter) Close() error { // building of writing columns to a file via arrow data without needing to already have // a record or table. 
func (fw *FileWriter) WriteColumnChunked(data *arrow.Chunked, offset, size int64) error { - acw, err := NewArrowColumnWriter(data, offset, size, fw.manifest, fw.rgw, fw.colIdx) + acw, err := newArrowColumnWriter(data, offset, size, fw.manifest, fw.rgw, fw.colIdx) if err != nil { return err } diff --git a/go/parquet/pqarrow/helpers.go b/go/parquet/pqarrow/helpers.go index e8b740d12ba62..39d17cf80ae68 100644 --- a/go/parquet/pqarrow/helpers.go +++ b/go/parquet/pqarrow/helpers.go @@ -17,7 +17,7 @@ package pqarrow import ( - "github.com/apache/arrow/go/v14/arrow" + "github.com/apache/arrow/go/v15/arrow" ) func releaseArrays(arrays []arrow.Array) { diff --git a/go/parquet/pqarrow/path_builder.go b/go/parquet/pqarrow/path_builder.go index c4915d58dd59f..57a077956edea 100644 --- a/go/parquet/pqarrow/path_builder.go +++ b/go/parquet/pqarrow/path_builder.go @@ -21,11 +21,11 @@ import ( "sync/atomic" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/bitutils" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/bitutils" + "github.com/apache/arrow/go/v15/parquet/internal/encoding" "golang.org/x/xerrors" ) @@ -206,7 +206,7 @@ func (n *listNode) fillForLast(rng, childRng *elemRange, ctx *pathWriteCtx) iter fillRepLevels(int(childRng.size()), n.repLevel, ctx) // once we've reached this point the following preconditions should hold: // 1. there are no more repeated path nodes to deal with - // 2. all elements in |range| reperesent contiguous elements in the child + // 2. 
all elements in |range| represent contiguous elements in the child // array (null values would have shortened the range to ensure all // remaining list elements are present, though they may be empty) // 3. no element of range spans a parent list (intermediate list nodes @@ -225,7 +225,7 @@ func (n *listNode) fillForLast(rng, childRng *elemRange, ctx *pathWriteCtx) iter // this is the start of a new list. we can be sure that it only applies to the // previous list (and doesn't jump to the start of any list further up in nesting - // due to the contraints mentioned earlier) + // due to the constraints mentioned earlier) ctx.AppendRepLevel(n.prevRepLevel) ctx.AppendRepLevels(int(sizeCheck.size())-1, n.repLevel) childRng.end = sizeCheck.end diff --git a/go/parquet/pqarrow/path_builder_test.go b/go/parquet/pqarrow/path_builder_test.go index 16b994582df02..e0a60262d3f4a 100644 --- a/go/parquet/pqarrow/path_builder_test.go +++ b/go/parquet/pqarrow/path_builder_test.go @@ -20,10 +20,10 @@ import ( "context" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/types" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/types" "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" diff --git a/go/parquet/pqarrow/properties.go b/go/parquet/pqarrow/properties.go index 98427bbf9398c..cc100fa80d87b 100755 --- a/go/parquet/pqarrow/properties.go +++ b/go/parquet/pqarrow/properties.go @@ -19,9 +19,9 @@ package pqarrow import ( "context" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet/internal/encoding" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/memory" + 
"github.com/apache/arrow/go/v15/parquet/internal/encoding" ) // ArrowWriterProperties are used to determine how to manipulate the arrow data diff --git a/go/parquet/pqarrow/reader_writer_test.go b/go/parquet/pqarrow/reader_writer_test.go index e3f66834c2cee..9d09bcec15da6 100644 --- a/go/parquet/pqarrow/reader_writer_test.go +++ b/go/parquet/pqarrow/reader_writer_test.go @@ -22,12 +22,12 @@ import ( "testing" "unsafe" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/array" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/pqarrow" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/array" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/pqarrow" "golang.org/x/exp/rand" "gonum.org/v1/gonum/stat/distuv" ) diff --git a/go/parquet/pqarrow/schema.go b/go/parquet/pqarrow/schema.go index 9ba8554898986..95c477c78b87d 100644 --- a/go/parquet/pqarrow/schema.go +++ b/go/parquet/pqarrow/schema.go @@ -22,15 +22,15 @@ import ( "math" "strconv" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/decimal128" - "github.com/apache/arrow/go/v14/arrow/flight" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/file" - "github.com/apache/arrow/go/v14/parquet/metadata" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/decimal128" + "github.com/apache/arrow/go/v15/arrow/flight" + "github.com/apache/arrow/go/v15/arrow/ipc" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet" + 
"github.com/apache/arrow/go/v15/parquet/file" + "github.com/apache/arrow/go/v15/parquet/metadata" + "github.com/apache/arrow/go/v15/parquet/schema" "golang.org/x/xerrors" ) @@ -344,6 +344,10 @@ func fieldToNode(name string, field arrow.Field, props *parquet.WriterProperties } else { logicalType = schema.NewTimeLogicalType(true, schema.TimeUnitMicros) } + case arrow.FLOAT16: + typ = parquet.Types.FixedLenByteArray + length = arrow.Float16SizeBytes + logicalType = schema.Float16LogicalType{} case arrow.STRUCT: return structToNode(field.Type.(*arrow.StructType), field.Name, field.Nullable, props, arrprops) case arrow.FIXED_SIZE_LIST, arrow.LIST: @@ -597,6 +601,8 @@ func arrowFromFLBA(logical schema.LogicalType, length int) (arrow.DataType, erro return arrowDecimal(logtype), nil case schema.NoLogicalType, schema.IntervalLogicalType, schema.UUIDLogicalType: return &arrow.FixedSizeBinaryType{ByteWidth: int(length)}, nil + case schema.Float16LogicalType: + return &arrow.Float16Type{}, nil default: return nil, xerrors.New("unhandled logical type " + logical.String() + " for fixed-length byte array") } diff --git a/go/parquet/pqarrow/schema_test.go b/go/parquet/pqarrow/schema_test.go index e195358af6c98..ee5aad8913470 100644 --- a/go/parquet/pqarrow/schema_test.go +++ b/go/parquet/pqarrow/schema_test.go @@ -20,15 +20,15 @@ import ( "encoding/base64" "testing" - "github.com/apache/arrow/go/v14/arrow" - "github.com/apache/arrow/go/v14/arrow/flight" - "github.com/apache/arrow/go/v14/arrow/ipc" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/types" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/metadata" - "github.com/apache/arrow/go/v14/parquet/pqarrow" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow" + "github.com/apache/arrow/go/v15/arrow/flight" + "github.com/apache/arrow/go/v15/arrow/ipc" + "github.com/apache/arrow/go/v15/arrow/memory" + 
"github.com/apache/arrow/go/v15/internal/types" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/metadata" + "github.com/apache/arrow/go/v15/parquet/pqarrow" + "github.com/apache/arrow/go/v15/parquet/schema" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -280,6 +280,25 @@ func TestConvertArrowDecimals(t *testing.T) { } } +func TestConvertArrowFloat16(t *testing.T) { + parquetFields := make(schema.FieldList, 0) + arrowFields := make([]arrow.Field, 0) + + parquetFields = append(parquetFields, schema.Must(schema.NewPrimitiveNodeLogical("float16", parquet.Repetitions.Required, + schema.Float16LogicalType{}, parquet.Types.FixedLenByteArray, 2, -1))) + arrowFields = append(arrowFields, arrow.Field{Name: "float16", Type: &arrow.Float16Type{}}) + + arrowSchema := arrow.NewSchema(arrowFields, nil) + parquetSchema := schema.NewSchema(schema.MustGroup(schema.NewGroupNode("schema", parquet.Repetitions.Repeated, parquetFields, -1))) + + result, err := pqarrow.ToParquet(arrowSchema, nil, pqarrow.NewArrowWriterProperties(pqarrow.WithDeprecatedInt96Timestamps(true))) + assert.NoError(t, err) + assert.True(t, parquetSchema.Equals(result)) + for i := 0; i < parquetSchema.NumColumns(); i++ { + assert.Truef(t, parquetSchema.Column(i).Equals(result.Column(i)), "Column %d didn't match: %s", i, parquetSchema.Column(i).Name()) + } +} + func TestCoerceTImestampV1(t *testing.T) { parquetFields := make(schema.FieldList, 0) arrowFields := make([]arrow.Field, 0) @@ -418,7 +437,6 @@ func TestUnsupportedTypes(t *testing.T) { typ arrow.DataType }{ // Non-exhaustive list of unsupported types - {typ: &arrow.Float16Type{}}, {typ: &arrow.DurationType{}}, {typ: &arrow.DayTimeIntervalType{}}, {typ: &arrow.MonthIntervalType{}}, diff --git a/go/parquet/reader_properties.go b/go/parquet/reader_properties.go index dd2032958a51c..be2377527d782 100644 --- a/go/parquet/reader_properties.go +++ b/go/parquet/reader_properties.go @@ -21,8 +21,8 
@@ import ( "fmt" "io" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/internal/utils" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/internal/utils" ) // ReaderProperties are used to define how the file reader will handle buffering and allocating buffers diff --git a/go/parquet/reader_writer_properties_test.go b/go/parquet/reader_writer_properties_test.go index 6bd93a02550e4..698129471adda 100644 --- a/go/parquet/reader_writer_properties_test.go +++ b/go/parquet/reader_writer_properties_test.go @@ -20,9 +20,9 @@ import ( "bytes" "testing" - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/compress" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/compress" "github.com/stretchr/testify/assert" ) diff --git a/go/parquet/schema/column.go b/go/parquet/schema/column.go index 1575037d59d23..02d0d83e8465e 100644 --- a/go/parquet/schema/column.go +++ b/go/parquet/schema/column.go @@ -20,8 +20,8 @@ import ( "fmt" "strings" - "github.com/apache/arrow/go/v14/parquet" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/parquet" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" ) // Column encapsulates the information necessary to interpret primitive diff --git a/go/parquet/schema/converted_types.go b/go/parquet/schema/converted_types.go index 6a4702b819259..58051dba7f19b 100644 --- a/go/parquet/schema/converted_types.go +++ b/go/parquet/schema/converted_types.go @@ -17,7 +17,7 @@ package schema import ( - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" ) // ConvertedType corresponds to the ConvertedType in the parquet.Thrift, diff --git 
a/go/parquet/schema/converted_types_test.go b/go/parquet/schema/converted_types_test.go index a4f9ea0018196..8b8e061466474 100644 --- a/go/parquet/schema/converted_types_test.go +++ b/go/parquet/schema/converted_types_test.go @@ -19,7 +19,7 @@ package schema_test import ( "testing" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/parquet/schema" "github.com/stretchr/testify/assert" ) diff --git a/go/parquet/schema/helpers.go b/go/parquet/schema/helpers.go index 23a5a0b667f26..13075a065f073 100644 --- a/go/parquet/schema/helpers.go +++ b/go/parquet/schema/helpers.go @@ -17,7 +17,7 @@ package schema import ( - "github.com/apache/arrow/go/v14/parquet" + "github.com/apache/arrow/go/v15/parquet" "golang.org/x/xerrors" ) diff --git a/go/parquet/schema/helpers_test.go b/go/parquet/schema/helpers_test.go index 6d136e4ecd155..98f3cab36d433 100644 --- a/go/parquet/schema/helpers_test.go +++ b/go/parquet/schema/helpers_test.go @@ -21,8 +21,8 @@ import ( "strings" "testing" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/schema" "github.com/stretchr/testify/assert" ) diff --git a/go/parquet/schema/logical_types.go b/go/parquet/schema/logical_types.go index a0607a1a2f62d..69e69363887cd 100644 --- a/go/parquet/schema/logical_types.go +++ b/go/parquet/schema/logical_types.go @@ -20,10 +20,10 @@ import ( "fmt" "math" - "github.com/apache/arrow/go/v14/internal/json" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/internal/debug" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/internal/json" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/internal/debug" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" ) // DecimalMetadata is a struct for managing scale and 
precision information between @@ -68,6 +68,8 @@ func getLogicalType(l *format.LogicalType) LogicalType { return BSONLogicalType{} case l.IsSetUUID(): return UUIDLogicalType{} + case l.IsSetFLOAT16(): + return Float16LogicalType{} case l == nil: return NoLogicalType{} default: @@ -1064,6 +1066,50 @@ func (IntervalLogicalType) Equals(rhs LogicalType) bool { return ok } +// Float16LogicalType can only be used with a FixedLength byte array column +// that is exactly 2 bytes long +type Float16LogicalType struct{ baseLogicalType } + +func (Float16LogicalType) SortOrder() SortOrder { + return SortSIGNED +} + +func (Float16LogicalType) MarshalJSON() ([]byte, error) { + return json.Marshal(map[string]string{"Type": Float16LogicalType{}.String()}) +} + +func (Float16LogicalType) String() string { + return "Float16" +} + +func (Float16LogicalType) ToConvertedType() (ConvertedType, DecimalMetadata) { + return ConvertedTypes.None, DecimalMetadata{} +} + +func (Float16LogicalType) IsCompatible(c ConvertedType, dec DecimalMetadata) bool { + if dec.IsSet { + return false + } + switch c { + case ConvertedTypes.None, ConvertedTypes.NA: + return true + } + return false +} + +func (Float16LogicalType) IsApplicable(t parquet.Type, tlen int32) bool { + return t == parquet.Types.FixedLenByteArray && tlen == 2 +} + +func (Float16LogicalType) toThrift() *format.LogicalType { + return &format.LogicalType{FLOAT16: format.NewFloat16Type()} +} + +func (Float16LogicalType) Equals(rhs LogicalType) bool { + _, ok := rhs.(Float16LogicalType) + return ok +} + type NullLogicalType struct{ baseLogicalType } func (NullLogicalType) SortOrder() SortOrder { diff --git a/go/parquet/schema/logical_types_test.go b/go/parquet/schema/logical_types_test.go index 8531a061abb77..c371b47714f41 100644 --- a/go/parquet/schema/logical_types_test.go +++ b/go/parquet/schema/logical_types_test.go @@ -19,9 +19,9 @@ package schema_test import ( "testing" - "github.com/apache/arrow/go/v14/internal/json" - 
"github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/internal/json" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/schema" "github.com/stretchr/testify/assert" ) @@ -158,6 +158,7 @@ func TestNewTypeIncompatibility(t *testing.T) { expected schema.LogicalType }{ {"uuid", schema.UUIDLogicalType{}, schema.UUIDLogicalType{}}, + {"float16", schema.Float16LogicalType{}, schema.Float16LogicalType{}}, {"null", schema.NullLogicalType{}, schema.NullLogicalType{}}, {"not-utc-time_milli", schema.NewTimeLogicalType(false /* adjutedToUTC */, schema.TimeUnitMillis), &schema.TimeLogicalType{}}, {"not-utc-time-micro", schema.NewTimeLogicalType(false /* adjutedToUTC */, schema.TimeUnitMicros), &schema.TimeLogicalType{}}, @@ -224,6 +225,7 @@ func TestLogicalTypeProperties(t *testing.T) { {"json", schema.JSONLogicalType{}, false, true, true}, {"bson", schema.BSONLogicalType{}, false, true, true}, {"uuid", schema.UUIDLogicalType{}, false, true, true}, + {"float16", schema.Float16LogicalType{}, false, true, true}, {"nological", schema.NoLogicalType{}, false, false, true}, {"unknown", schema.UnknownLogicalType{}, false, false, false}, } @@ -358,6 +360,14 @@ func TestLogicalInapplicableTypes(t *testing.T) { assert.False(t, logical.IsApplicable(tt.typ, tt.len)) }) } + + logical = schema.Float16LogicalType{} + assert.True(t, logical.IsApplicable(parquet.Types.FixedLenByteArray, 2)) + for _, tt := range tests { + t.Run("float16 "+tt.name, func(t *testing.T) { + assert.False(t, logical.IsApplicable(tt.typ, tt.len)) + }) + } } func TestDecimalLogicalTypeApplicability(t *testing.T) { @@ -445,6 +455,7 @@ func TestLogicalTypeRepresentation(t *testing.T) { {"json", schema.JSONLogicalType{}, "JSON", `{"Type": "JSON"}`}, {"bson", schema.BSONLogicalType{}, "BSON", `{"Type": "BSON"}`}, {"uuid", schema.UUIDLogicalType{}, "UUID", `{"Type": "UUID"}`}, + {"float16", schema.Float16LogicalType{}, 
"Float16", `{"Type": "Float16"}`}, {"none", schema.NoLogicalType{}, "None", `{"Type": "None"}`}, } @@ -490,6 +501,7 @@ func TestLogicalTypeSortOrder(t *testing.T) { {"json", schema.JSONLogicalType{}, schema.SortUNSIGNED}, {"bson", schema.BSONLogicalType{}, schema.SortUNSIGNED}, {"uuid", schema.UUIDLogicalType{}, schema.SortUNSIGNED}, + {"float16", schema.Float16LogicalType{}, schema.SortSIGNED}, {"none", schema.NoLogicalType{}, schema.SortUNKNOWN}, } diff --git a/go/parquet/schema/node.go b/go/parquet/schema/node.go index 28259c8d61f58..ff23624afa35d 100644 --- a/go/parquet/schema/node.go +++ b/go/parquet/schema/node.go @@ -19,8 +19,8 @@ package schema import ( "fmt" - "github.com/apache/arrow/go/v14/parquet" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/parquet" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" "github.com/apache/thrift/lib/go/thrift" "golang.org/x/xerrors" ) diff --git a/go/parquet/schema/reflection.go b/go/parquet/schema/reflection.go index bc863f1390030..c0c8e0533efb0 100644 --- a/go/parquet/schema/reflection.go +++ b/go/parquet/schema/reflection.go @@ -22,8 +22,9 @@ import ( "strconv" "strings" - "github.com/apache/arrow/go/v14/parquet" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/parquet" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" "golang.org/x/xerrors" ) @@ -159,6 +160,8 @@ func (t *taggedInfo) UpdateLogicalTypes() { return BSONLogicalType{} case "uuid": return UUIDLogicalType{} + case "float16": + return Float16LogicalType{} default: panic(fmt.Errorf("invalid logical type specified: %s", t)) } @@ -373,6 +376,9 @@ func typeToNode(name string, typ reflect.Type, repType parquet.Repetition, info } return Must(MapOf(name, key, value, repType, fieldID)) case reflect.Struct: + if typ == reflect.TypeOf(float16.Num{}) { + 
return MustPrimitive(NewPrimitiveNodeLogical(name, repType, Float16LogicalType{}, parquet.Types.FixedLenByteArray, 2, fieldID)) + } // structs are Group nodes fields := make(FieldList, 0) for i := 0; i < typ.NumField(); i++ { diff --git a/go/parquet/schema/reflection_test.go b/go/parquet/schema/reflection_test.go index e18feada7a796..e3a880cacc1e8 100644 --- a/go/parquet/schema/reflection_test.go +++ b/go/parquet/schema/reflection_test.go @@ -22,8 +22,9 @@ import ( "reflect" "testing" - "github.com/apache/arrow/go/v14/parquet" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/arrow/float16" + "github.com/apache/arrow/go/v15/parquet" + "github.com/apache/arrow/go/v15/parquet/schema" "github.com/stretchr/testify/assert" ) @@ -152,6 +153,9 @@ func ExampleNewSchemaFromStruct_logicaltypes() { JSON string `parquet:"logical=json"` BSON []byte `parquet:"logical=BSON"` UUID [16]byte `parquet:"logical=uuid"` + Float16 [2]byte `parquet:"logical=float16"` + Float16Optional *[2]byte `parquet:"logical=float16"` + Float16Num float16.Num } sc, err := schema.NewSchemaFromStruct(LogicalTypes{}) @@ -180,6 +184,9 @@ func ExampleNewSchemaFromStruct_logicaltypes() { // required byte_array field_id=-1 JSON (JSON); // required byte_array field_id=-1 BSON (BSON); // required fixed_len_byte_array field_id=-1 UUID (UUID); + // required fixed_len_byte_array field_id=-1 Float16 (Float16); + // optional fixed_len_byte_array field_id=-1 Float16Optional (Float16); + // required fixed_len_byte_array field_id=-1 Float16Num (Float16); // } } diff --git a/go/parquet/schema/schema.go b/go/parquet/schema/schema.go index 1b3363f58d11f..ace2775763a06 100644 --- a/go/parquet/schema/schema.go +++ b/go/parquet/schema/schema.go @@ -35,8 +35,8 @@ import ( "io" "strings" - "github.com/apache/arrow/go/v14/parquet" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/parquet" + format 
"github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" "golang.org/x/xerrors" ) diff --git a/go/parquet/schema/schema_element_test.go b/go/parquet/schema/schema_element_test.go index e9a2ef5eb31f6..d190ffe5a253a 100644 --- a/go/parquet/schema/schema_element_test.go +++ b/go/parquet/schema/schema_element_test.go @@ -19,8 +19,8 @@ package schema import ( "testing" - "github.com/apache/arrow/go/v14/parquet" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/parquet" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" ) @@ -159,6 +159,10 @@ func (s *SchemaElementConstructionSuite) TestSimple() { "uuid", UUIDLogicalType{}, parquet.Types.FixedLenByteArray, 16, false, ConvertedTypes.NA, true, func(e *format.SchemaElement) bool { return e.LogicalType.IsSetUUID() }, }, nil}, + {"float16", &schemaElementConstructArgs{ + "float16", Float16LogicalType{}, parquet.Types.FixedLenByteArray, 2, false, ConvertedTypes.NA, true, + func(e *format.SchemaElement) bool { return e.LogicalType.IsSetFLOAT16() }, + }, nil}, {"none", &schemaElementConstructArgs{ "none", NoLogicalType{}, parquet.Types.Int64, -1, false, ConvertedTypes.NA, false, checkNone, @@ -425,7 +429,8 @@ func TestSchemaElementNestedSerialization(t *testing.T) { timestampNode := MustPrimitive(NewPrimitiveNodeLogical("timestamp" /*name */, parquet.Repetitions.Required, NewTimestampLogicalType(false /* adjustedToUTC */, TimeUnitNanos), parquet.Types.Int64, -1 /* type len */, -1 /* fieldID */)) intNode := MustPrimitive(NewPrimitiveNodeLogical("int" /*name */, parquet.Repetitions.Required, NewIntLogicalType(64 /* bitWidth */, false /* signed */), parquet.Types.Int64, -1 /* type len */, -1 /* fieldID */)) decimalNode := MustPrimitive(NewPrimitiveNodeLogical("decimal" /*name */, parquet.Repetitions.Required, NewDecimalLogicalType(16 /* precision */, 6 /* scale */), 
parquet.Types.Int64, -1 /* type len */, -1 /* fieldID */)) - listNode := MustGroup(NewGroupNodeLogical("list" /*name */, parquet.Repetitions.Repeated, []Node{strNode, dateNode, jsonNode, uuidNode, timestampNode, intNode, decimalNode}, NewListLogicalType(), -1 /* fieldID */)) + float16Node := MustPrimitive(NewPrimitiveNodeLogical("float16" /*name */, parquet.Repetitions.Required, Float16LogicalType{}, parquet.Types.FixedLenByteArray, 2 /* type len */, - /* fieldID */ 1)) + listNode := MustGroup(NewGroupNodeLogical("list" /*name */, parquet.Repetitions.Repeated, []Node{strNode, dateNode, jsonNode, uuidNode, timestampNode, intNode, decimalNode, float16Node}, NewListLogicalType(), -1 /* fieldID */)) listElems := ToThrift(listNode) assert.Equal(t, "list", listElems[0].Name) @@ -440,6 +445,7 @@ func TestSchemaElementNestedSerialization(t *testing.T) { assert.True(t, listElems[5].LogicalType.IsSetTIMESTAMP()) assert.True(t, listElems[6].LogicalType.IsSetINTEGER()) assert.True(t, listElems[7].LogicalType.IsSetDECIMAL()) + assert.True(t, listElems[8].LogicalType.IsSetFLOAT16()) mapNode := MustGroup(NewGroupNodeLogical("map" /* name */, parquet.Repetitions.Required, []Node{}, MapLogicalType{}, -1 /* fieldID */)) mapElems := ToThrift(mapNode) @@ -486,6 +492,7 @@ func TestLogicalTypeSerializationRoundTrip(t *testing.T) { {"json", JSONLogicalType{}, parquet.Types.ByteArray, -1}, {"bson", BSONLogicalType{}, parquet.Types.ByteArray, -1}, {"uuid", UUIDLogicalType{}, parquet.Types.FixedLenByteArray, 16}, + {"float16", Float16LogicalType{}, parquet.Types.FixedLenByteArray, 2}, {"none", NoLogicalType{}, parquet.Types.Boolean, -1}, } diff --git a/go/parquet/schema/schema_flatten_test.go b/go/parquet/schema/schema_flatten_test.go index 90741ae9b43bb..34f4ac8d3c450 100644 --- a/go/parquet/schema/schema_flatten_test.go +++ b/go/parquet/schema/schema_flatten_test.go @@ -19,8 +19,8 @@ package schema import ( "testing" - "github.com/apache/arrow/go/v14/parquet" - format 
"github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/parquet" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" ) diff --git a/go/parquet/schema/schema_test.go b/go/parquet/schema/schema_test.go index e69a13e07986e..cc43c3856d68e 100644 --- a/go/parquet/schema/schema_test.go +++ b/go/parquet/schema/schema_test.go @@ -20,9 +20,9 @@ import ( "os" "testing" - "github.com/apache/arrow/go/v14/parquet" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" - "github.com/apache/arrow/go/v14/parquet/schema" + "github.com/apache/arrow/go/v15/parquet" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/parquet/schema" "github.com/apache/thrift/lib/go/thrift" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" @@ -635,6 +635,10 @@ func TestPanicSchemaNodeCreation(t *testing.T) { schema.MustPrimitive(schema.NewPrimitiveNodeLogical("uuid" /* name */, parquet.Repetitions.Required, schema.UUIDLogicalType{}, parquet.Types.FixedLenByteArray, 64 /* type len */, -1 /* fieldID */)) }, "incompatible primitive length") + assert.Panics(t, func() { + schema.MustPrimitive(schema.NewPrimitiveNodeLogical("float16" /* name */, parquet.Repetitions.Required, schema.Float16LogicalType{}, parquet.Types.FixedLenByteArray, 4 /* type len */, -1 /* fieldID */)) + }, "incompatible primitive length") + assert.Panics(t, func() { schema.MustPrimitive(schema.NewPrimitiveNodeLogical("negative_len" /* name */, parquet.Repetitions.Required, schema.NoLogicalType{}, parquet.Types.FixedLenByteArray, -16 /* type len */, -1 /* fieldID */)) }, "non-positive length for fixed length binary") diff --git a/go/parquet/types.go b/go/parquet/types.go index 7b6df2ea9d16c..5b6f8fb0251d9 100644 --- a/go/parquet/types.go +++ b/go/parquet/types.go @@ -24,8 +24,8 @@ import ( "time" "unsafe" - 
"github.com/apache/arrow/go/v14/arrow" - format "github.com/apache/arrow/go/v14/parquet/internal/gen-go/parquet" + "github.com/apache/arrow/go/v15/arrow" + format "github.com/apache/arrow/go/v15/parquet/internal/gen-go/parquet" ) const ( diff --git a/go/parquet/writer_properties.go b/go/parquet/writer_properties.go index e4199f451f5f9..9e33bddf7faa7 100644 --- a/go/parquet/writer_properties.go +++ b/go/parquet/writer_properties.go @@ -17,8 +17,8 @@ package parquet import ( - "github.com/apache/arrow/go/v14/arrow/memory" - "github.com/apache/arrow/go/v14/parquet/compress" + "github.com/apache/arrow/go/v15/arrow/memory" + "github.com/apache/arrow/go/v15/parquet/compress" ) // Constants for default property values used for the default reader, writer and column props. @@ -46,7 +46,7 @@ const ( DefaultStatsEnabled = true // If the stats are larger than 4K the writer will skip writing them out anyways. DefaultMaxStatsSize int64 = 4096 - DefaultCreatedBy = "parquet-go version 14.0.0-SNAPSHOT" + DefaultCreatedBy = "parquet-go version 15.0.0-SNAPSHOT" DefaultRootName = "schema" ) diff --git a/java/adapter/avro/pom.xml b/java/adapter/avro/pom.xml index 6735094d11bb4..c0410ea4c2314 100644 --- a/java/adapter/avro/pom.xml +++ b/java/adapter/avro/pom.xml @@ -16,7 +16,7 @@ org.apache.arrow arrow-java-root - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT ../../pom.xml diff --git a/java/adapter/jdbc/pom.xml b/java/adapter/jdbc/pom.xml index 127202c5215ec..2490f708e6f24 100644 --- a/java/adapter/jdbc/pom.xml +++ b/java/adapter/jdbc/pom.xml @@ -16,7 +16,7 @@ org.apache.arrow arrow-java-root - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT ../../pom.xml diff --git a/java/adapter/orc/pom.xml b/java/adapter/orc/pom.xml index ec85bf8fe720c..72ba13ea81738 100644 --- a/java/adapter/orc/pom.xml +++ b/java/adapter/orc/pom.xml @@ -111,7 +111,7 @@ org.apache.arrow arrow-java-root - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT ../../pom.xml diff --git a/java/algorithm/pom.xml b/java/algorithm/pom.xml index 
235c9440f9196..8c6a9fb0151ef 100644 --- a/java/algorithm/pom.xml +++ b/java/algorithm/pom.xml @@ -14,7 +14,7 @@ org.apache.arrow arrow-java-root - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT arrow-algorithm Arrow Algorithms diff --git a/java/algorithm/src/main/java/org/apache/arrow/algorithm/sort/DefaultVectorComparators.java b/java/algorithm/src/main/java/org/apache/arrow/algorithm/sort/DefaultVectorComparators.java index 4f9c8b7d71bab..588876aa99059 100644 --- a/java/algorithm/src/main/java/org/apache/arrow/algorithm/sort/DefaultVectorComparators.java +++ b/java/algorithm/src/main/java/org/apache/arrow/algorithm/sort/DefaultVectorComparators.java @@ -32,11 +32,13 @@ import org.apache.arrow.vector.Decimal256Vector; import org.apache.arrow.vector.DecimalVector; import org.apache.arrow.vector.DurationVector; +import org.apache.arrow.vector.FixedSizeBinaryVector; import org.apache.arrow.vector.Float4Vector; import org.apache.arrow.vector.Float8Vector; import org.apache.arrow.vector.IntVector; import org.apache.arrow.vector.IntervalDayVector; import org.apache.arrow.vector.IntervalMonthDayNanoVector; +import org.apache.arrow.vector.NullVector; import org.apache.arrow.vector.SmallIntVector; import org.apache.arrow.vector.TimeMicroVector; import org.apache.arrow.vector.TimeMilliVector; @@ -50,7 +52,9 @@ import org.apache.arrow.vector.UInt8Vector; import org.apache.arrow.vector.ValueVector; import org.apache.arrow.vector.VariableWidthVector; -import org.apache.arrow.vector.complex.BaseRepeatedValueVector; +import org.apache.arrow.vector.complex.FixedSizeListVector; +import org.apache.arrow.vector.complex.RepeatedValueVector; +import org.apache.arrow.vector.holders.NullableFixedSizeBinaryHolder; /** * Default comparator implementations for different types of vectors. 
@@ -111,13 +115,21 @@ public static VectorValueComparator createDefaultComp return (VectorValueComparator) new TimeSecComparator(); } else if (vector instanceof TimeStampVector) { return (VectorValueComparator) new TimeStampComparator(); + } else if (vector instanceof FixedSizeBinaryVector) { + return (VectorValueComparator) new FixedSizeBinaryComparator(); } } else if (vector instanceof VariableWidthVector) { return (VectorValueComparator) new VariableWidthComparator(); - } else if (vector instanceof BaseRepeatedValueVector) { + } else if (vector instanceof RepeatedValueVector) { VectorValueComparator innerComparator = - createDefaultComparator(((BaseRepeatedValueVector) vector).getDataVector()); + createDefaultComparator(((RepeatedValueVector) vector).getDataVector()); return new RepeatedValueComparator(innerComparator); + } else if (vector instanceof FixedSizeListVector) { + VectorValueComparator innerComparator = + createDefaultComparator(((FixedSizeListVector) vector).getDataVector()); + return new FixedSizeListComparator(innerComparator); + } else if (vector instanceof NullVector) { + return (VectorValueComparator) new NullComparator(); } throw new IllegalArgumentException("No default comparator for " + vector.getClass().getCanonicalName()); @@ -674,6 +686,61 @@ public VectorValueComparator createNew() { } } + /** + * Default comparator for {@link org.apache.arrow.vector.FixedSizeBinaryVector}. + * The comparison is in lexicographic order, with null comes first. 
+ */ + public static class FixedSizeBinaryComparator extends VectorValueComparator { + + @Override + public int compare(int index1, int index2) { + NullableFixedSizeBinaryHolder holder1 = new NullableFixedSizeBinaryHolder(); + NullableFixedSizeBinaryHolder holder2 = new NullableFixedSizeBinaryHolder(); + vector1.get(index1, holder1); + vector2.get(index2, holder2); + + return ByteFunctionHelpers.compare( + holder1.buffer, 0, holder1.byteWidth, holder2.buffer, 0, holder2.byteWidth); + } + + @Override + public int compareNotNull(int index1, int index2) { + NullableFixedSizeBinaryHolder holder1 = new NullableFixedSizeBinaryHolder(); + NullableFixedSizeBinaryHolder holder2 = new NullableFixedSizeBinaryHolder(); + vector1.get(index1, holder1); + vector2.get(index2, holder2); + + return ByteFunctionHelpers.compare( + holder1.buffer, 0, holder1.byteWidth, holder2.buffer, 0, holder2.byteWidth); + } + + @Override + public VectorValueComparator createNew() { + return new FixedSizeBinaryComparator(); + } + } + + /** + * Default comparator for {@link org.apache.arrow.vector.NullVector}. + */ + public static class NullComparator extends VectorValueComparator { + @Override + public int compare(int index1, int index2) { + // Values are always equal (and are always null). + return 0; + } + + @Override + public int compareNotNull(int index1, int index2) { + throw new AssertionError("Cannot compare non-null values in a NullVector."); + } + + @Override + public VectorValueComparator createNew() { + return new NullComparator(); + } + } + /** * Default comparator for {@link org.apache.arrow.vector.VariableWidthVector}. * The comparison is in lexicographic order, with null comes first. @@ -705,14 +772,14 @@ public VectorValueComparator createNew() { } /** - * Default comparator for {@link BaseRepeatedValueVector}. + * Default comparator for {@link RepeatedValueVector}. * It works by comparing the underlying vector in a lexicographic order. * @param inner vector type. 
*/ public static class RepeatedValueComparator - extends VectorValueComparator { + extends VectorValueComparator { - private VectorValueComparator innerComparator; + private final VectorValueComparator innerComparator; public RepeatedValueComparator(VectorValueComparator innerComparator) { this.innerComparator = innerComparator; @@ -720,16 +787,16 @@ public RepeatedValueComparator(VectorValueComparator innerComparator) { @Override public int compareNotNull(int index1, int index2) { - int startIdx1 = vector1.getOffsetBuffer().getInt(index1 * OFFSET_WIDTH); - int startIdx2 = vector2.getOffsetBuffer().getInt(index2 * OFFSET_WIDTH); + int startIdx1 = vector1.getOffsetBuffer().getInt((long) index1 * OFFSET_WIDTH); + int startIdx2 = vector2.getOffsetBuffer().getInt((long) index2 * OFFSET_WIDTH); - int endIdx1 = vector1.getOffsetBuffer().getInt((index1 + 1) * OFFSET_WIDTH); - int endIdx2 = vector2.getOffsetBuffer().getInt((index2 + 1) * OFFSET_WIDTH); + int endIdx1 = vector1.getOffsetBuffer().getInt((long) (index1 + 1) * OFFSET_WIDTH); + int endIdx2 = vector2.getOffsetBuffer().getInt((long) (index2 + 1) * OFFSET_WIDTH); int length1 = endIdx1 - startIdx1; int length2 = endIdx2 - startIdx2; - int length = length1 < length2 ? 
length1 : length2; + int length = Math.min(length1, length2); for (int i = 0; i < length; i++) { int result = innerComparator.compare(startIdx1 + i, startIdx2 + i); @@ -741,13 +808,60 @@ public int compareNotNull(int index1, int index2) { } @Override - public VectorValueComparator createNew() { + public VectorValueComparator createNew() { VectorValueComparator newInnerComparator = innerComparator.createNew(); return new RepeatedValueComparator<>(newInnerComparator); } @Override - public void attachVectors(BaseRepeatedValueVector vector1, BaseRepeatedValueVector vector2) { + public void attachVectors(RepeatedValueVector vector1, RepeatedValueVector vector2) { + this.vector1 = vector1; + this.vector2 = vector2; + + innerComparator.attachVectors((T) vector1.getDataVector(), (T) vector2.getDataVector()); + } + } + + /** + * Default comparator for {@link RepeatedValueVector}. + * It works by comparing the underlying vector in a lexicographic order. + * @param inner vector type. + */ + public static class FixedSizeListComparator + extends VectorValueComparator { + + private final VectorValueComparator innerComparator; + + public FixedSizeListComparator(VectorValueComparator innerComparator) { + this.innerComparator = innerComparator; + } + + @Override + public int compareNotNull(int index1, int index2) { + int length1 = vector1.getListSize(); + int length2 = vector2.getListSize(); + + int length = Math.min(length1, length2); + int startIdx1 = vector1.getElementStartIndex(index1); + int startIdx2 = vector2.getElementStartIndex(index2); + + for (int i = 0; i < length; i++) { + int result = innerComparator.compare(startIdx1 + i, startIdx2 + i); + if (result != 0) { + return result; + } + } + return length1 - length2; + } + + @Override + public VectorValueComparator createNew() { + VectorValueComparator newInnerComparator = innerComparator.createNew(); + return new FixedSizeListComparator<>(newInnerComparator); + } + + @Override + public void 
attachVectors(FixedSizeListVector vector1, FixedSizeListVector vector2) { this.vector1 = vector1; this.vector2 = vector2; diff --git a/java/algorithm/src/test/java/org/apache/arrow/algorithm/sort/TestDefaultVectorComparator.java b/java/algorithm/src/test/java/org/apache/arrow/algorithm/sort/TestDefaultVectorComparator.java index bdae85110aa62..43c634b7647fb 100644 --- a/java/algorithm/src/test/java/org/apache/arrow/algorithm/sort/TestDefaultVectorComparator.java +++ b/java/algorithm/src/test/java/org/apache/arrow/algorithm/sort/TestDefaultVectorComparator.java @@ -31,12 +31,14 @@ import org.apache.arrow.vector.Decimal256Vector; import org.apache.arrow.vector.DecimalVector; import org.apache.arrow.vector.DurationVector; +import org.apache.arrow.vector.FixedSizeBinaryVector; import org.apache.arrow.vector.Float4Vector; import org.apache.arrow.vector.Float8Vector; import org.apache.arrow.vector.IntVector; import org.apache.arrow.vector.IntervalDayVector; import org.apache.arrow.vector.LargeVarBinaryVector; import org.apache.arrow.vector.LargeVarCharVector; +import org.apache.arrow.vector.NullVector; import org.apache.arrow.vector.SmallIntVector; import org.apache.arrow.vector.TimeMicroVector; import org.apache.arrow.vector.TimeMilliVector; @@ -52,6 +54,8 @@ import org.apache.arrow.vector.ValueVector; import org.apache.arrow.vector.VarBinaryVector; import org.apache.arrow.vector.VarCharVector; +import org.apache.arrow.vector.complex.FixedSizeListVector; +import org.apache.arrow.vector.complex.LargeListVector; import org.apache.arrow.vector.complex.ListVector; import org.apache.arrow.vector.testing.ValueVectorDataPopulator; import org.apache.arrow.vector.types.TimeUnit; @@ -158,6 +162,61 @@ public void testCopiedComparatorForLists() { } } + private FixedSizeListVector createFixedSizeListVector(int count) { + FixedSizeListVector listVector = FixedSizeListVector.empty("list vector", count, allocator); + Types.MinorType type = Types.MinorType.INT; + 
listVector.addOrGetVector(FieldType.nullable(type.getType())); + listVector.allocateNew(); + + IntVector dataVector = (IntVector) listVector.getDataVector(); + + for (int i = 0; i < count; i++) { + dataVector.set(i, i); + } + dataVector.setValueCount(count); + + listVector.setNotNull(0); + listVector.setValueCount(1); + + return listVector; + } + + @Test + public void testCompareFixedSizeLists() { + try (FixedSizeListVector listVector1 = createFixedSizeListVector(10); + FixedSizeListVector listVector2 = createFixedSizeListVector(11)) { + VectorValueComparator comparator = + DefaultVectorComparators.createDefaultComparator(listVector1); + comparator.attachVectors(listVector1, listVector2); + + // prefix is smaller + assertTrue(comparator.compare(0, 0) < 0); + } + + try (FixedSizeListVector listVector1 = createFixedSizeListVector(11); + FixedSizeListVector listVector2 = createFixedSizeListVector(11)) { + ((IntVector) listVector2.getDataVector()).set(10, 110); + + VectorValueComparator comparator = + DefaultVectorComparators.createDefaultComparator(listVector1); + comparator.attachVectors(listVector1, listVector2); + + // breaking tie by the last element + assertTrue(comparator.compare(0, 0) < 0); + } + + try (FixedSizeListVector listVector1 = createFixedSizeListVector(10); + FixedSizeListVector listVector2 = createFixedSizeListVector(10)) { + + VectorValueComparator comparator = + DefaultVectorComparators.createDefaultComparator(listVector1); + comparator.attachVectors(listVector1, listVector2); + + // list vector elements equal + assertTrue(comparator.compare(0, 0) == 0); + } + } + @Test public void testCompareUInt1() { try (UInt1Vector vec = new UInt1Vector("", allocator)) { @@ -845,6 +904,65 @@ public void testCompareTimeStamp() { } } + @Test + public void testCompareFixedSizeBinary() { + try (FixedSizeBinaryVector vector1 = new FixedSizeBinaryVector("test1", allocator, 2); + FixedSizeBinaryVector vector2 = new FixedSizeBinaryVector("test1", allocator, 3)) { + 
vector1.allocateNew(); + vector2.allocateNew(); + vector1.set(0, new byte[] {1, 1}); + vector2.set(0, new byte[] {1, 1, 0}); + VectorValueComparator comparator = + DefaultVectorComparators.createDefaultComparator(vector1); + comparator.attachVectors(vector1, vector2); + + // prefix is smaller + assertTrue(comparator.compare(0, 0) < 0); + } + + try (FixedSizeBinaryVector vector1 = new FixedSizeBinaryVector("test1", allocator, 3); + FixedSizeBinaryVector vector2 = new FixedSizeBinaryVector("test1", allocator, 3)) { + vector1.allocateNew(); + vector2.allocateNew(); + vector1.set(0, new byte[] {1, 1, 0}); + vector2.set(0, new byte[] {1, 1, 1}); + VectorValueComparator comparator = + DefaultVectorComparators.createDefaultComparator(vector1); + comparator.attachVectors(vector1, vector2); + + // breaking tie by the last element + assertTrue(comparator.compare(0, 0) < 0); + } + + try (FixedSizeBinaryVector vector1 = new FixedSizeBinaryVector("test1", allocator, 3); + FixedSizeBinaryVector vector2 = new FixedSizeBinaryVector("test1", allocator, 3)) { + vector1.allocateNew(); + vector2.allocateNew(); + vector1.set(0, new byte[] {1, 1, 1}); + vector2.set(0, new byte[] {1, 1, 1}); + VectorValueComparator comparator = + DefaultVectorComparators.createDefaultComparator(vector1); + comparator.attachVectors(vector1, vector2); + + // list vector elements equal + assertTrue(comparator.compare(0, 0) == 0); + } + } + + @Test + public void testCompareNull() { + try (NullVector vec = new NullVector("test", + FieldType.notNullable(new ArrowType.Int(32, false)))) { + vec.setValueCount(2); + + VectorValueComparator comparator = + DefaultVectorComparators.createDefaultComparator(vec); + comparator.attachVector(vec); + assertEquals(DefaultVectorComparators.NullComparator.class, comparator.getClass()); + assertEquals(0, comparator.compare(0, 1)); + } + } + @Test public void testCheckNullsOnCompareIsFalseForNonNullableVector() { try (IntVector vec = new IntVector("not nullable", @@ -937,4 
+1055,18 @@ private static void verifyVariableWidthComparatorReturne VectorValueComparator comparator = DefaultVectorComparators.createDefaultComparator(vec); assertEquals(DefaultVectorComparators.VariableWidthComparator.class, comparator.getClass()); } + + @Test + public void testRepeatedDefaultComparators() { + final FieldType type = FieldType.nullable(Types.MinorType.INT.getType()); + try (final LargeListVector vector = new LargeListVector("list", allocator, type, null)) { + vector.addOrGetVector(FieldType.nullable(type.getType())); + verifyRepeatedComparatorReturned(vector); + } + } + + private static void verifyRepeatedComparatorReturned(V vec) { + VectorValueComparator comparator = DefaultVectorComparators.createDefaultComparator(vec); + assertEquals(DefaultVectorComparators.RepeatedValueComparator.class, comparator.getClass()); + } } diff --git a/java/bom/pom.xml b/java/bom/pom.xml index 7ea0b9bf514be..1f6f854f60013 100644 --- a/java/bom/pom.xml +++ b/java/bom/pom.xml @@ -20,7 +20,7 @@ org.apache.arrow arrow-bom - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT pom Arrow Bill of Materials Arrow Bill of Materials diff --git a/java/c/pom.xml b/java/c/pom.xml index a40a7f9e7f5f6..d66379d356624 100644 --- a/java/c/pom.xml +++ b/java/c/pom.xml @@ -13,7 +13,7 @@ arrow-java-root org.apache.arrow - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT 4.0.0 diff --git a/java/compression/pom.xml b/java/compression/pom.xml index ff2835acbe8a4..129268b9e6a07 100644 --- a/java/compression/pom.xml +++ b/java/compression/pom.xml @@ -14,7 +14,7 @@ org.apache.arrow arrow-java-root - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT arrow-compression Arrow Compression diff --git a/java/dataset/pom.xml b/java/dataset/pom.xml index ad88c02c16fe3..b533a1733521b 100644 --- a/java/dataset/pom.xml +++ b/java/dataset/pom.xml @@ -15,7 +15,7 @@ arrow-java-root org.apache.arrow - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT 4.0.0 diff --git a/java/flight/flight-core/pom.xml b/java/flight/flight-core/pom.xml index c9fa75615032b..7b69179053d7f 
100644 --- a/java/flight/flight-core/pom.xml +++ b/java/flight/flight-core/pom.xml @@ -14,7 +14,7 @@ arrow-flight org.apache.arrow - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT ../pom.xml diff --git a/java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightEndpoint.java b/java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightEndpoint.java index ad78cfbd210d4..1967fe1d91c34 100644 --- a/java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightEndpoint.java +++ b/java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightEndpoint.java @@ -23,6 +23,7 @@ import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; +import java.util.Base64; import java.util.Collections; import java.util.List; import java.util.Objects; @@ -30,6 +31,7 @@ import org.apache.arrow.flight.impl.Flight; +import com.google.protobuf.ByteString; import com.google.protobuf.Timestamp; /** @@ -39,6 +41,7 @@ public class FlightEndpoint { private final List locations; private final Ticket ticket; private final Instant expirationTime; + private final byte[] appMetadata; /** * Constructs a new endpoint with no expiration time. @@ -54,13 +57,22 @@ public FlightEndpoint(Ticket ticket, Location... locations) { * Constructs a new endpoint with an expiration time. * * @param ticket A ticket that describe the key of a data stream. + * @param expirationTime (optional) When this endpoint expires. * @param locations The possible locations the stream can be retrieved from. */ public FlightEndpoint(Ticket ticket, Instant expirationTime, Location... locations) { + this(ticket, expirationTime, null, Collections.unmodifiableList(new ArrayList<>(Arrays.asList(locations)))); + } + + /** + * Private constructor with all parameters. Should only be called by Builder. 
+ */ + private FlightEndpoint(Ticket ticket, Instant expirationTime, byte[] appMetadata, List locations) { Objects.requireNonNull(ticket); - this.locations = Collections.unmodifiableList(new ArrayList<>(Arrays.asList(locations))); + this.locations = locations; this.expirationTime = expirationTime; this.ticket = ticket; + this.appMetadata = appMetadata; } /** @@ -77,6 +89,7 @@ public FlightEndpoint(Ticket ticket, Instant expirationTime, Location... locatio } else { this.expirationTime = null; } + this.appMetadata = (flt.getAppMetadata().size() == 0 ? null : flt.getAppMetadata().toByteArray()); this.ticket = new Ticket(flt.getTicket()); } @@ -92,6 +105,10 @@ public Optional getExpirationTime() { return Optional.ofNullable(expirationTime); } + public byte[] getAppMetadata() { + return appMetadata; + } + /** * Converts to the protocol buffer representation. */ @@ -111,6 +128,10 @@ Flight.FlightEndpoint toProtocol() { .build()); } + if (appMetadata != null) { + b.setAppMetadata(ByteString.copyFrom(appMetadata)); + } + return b.build(); } @@ -148,12 +169,13 @@ public boolean equals(Object o) { FlightEndpoint that = (FlightEndpoint) o; return locations.equals(that.locations) && ticket.equals(that.ticket) && - Objects.equals(expirationTime, that.expirationTime); + Objects.equals(expirationTime, that.expirationTime) && + Arrays.equals(appMetadata, that.appMetadata); } @Override public int hashCode() { - return Objects.hash(locations, ticket, expirationTime); + return Objects.hash(locations, ticket, expirationTime, Arrays.hashCode(appMetadata)); } @Override @@ -162,6 +184,59 @@ public String toString() { "locations=" + locations + ", ticket=" + ticket + ", expirationTime=" + (expirationTime == null ? "(none)" : expirationTime.toString()) + + ", appMetadata=" + (appMetadata == null ? "(none)" : Base64.getEncoder().encodeToString(appMetadata)) + '}'; } + + /** + * Create a builder for FlightEndpoint. + * + * @param ticket A ticket that describe the key of a data stream. 
+ * @param locations The possible locations the stream can be retrieved from. + */ + public static Builder builder(Ticket ticket, Location... locations) { + return new Builder(ticket, locations); + } + + /** + * Builder for FlightEndpoint. + */ + public static final class Builder { + private final Ticket ticket; + private final List locations; + private Instant expirationTime = null; + private byte[] appMetadata = null; + + private Builder(Ticket ticket, Location... locations) { + this.ticket = ticket; + this.locations = Collections.unmodifiableList(new ArrayList<>(Arrays.asList(locations))); + } + + /** + * Set expiration time for the endpoint. Default is null, which means don't expire. + * + * @param expirationTime (optional) When this endpoint expires. + */ + public Builder setExpirationTime(Instant expirationTime) { + this.expirationTime = expirationTime; + return this; + } + + /** + * Set the app metadata to send along with the flight. Default is null; + * + * @param appMetadata Metadata to send along with the flight + */ + public Builder setAppMetadata(byte[] appMetadata) { + this.appMetadata = appMetadata; + return this; + } + + /** + * Build FlightEndpoint object. 
+ */ + public FlightEndpoint build() { + return new FlightEndpoint(ticket, expirationTime, appMetadata, locations); + } + } } diff --git a/java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightInfo.java b/java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightInfo.java index d871f89465c83..b5279a304c865 100644 --- a/java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightInfo.java +++ b/java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightInfo.java @@ -23,6 +23,8 @@ import java.nio.ByteBuffer; import java.nio.channels.Channels; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Base64; import java.util.Collections; import java.util.List; import java.util.Objects; @@ -51,6 +53,7 @@ public class FlightInfo { private final long records; private final boolean ordered; private final IpcOption option; + private final byte[] appMetadata; /** * Constructs a new instance. @@ -94,6 +97,23 @@ public FlightInfo(Schema schema, FlightDescriptor descriptor, List endpoints, long bytes, long records, boolean ordered, IpcOption option) { + this(schema, descriptor, endpoints, bytes, records, ordered, option, null); + } + + /** + * Constructs a new instance. + * + * @param schema The schema of the Flight + * @param descriptor An identifier for the Flight. + * @param endpoints A list of endpoints that have the flight available. + * @param bytes The number of bytes in the flight + * @param records The number of records in the flight. + * @param ordered Whether the endpoints in this flight are ordered. + * @param option IPC write options. 
+ * @param appMetadata Metadata to send along with the flight + */ + public FlightInfo(Schema schema, FlightDescriptor descriptor, List endpoints, long bytes, + long records, boolean ordered, IpcOption option, byte[] appMetadata) { Objects.requireNonNull(descriptor); Objects.requireNonNull(endpoints); if (schema != null) { @@ -106,6 +126,7 @@ public FlightInfo(Schema schema, FlightDescriptor descriptor, List endpoints) { + return new Builder(schema, descriptor, endpoints); + } + + /** + * Builder for FlightInfo. + */ + public static final class Builder { + private final Schema schema; + private final FlightDescriptor descriptor; + private final List endpoints; + private long bytes = -1; + private long records = -1; + private boolean ordered = false; + private IpcOption option = IpcOption.DEFAULT; + private byte[] appMetadata = null; + + private Builder(Schema schema, FlightDescriptor descriptor, List endpoints) { + this.schema = schema; + this.descriptor = descriptor; + this.endpoints = endpoints; + } + + /** + * Set the number of bytes for the flight. Default to -1 for unknown. + * + * @param bytes The number of bytes in the flight + */ + public Builder setBytes(long bytes) { + this.bytes = bytes; + return this; + } + + /** + * Set the number of records for the flight. Default to -1 for unknown. + * + * @param records The number of records in the flight. + */ + public Builder setRecords(long records) { + this.records = records; + return this; + } + + /** + * Set whether the flight endpoints are ordered. Default is false. + * + * @param ordered Whether the endpoints in this flight are ordered. + */ + public Builder setOrdered(boolean ordered) { + this.ordered = ordered; + return this; + } + + /** + * Set IPC write options. Default is IpcOption.DEFAULT + * + * @param option IPC write options. + */ + public Builder setOption(IpcOption option) { + this.option = option; + return this; + } + + /** + * Set the app metadata to send along with the flight. Default is null. 
+ * + * @param appMetadata Metadata to send along with the flight + */ + public Builder setAppMetadata(byte[] appMetadata) { + this.appMetadata = appMetadata; + return this; + } + + /** + * Build FlightInfo object. + */ + public FlightInfo build() { + return new FlightInfo(schema, descriptor, endpoints, bytes, records, ordered, option, appMetadata); + } + } } diff --git a/java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightServer.java b/java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightServer.java index 7f15798f6a326..234c9bdcaacc1 100644 --- a/java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightServer.java +++ b/java/flight/flight-core/src/main/java/org/apache/arrow/flight/FlightServer.java @@ -34,6 +34,8 @@ import java.util.concurrent.TimeUnit; import java.util.function.Consumer; +import javax.net.ssl.SSLException; + import org.apache.arrow.flight.auth.ServerAuthHandler; import org.apache.arrow.flight.auth.ServerAuthInterceptor; import org.apache.arrow.flight.auth2.Auth2Constants; @@ -49,9 +51,14 @@ import io.grpc.Server; import io.grpc.ServerInterceptors; +import io.grpc.netty.GrpcSslContexts; import io.grpc.netty.NettyServerBuilder; import io.netty.channel.EventLoopGroup; import io.netty.channel.ServerChannel; +import io.netty.handler.ssl.ClientAuth; +import io.netty.handler.ssl.SslContext; +import io.netty.handler.ssl.SslContextBuilder; + /** * Generic server of flight data that is customized via construction with delegate classes for the @@ -172,6 +179,8 @@ public static final class Builder { private int maxInboundMessageSize = MAX_GRPC_MESSAGE_SIZE; private InputStream certChain; private InputStream key; + private InputStream mTlsCACert; + private SslContext sslContext; private final List> interceptors; // Keep track of inserted interceptors private final Set interceptorKeys; @@ -245,7 +254,25 @@ public FlightServer build() { } if (certChain != null) { - builder.useTransportSecurity(certChain, key); + 
SslContextBuilder sslContextBuilder = GrpcSslContexts + .forServer(certChain, key); + + if (mTlsCACert != null) { + sslContextBuilder + .clientAuth(ClientAuth.REQUIRE) + .trustManager(mTlsCACert); + } + try { + sslContext = sslContextBuilder.build(); + } catch (SSLException e) { + throw new RuntimeException(e); + } finally { + closeMTlsCACert(); + closeCertChain(); + closeKey(); + } + + builder.sslContext(sslContext); } // Share one executor between the gRPC service, DoPut, and Handshake @@ -306,14 +333,69 @@ public Builder maxInboundMessageSize(int maxMessageSize) { return this; } + /** + * A small utility function to ensure that InputStream attributes. + * are closed if they are not null + * @param stream The InputStream to close (if it is not null). + */ + private void closeInputStreamIfNotNull(InputStream stream) { + if (stream != null) { + try { + stream.close(); + } catch (IOException ignored) { + } + } + } + + /** + * A small utility function to ensure that the certChain attribute + * is closed if it is not null. It then sets the attribute to null. + */ + private void closeCertChain() { + closeInputStreamIfNotNull(certChain); + certChain = null; + } + + /** + * A small utility function to ensure that the key attribute + * is closed if it is not null. It then sets the attribute to null. + */ + private void closeKey() { + closeInputStreamIfNotNull(key); + key = null; + } + + /** + * A small utility function to ensure that the mTlsCACert attribute + * is closed if it is not null. It then sets the attribute to null. + */ + private void closeMTlsCACert() { + closeInputStreamIfNotNull(mTlsCACert); + mTlsCACert = null; + } + /** * Enable TLS on the server. * @param certChain The certificate chain to use. * @param key The private key to use. 
*/ public Builder useTls(final File certChain, final File key) throws IOException { + closeCertChain(); this.certChain = new FileInputStream(certChain); + + closeKey(); this.key = new FileInputStream(key); + + return this; + } + + /** + * Enable Client Verification via mTLS on the server. + * @param mTlsCACert The CA certificate to use for verifying clients. + */ + public Builder useMTlsClientVerification(final File mTlsCACert) throws IOException { + closeMTlsCACert(); + this.mTlsCACert = new FileInputStream(mTlsCACert); return this; } @@ -322,9 +404,23 @@ public Builder useTls(final File certChain, final File key) throws IOException { * @param certChain The certificate chain to use. * @param key The private key to use. */ - public Builder useTls(final InputStream certChain, final InputStream key) { + public Builder useTls(final InputStream certChain, final InputStream key) throws IOException { + closeCertChain(); this.certChain = certChain; + + closeKey(); this.key = key; + + return this; + } + + /** + * Enable mTLS on the server. + * @param mTlsCACert The CA certificate to use for verifying clients. 
+ */ + public Builder useMTlsClientVerification(final InputStream mTlsCACert) throws IOException { + closeMTlsCACert(); + this.mTlsCACert = mTlsCACert; return this; } diff --git a/java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth2/ClientIncomingAuthHeaderMiddleware.java b/java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth2/ClientIncomingAuthHeaderMiddleware.java index be5f3f54d326c..7bb55d145d104 100644 --- a/java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth2/ClientIncomingAuthHeaderMiddleware.java +++ b/java/flight/flight-core/src/main/java/org/apache/arrow/flight/auth2/ClientIncomingAuthHeaderMiddleware.java @@ -34,7 +34,7 @@ public class ClientIncomingAuthHeaderMiddleware implements FlightClientMiddlewar */ public static class Factory implements FlightClientMiddleware.Factory { private final ClientHeaderHandler headerHandler; - private CredentialCallOption credentialCallOption; + private CredentialCallOption credentialCallOption = null; /** * Construct a factory with the given header handler. 
diff --git a/java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestBasicOperation.java b/java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestBasicOperation.java index 238221f051a7e..41b3a4693e579 100644 --- a/java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestBasicOperation.java +++ b/java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestBasicOperation.java @@ -113,10 +113,13 @@ public void roundTripInfo() throws Exception { Field.nullable("a", new ArrowType.Int(32, true)), Field.nullable("b", new ArrowType.FixedSizeBinary(32)) ), metadata); - final FlightInfo info1 = new FlightInfo(schema, FlightDescriptor.path(), Collections.emptyList(), -1, -1); + final FlightInfo info1 = FlightInfo.builder(schema, FlightDescriptor.path(), Collections.emptyList()) + .setAppMetadata("foo".getBytes()).build(); final FlightInfo info2 = new FlightInfo(schema, FlightDescriptor.command(new byte[2]), - Collections.singletonList(new FlightEndpoint( - new Ticket(new byte[10]), Location.forGrpcDomainSocket("/tmp/test.sock"))), 200, 500); + Collections.singletonList( + FlightEndpoint.builder(new Ticket(new byte[10]), Location.forGrpcDomainSocket("/tmp/test.sock")) + .setAppMetadata("bar".getBytes()).build() + ), 200, 500); final FlightInfo info3 = new FlightInfo(schema, FlightDescriptor.path("a", "b"), Arrays.asList(new FlightEndpoint( new Ticket(new byte[10]), Location.forGrpcDomainSocket("/tmp/test.sock")), diff --git a/java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestFlightService.java b/java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestFlightService.java index 691048fb03ed3..0e4669f29ce43 100644 --- a/java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestFlightService.java +++ b/java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestFlightService.java @@ -28,6 +28,7 @@ import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.memory.RootAllocator; import 
org.apache.arrow.util.AutoCloseables; +import org.apache.arrow.vector.ipc.message.IpcOption; import org.apache.arrow.vector.types.pojo.Schema; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; @@ -137,7 +138,8 @@ public void supportsNullSchemas() throws Exception @Override public FlightInfo getFlightInfo(CallContext context, FlightDescriptor descriptor) { - return new FlightInfo(null, descriptor, Collections.emptyList(), 0, 0); + return new FlightInfo(null, descriptor, Collections.emptyList(), + 0, 0, false, IpcOption.DEFAULT, "foo".getBytes()); } }; @@ -147,6 +149,7 @@ public FlightInfo getFlightInfo(CallContext context, FlightInfo flightInfo = client.getInfo(FlightDescriptor.path("test")); Assertions.assertEquals(Optional.empty(), flightInfo.getSchemaOptional()); Assertions.assertEquals(new Schema(Collections.emptyList()), flightInfo.getSchema()); + Assertions.assertArrayEquals(flightInfo.getAppMetadata(), "foo".getBytes()); Exception e = Assertions.assertThrows( FlightRuntimeException.class, diff --git a/java/flight/flight-core/src/test/java/org/apache/arrow/flight/auth/TestBasicAuth.java b/java/flight/flight-core/src/test/java/org/apache/arrow/flight/auth/TestBasicAuth.java index 6544b23dab635..176277866b73a 100644 --- a/java/flight/flight-core/src/test/java/org/apache/arrow/flight/auth/TestBasicAuth.java +++ b/java/flight/flight-core/src/test/java/org/apache/arrow/flight/auth/TestBasicAuth.java @@ -41,10 +41,11 @@ import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.Field; import org.apache.arrow.vector.types.pojo.Schema; +import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import com.google.common.collect.ImmutableList; @@ -56,8 +57,8 @@ public class TestBasicAuth { private 
static final byte[] VALID_TOKEN = "my_token".getBytes(StandardCharsets.UTF_8); private FlightClient client; - private FlightServer server; - private BufferAllocator allocator; + private static FlightServer server; + private static BufferAllocator allocator; @Test public void validAuth() { @@ -65,8 +66,6 @@ public void validAuth() { Assertions.assertTrue(ImmutableList.copyOf(client.listFlights(Criteria.ALL)).size() == 0); } - // ARROW-7722: this test occasionally leaks memory - @Disabled @Test public void asyncCall() throws Exception { client.authenticateBasic(USERNAME, PASSWORD); @@ -97,7 +96,12 @@ public void didntAuth() { } @BeforeEach - public void setup() throws IOException { + public void testSetup() throws IOException { + client = FlightClient.builder(allocator, server.getLocation()).build(); + } + + @BeforeAll + public static void setup() throws IOException { allocator = new RootAllocator(Long.MAX_VALUE); final BasicServerAuthHandler.BasicAuthValidator validator = new BasicServerAuthHandler.BasicAuthValidator() { @@ -149,12 +153,19 @@ public void getStream(CallContext context, Ticket ticket, ServerStreamListener l } } }).authHandler(new BasicServerAuthHandler(validator)).build().start(); - client = FlightClient.builder(allocator, server.getLocation()).build(); } @AfterEach - public void shutdown() throws Exception { - AutoCloseables.close(client, server, allocator); + public void tearDown() throws Exception { + AutoCloseables.close(client); + } + + @AfterAll + public static void shutdown() throws Exception { + AutoCloseables.close(server); + + allocator.getChildAllocators().forEach(BufferAllocator::close); + AutoCloseables.close(allocator); } } diff --git a/java/flight/flight-core/src/test/java/org/apache/arrow/flight/auth2/TestBasicAuth2.java b/java/flight/flight-core/src/test/java/org/apache/arrow/flight/auth2/TestBasicAuth2.java index 4ccc73fcac10e..cadd67d3ed241 100644 --- 
a/java/flight/flight-core/src/test/java/org/apache/arrow/flight/auth2/TestBasicAuth2.java +++ b/java/flight/flight-core/src/test/java/org/apache/arrow/flight/auth2/TestBasicAuth2.java @@ -41,10 +41,9 @@ import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.Field; import org.apache.arrow.vector.types.pojo.Schema; -import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import com.google.common.base.Strings; @@ -57,18 +56,18 @@ public class TestBasicAuth2 { private static final String NO_USERNAME = ""; private static final String PASSWORD_1 = "woohoo1"; private static final String PASSWORD_2 = "woohoo2"; - private BufferAllocator allocator; - private FlightServer server; - private FlightClient client; - private FlightClient client2; + private static BufferAllocator allocator; + private static FlightServer server; + private static FlightClient client; + private static FlightClient client2; - @BeforeEach - public void setup() throws Exception { + @BeforeAll + public static void setup() throws Exception { allocator = new RootAllocator(Long.MAX_VALUE); startServerAndClient(); } - private FlightProducer getFlightProducer() { + private static FlightProducer getFlightProducer() { return new NoOpFlightProducer() { @Override public void listFlights(CallContext context, Criteria criteria, @@ -99,23 +98,26 @@ public void getStream(CallContext context, Ticket ticket, ServerStreamListener l }; } - private void startServerAndClient() throws IOException { + private static void startServerAndClient() throws IOException { final FlightProducer flightProducer = getFlightProducer(); - this.server = FlightServer + server = FlightServer .builder(allocator, forGrpcInsecure(LOCALHOST, 0), flightProducer) .headerAuthenticator(new 
GeneratedBearerTokenAuthenticator( - new BasicCallHeaderAuthenticator(this::validate))) + new BasicCallHeaderAuthenticator(TestBasicAuth2::validate))) .build().start(); - this.client = FlightClient.builder(allocator, server.getLocation()) + client = FlightClient.builder(allocator, server.getLocation()) .build(); } - @AfterEach - public void shutdown() throws Exception { - AutoCloseables.close(client, client2, server, allocator); + @AfterAll + public static void shutdown() throws Exception { + AutoCloseables.close(client, client2, server); client = null; client2 = null; server = null; + + allocator.getChildAllocators().forEach(BufferAllocator::close); + AutoCloseables.close(allocator); allocator = null; } @@ -124,7 +126,7 @@ private void startClient2() throws IOException { .build(); } - private CallHeaderAuthenticator.AuthResult validate(String username, String password) { + private static CallHeaderAuthenticator.AuthResult validate(String username, String password) { if (Strings.isNullOrEmpty(username)) { throw CallStatus.UNAUTHENTICATED.withDescription("Credentials not supplied.").toRuntimeException(); } @@ -156,14 +158,12 @@ public void validAuthWithMultipleClientsWithDifferentCredentialsWithBearerAuthSe testValidAuthWithMultipleClientsWithDifferentCredentials(client, client2); } - // ARROW-7722: this test occasionally leaks memory - @Disabled @Test public void asyncCall() throws Exception { final CredentialCallOption bearerToken = client .authenticateBasicToken(USERNAME_1, PASSWORD_1).get(); client.listFlights(Criteria.ALL, bearerToken); - try (final FlightStream s = client.getStream(new Ticket(new byte[1]))) { + try (final FlightStream s = client.getStream(new Ticket(new byte[1]), bearerToken)) { while (s.next()) { Assertions.assertEquals(4095, s.getRoot().getRowCount()); } diff --git a/java/flight/flight-grpc/pom.xml b/java/flight/flight-grpc/pom.xml index c857500d560b0..e7bb9508d2403 100644 --- a/java/flight/flight-grpc/pom.xml +++ 
b/java/flight/flight-grpc/pom.xml @@ -13,7 +13,7 @@ arrow-flight org.apache.arrow - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT ../pom.xml 4.0.0 diff --git a/java/flight/flight-integration-tests/pom.xml b/java/flight/flight-integration-tests/pom.xml index 20e31e0ceec04..778cb7df11f61 100644 --- a/java/flight/flight-integration-tests/pom.xml +++ b/java/flight/flight-integration-tests/pom.xml @@ -15,7 +15,7 @@ arrow-flight org.apache.arrow - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT ../pom.xml diff --git a/java/flight/flight-integration-tests/src/main/java/org/apache/arrow/flight/integration/tests/AppMetadataFlightInfoEndpointScenario.java b/java/flight/flight-integration-tests/src/main/java/org/apache/arrow/flight/integration/tests/AppMetadataFlightInfoEndpointScenario.java new file mode 100644 index 0000000000000..3220bb5a2d20f --- /dev/null +++ b/java/flight/flight-integration-tests/src/main/java/org/apache/arrow/flight/integration/tests/AppMetadataFlightInfoEndpointScenario.java @@ -0,0 +1,76 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.arrow.flight.integration.tests; + +import java.nio.charset.StandardCharsets; +import java.util.Collections; +import java.util.List; + +import org.apache.arrow.flight.FlightClient; +import org.apache.arrow.flight.FlightDescriptor; +import org.apache.arrow.flight.FlightEndpoint; +import org.apache.arrow.flight.FlightInfo; +import org.apache.arrow.flight.FlightProducer; +import org.apache.arrow.flight.FlightServer; +import org.apache.arrow.flight.Location; +import org.apache.arrow.flight.NoOpFlightProducer; +import org.apache.arrow.flight.Ticket; +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.types.pojo.Schema; + +/** Test app_metadata in FlightInfo and FlightEndpoint. */ +final class AppMetadataFlightInfoEndpointScenario implements Scenario { + @Override + public FlightProducer producer(BufferAllocator allocator, Location location) throws Exception { + return new AppMetadataFlightInfoEndpointProducer(); + } + + @Override + public void buildServer(FlightServer.Builder builder) throws Exception { + } + + @Override + public void client(BufferAllocator allocator, Location location, FlightClient client) throws Exception { + byte[] cmd = "foobar".getBytes(StandardCharsets.UTF_8); + FlightInfo info = client.getInfo(FlightDescriptor.command(cmd)); + IntegrationAssertions.assertEquals(info.getAppMetadata(), cmd); + IntegrationAssertions.assertEquals(info.getEndpoints().size(), 1); + IntegrationAssertions.assertEquals(info.getEndpoints().get(0).getAppMetadata(), cmd); + } + + /** producer for app_metadata test. 
*/ + static class AppMetadataFlightInfoEndpointProducer extends NoOpFlightProducer { + @Override + public FlightInfo getFlightInfo(CallContext context, FlightDescriptor descriptor) { + byte[] cmd = descriptor.getCommand(); + + Schema schema = new Schema( + Collections.singletonList(Field.notNullable("number", Types.MinorType.UINT4.getType()))); + + List endpoints = Collections.singletonList( + FlightEndpoint.builder( + new Ticket("".getBytes(StandardCharsets.UTF_8))).setAppMetadata(cmd).build()); + + return FlightInfo.builder(schema, descriptor, endpoints).setAppMetadata(cmd).build(); + } + } +} + + diff --git a/java/flight/flight-integration-tests/src/main/java/org/apache/arrow/flight/integration/tests/Scenarios.java b/java/flight/flight-integration-tests/src/main/java/org/apache/arrow/flight/integration/tests/Scenarios.java index 26629c650e30f..c61fd94a4d228 100644 --- a/java/flight/flight-integration-tests/src/main/java/org/apache/arrow/flight/integration/tests/Scenarios.java +++ b/java/flight/flight-integration-tests/src/main/java/org/apache/arrow/flight/integration/tests/Scenarios.java @@ -49,6 +49,7 @@ private Scenarios() { scenarios.put("poll_flight_info", PollFlightInfoScenario::new); scenarios.put("flight_sql", FlightSqlScenario::new); scenarios.put("flight_sql:extension", FlightSqlExtensionScenario::new); + scenarios.put("app_metadata_flight_info_endpoint", AppMetadataFlightInfoEndpointScenario::new); } private static Scenarios getInstance() { diff --git a/java/flight/flight-integration-tests/src/test/java/org/apache/arrow/flight/integration/tests/IntegrationTest.java b/java/flight/flight-integration-tests/src/test/java/org/apache/arrow/flight/integration/tests/IntegrationTest.java index cf65e16fac06f..477a56055cbbc 100644 --- a/java/flight/flight-integration-tests/src/test/java/org/apache/arrow/flight/integration/tests/IntegrationTest.java +++ 
b/java/flight/flight-integration-tests/src/test/java/org/apache/arrow/flight/integration/tests/IntegrationTest.java @@ -78,6 +78,11 @@ void flightSqlExtension() throws Exception { testScenario("flight_sql:extension"); } + @Test + void appMetadataFlightInfoEndpoint() throws Exception { + testScenario("app_metadata_flight_info_endpoint"); + } + void testScenario(String scenarioName) throws Exception { try (final BufferAllocator allocator = new RootAllocator()) { final FlightServer.Builder builder = FlightServer.builder() diff --git a/java/flight/flight-sql-jdbc-core/pom.xml b/java/flight/flight-sql-jdbc-core/pom.xml index b99f887a6c9f4..cbeaa88f1e2f7 100644 --- a/java/flight/flight-sql-jdbc-core/pom.xml +++ b/java/flight/flight-sql-jdbc-core/pom.xml @@ -16,7 +16,7 @@ arrow-flight org.apache.arrow - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT ../pom.xml 4.0.0 @@ -136,12 +136,6 @@ bcpkix-jdk15on 1.61 - - - joda-time - joda-time - 2.10.14 - diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowDatabaseMetadata.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowDatabaseMetadata.java index da2b0b00edaef..3487e58a64678 100644 --- a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowDatabaseMetadata.java +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowDatabaseMetadata.java @@ -45,11 +45,11 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.util.Arrays; -import java.util.Collections; import java.util.EnumMap; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -145,8 +145,8 @@ public class ArrowDatabaseMetadata extends AvaticaDatabaseMetaData { Field.notNullable("IS_AUTOINCREMENT", Types.MinorType.VARCHAR.getType()), Field.notNullable("IS_GENERATEDCOLUMN", 
Types.MinorType.VARCHAR.getType()) )); - private final Map cachedSqlInfo = - Collections.synchronizedMap(new EnumMap<>(SqlInfo.class)); + private final AtomicBoolean isCachePopulated = new AtomicBoolean(false); + private final Map cachedSqlInfo = new EnumMap<>(SqlInfo.class); private static final Map sqlTypesToFlightEnumConvertTypes = new HashMap<>(); static { @@ -729,10 +729,15 @@ private T getSqlInfoAndCacheIfCacheIsEmpty(final SqlInfo sqlInfoCommand, final Class desiredType) throws SQLException { final ArrowFlightConnection connection = getConnection(); - if (cachedSqlInfo.isEmpty()) { - final FlightInfo sqlInfo = connection.getClientHandler().getSqlInfo(); + if (!isCachePopulated.get()) { + // Lock-and-populate the cache. Only issue the call to getSqlInfo() once, + // populate the cache, then mark it as populated. + // Note that multiple callers from separate threads can see that the cache is not populated, but only + // one thread will try to populate the cache. Other threads will see the cache is non-empty when acquiring + // the lock on the cache and skip population. 
synchronized (cachedSqlInfo) { if (cachedSqlInfo.isEmpty()) { + final FlightInfo sqlInfo = connection.getClientHandler().getSqlInfo(); try (final ResultSet resultSet = ArrowFlightJdbcFlightStreamResultSet.fromFlightInfo( connection, sqlInfo, null)) { @@ -741,6 +746,7 @@ private T getSqlInfoAndCacheIfCacheIsEmpty(final SqlInfo sqlInfoCommand, resultSet.getObject("value")); } } + isCachePopulated.set(true); } } } diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightConnection.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightConnection.java index 79bc04d27fe01..ad19c616ff29a 100644 --- a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightConnection.java +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightConnection.java @@ -101,11 +101,16 @@ private static ArrowFlightSqlClientHandler createNewClientHandler( .withTrustStorePath(config.getTrustStorePath()) .withTrustStorePassword(config.getTrustStorePassword()) .withSystemTrustStore(config.useSystemTrustStore()) + .withTlsRootCertificates(config.getTlsRootCertificatesPath()) + .withClientCertificate(config.getClientCertificatePath()) + .withClientKey(config.getClientKeyPath()) .withBufferAllocator(allocator) .withEncryption(config.useEncryption()) .withDisableCertificateVerification(config.getDisableCertificateVerification()) .withToken(config.getToken()) .withCallOptions(config.toCallOption()) + .withRetainCookies(config.retainCookies()) + .withRetainAuth(config.retainAuth()) .build(); } catch (final SQLException e) { try { diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightJdbcFactory.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightJdbcFactory.java index 216e4cd002bc3..16bdede02d039 100644 --- 
a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightJdbcFactory.java +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightJdbcFactory.java @@ -17,8 +17,6 @@ package org.apache.arrow.driver.jdbc; -import static org.apache.arrow.driver.jdbc.utils.ConvertUtils.convertArrowFieldsToColumnMetaDataList; - import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.util.Properties; @@ -26,7 +24,6 @@ import org.apache.arrow.driver.jdbc.client.ArrowFlightSqlClientHandler; import org.apache.arrow.memory.RootAllocator; -import org.apache.arrow.vector.types.pojo.Schema; import org.apache.calcite.avatica.AvaticaConnection; import org.apache.calcite.avatica.AvaticaFactory; import org.apache.calcite.avatica.AvaticaResultSetMetaData; @@ -89,12 +86,6 @@ public ArrowFlightPreparedStatement newPreparedStatement( ArrowFlightSqlClientHandler.PreparedStatement preparedStatement = flightConnection.getMeta().getPreparedStatement(statementHandle); - if (preparedStatement == null) { - preparedStatement = flightConnection.getClientHandler().prepare(signature.sql); - } - final Schema resultSetSchema = preparedStatement.getDataSetSchema(); - signature.columns.addAll(convertArrowFieldsToColumnMetaDataList(resultSetSchema.getFields())); - return ArrowFlightPreparedStatement.newPreparedStatement( flightConnection, preparedStatement, statementHandle, signature, resultType, resultSetConcurrency, resultSetHoldability); diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightJdbcFlightStreamResultSet.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightJdbcFlightStreamResultSet.java index 9a53f9fcafdd2..e23267ebe9ebf 100644 --- a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightJdbcFlightStreamResultSet.java +++ 
b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightJdbcFlightStreamResultSet.java @@ -17,7 +17,7 @@ package org.apache.arrow.driver.jdbc; -import static org.apache.arrow.driver.jdbc.utils.FlightStreamQueue.createNewQueue; +import static org.apache.arrow.driver.jdbc.utils.FlightEndpointDataQueue.createNewQueue; import java.sql.ResultSet; import java.sql.ResultSetMetaData; @@ -26,7 +26,8 @@ import java.util.TimeZone; import java.util.concurrent.TimeUnit; -import org.apache.arrow.driver.jdbc.utils.FlightStreamQueue; +import org.apache.arrow.driver.jdbc.client.CloseableEndpointStreamPair; +import org.apache.arrow.driver.jdbc.utils.FlightEndpointDataQueue; import org.apache.arrow.driver.jdbc.utils.VectorSchemaRootTransformer; import org.apache.arrow.flight.FlightInfo; import org.apache.arrow.flight.FlightStream; @@ -47,8 +48,8 @@ public final class ArrowFlightJdbcFlightStreamResultSet extends ArrowFlightJdbcVectorSchemaRootResultSet { private final ArrowFlightConnection connection; - private FlightStream currentFlightStream; - private FlightStreamQueue flightStreamQueue; + private CloseableEndpointStreamPair currentEndpointData; + private FlightEndpointDataQueue flightEndpointDataQueue; private VectorSchemaRootTransformer transformer; private VectorSchemaRoot currentVectorSchemaRoot; @@ -92,7 +93,7 @@ static ArrowFlightJdbcFlightStreamResultSet fromFlightInfo( final TimeZone timeZone = TimeZone.getDefault(); final QueryState state = new QueryState(); - final Meta.Signature signature = ArrowFlightMetaImpl.newSignature(null); + final Meta.Signature signature = ArrowFlightMetaImpl.newSignature(null, null, null); final AvaticaResultSetMetaData resultSetMetaData = new AvaticaResultSetMetaData(null, null, signature); @@ -102,20 +103,20 @@ static ArrowFlightJdbcFlightStreamResultSet fromFlightInfo( resultSet.transformer = transformer; - resultSet.execute(flightInfo); + resultSet.populateData(flightInfo); return resultSet; } private void 
loadNewQueue() { - Optional.ofNullable(flightStreamQueue).ifPresent(AutoCloseables::closeNoChecked); - flightStreamQueue = createNewQueue(connection.getExecutorService()); + Optional.ofNullable(flightEndpointDataQueue).ifPresent(AutoCloseables::closeNoChecked); + flightEndpointDataQueue = createNewQueue(connection.getExecutorService()); } private void loadNewFlightStream() throws SQLException { - if (currentFlightStream != null) { - AutoCloseables.closeNoChecked(currentFlightStream); + if (currentEndpointData != null) { + AutoCloseables.closeNoChecked(currentEndpointData); } - this.currentFlightStream = getNextFlightStream(true); + this.currentEndpointData = getNextEndpointStream(true); } @Override @@ -124,24 +125,24 @@ protected AvaticaResultSet execute() throws SQLException { if (flightInfo != null) { schema = flightInfo.getSchemaOptional().orElse(null); - execute(flightInfo); + populateData(flightInfo); } return this; } - private void execute(final FlightInfo flightInfo) throws SQLException { + private void populateData(final FlightInfo flightInfo) throws SQLException { loadNewQueue(); - flightStreamQueue.enqueue(connection.getClientHandler().getStreams(flightInfo)); + flightEndpointDataQueue.enqueue(connection.getClientHandler().getStreams(flightInfo)); loadNewFlightStream(); // Ownership of the root will be passed onto the cursor. 
- if (currentFlightStream != null) { - executeForCurrentFlightStream(); + if (currentEndpointData != null) { + populateDataForCurrentFlightStream(); } } - private void executeForCurrentFlightStream() throws SQLException { - final VectorSchemaRoot originalRoot = currentFlightStream.getRoot(); + private void populateDataForCurrentFlightStream() throws SQLException { + final VectorSchemaRoot originalRoot = currentEndpointData.getStream().getRoot(); if (transformer != null) { try { @@ -153,11 +154,7 @@ private void executeForCurrentFlightStream() throws SQLException { currentVectorSchemaRoot = originalRoot; } - if (schema != null) { - execute(currentVectorSchemaRoot, schema); - } else { - execute(currentVectorSchemaRoot); - } + populateData(currentVectorSchemaRoot, schema); } @Override @@ -179,20 +176,20 @@ public boolean next() throws SQLException { return true; } - if (currentFlightStream != null) { - currentFlightStream.getRoot().clear(); - if (currentFlightStream.next()) { - executeForCurrentFlightStream(); + if (currentEndpointData != null) { + currentEndpointData.getStream().getRoot().clear(); + if (currentEndpointData.getStream().next()) { + populateDataForCurrentFlightStream(); continue; } - flightStreamQueue.enqueue(currentFlightStream); + flightEndpointDataQueue.enqueue(currentEndpointData); } - currentFlightStream = getNextFlightStream(false); + currentEndpointData = getNextEndpointStream(false); - if (currentFlightStream != null) { - executeForCurrentFlightStream(); + if (currentEndpointData != null) { + populateDataForCurrentFlightStream(); continue; } @@ -207,14 +204,14 @@ public boolean next() throws SQLException { @Override protected void cancel() { super.cancel(); - final FlightStream currentFlightStream = this.currentFlightStream; - if (currentFlightStream != null) { - currentFlightStream.cancel("Cancel", null); + final CloseableEndpointStreamPair currentEndpoint = this.currentEndpointData; + if (currentEndpoint != null) { + 
currentEndpoint.getStream().cancel("Cancel", null); } - if (flightStreamQueue != null) { + if (flightEndpointDataQueue != null) { try { - flightStreamQueue.close(); + flightEndpointDataQueue.close(); } catch (final Exception e) { throw new RuntimeException(e); } @@ -224,13 +221,14 @@ protected void cancel() { @Override public synchronized void close() { try { - if (flightStreamQueue != null) { + if (flightEndpointDataQueue != null) { // flightStreamQueue should close currentFlightStream internally - flightStreamQueue.close(); - } else if (currentFlightStream != null) { + flightEndpointDataQueue.close(); + } else if (currentEndpointData != null) { // close is only called for currentFlightStream if there's no queue - currentFlightStream.close(); + currentEndpointData.close(); } + } catch (final Exception e) { throw new RuntimeException(e); } finally { @@ -238,13 +236,13 @@ public synchronized void close() { } } - private FlightStream getNextFlightStream(final boolean isExecution) throws SQLException { - if (isExecution) { + private CloseableEndpointStreamPair getNextEndpointStream(final boolean canTimeout) throws SQLException { + if (canTimeout) { final int statementTimeout = statement != null ? statement.getQueryTimeout() : 0; return statementTimeout != 0 ? 
- flightStreamQueue.next(statementTimeout, TimeUnit.SECONDS) : flightStreamQueue.next(); + flightEndpointDataQueue.next(statementTimeout, TimeUnit.SECONDS) : flightEndpointDataQueue.next(); } else { - return flightStreamQueue.next(); + return flightEndpointDataQueue.next(); } } } diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightJdbcVectorSchemaRootResultSet.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightJdbcVectorSchemaRootResultSet.java index 9e377e51decc9..626ae95bc5bbe 100644 --- a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightJdbcVectorSchemaRootResultSet.java +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightJdbcVectorSchemaRootResultSet.java @@ -17,20 +17,18 @@ package org.apache.arrow.driver.jdbc; -import static java.util.Objects.isNull; - import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.util.HashSet; import java.util.List; +import java.util.Objects; import java.util.Set; import java.util.TimeZone; import org.apache.arrow.driver.jdbc.utils.ConvertUtils; import org.apache.arrow.util.AutoCloseables; import org.apache.arrow.vector.VectorSchemaRoot; -import org.apache.arrow.vector.types.pojo.Field; import org.apache.arrow.vector.types.pojo.Schema; import org.apache.calcite.avatica.AvaticaResultSet; import org.apache.calcite.avatica.AvaticaResultSetMetaData; @@ -74,7 +72,7 @@ public static ArrowFlightJdbcVectorSchemaRootResultSet fromVectorSchemaRoot( final TimeZone timeZone = TimeZone.getDefault(); final QueryState state = new QueryState(); - final Meta.Signature signature = ArrowFlightMetaImpl.newSignature(null); + final Meta.Signature signature = ArrowFlightMetaImpl.newSignature(null, null, null); final AvaticaResultSetMetaData resultSetMetaData = new AvaticaResultSetMetaData(null, null, signature); @@ -83,7 +81,7 @@ public 
static ArrowFlightJdbcVectorSchemaRootResultSet fromVectorSchemaRoot( new ArrowFlightJdbcVectorSchemaRootResultSet(null, state, signature, resultSetMetaData, timeZone, null); - resultSet.execute(vectorSchemaRoot); + resultSet.populateData(vectorSchemaRoot); return resultSet; } @@ -92,18 +90,13 @@ protected AvaticaResultSet execute() throws SQLException { throw new RuntimeException("Can only execute with execute(VectorSchemaRoot)"); } - void execute(final VectorSchemaRoot vectorSchemaRoot) { - final List fields = vectorSchemaRoot.getSchema().getFields(); - final List columns = ConvertUtils.convertArrowFieldsToColumnMetaDataList(fields); - signature.columns.clear(); - signature.columns.addAll(columns); - - this.vectorSchemaRoot = vectorSchemaRoot; - execute2(new ArrowFlightJdbcCursor(vectorSchemaRoot), this.signature.columns); + void populateData(final VectorSchemaRoot vectorSchemaRoot) { + populateData(vectorSchemaRoot, null); } - void execute(final VectorSchemaRoot vectorSchemaRoot, final Schema schema) { - final List columns = ConvertUtils.convertArrowFieldsToColumnMetaDataList(schema.getFields()); + void populateData(final VectorSchemaRoot vectorSchemaRoot, final Schema schema) { + Schema currentSchema = schema == null ? 
vectorSchemaRoot.getSchema() : schema; + final List columns = ConvertUtils.convertArrowFieldsToColumnMetaDataList(currentSchema.getFields()); signature.columns.clear(); signature.columns.addAll(columns); @@ -137,7 +130,7 @@ public void close() { } catch (final Exception e) { exceptions.add(e); } - if (!isNull(statement)) { + if (!Objects.isNull(statement)) { try { super.close(); } catch (final Exception e) { diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightMetaImpl.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightMetaImpl.java index f825e7d13cef5..382750914992f 100644 --- a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightMetaImpl.java +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightMetaImpl.java @@ -17,8 +17,6 @@ package org.apache.arrow.driver.jdbc; -import static java.lang.String.format; - import java.sql.Connection; import java.sql.SQLException; import java.sql.SQLTimeoutException; @@ -29,7 +27,10 @@ import java.util.concurrent.ConcurrentHashMap; import org.apache.arrow.driver.jdbc.client.ArrowFlightSqlClientHandler.PreparedStatement; +import org.apache.arrow.driver.jdbc.utils.AvaticaParameterBinder; +import org.apache.arrow.driver.jdbc.utils.ConvertUtils; import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.pojo.Schema; import org.apache.calcite.avatica.AvaticaConnection; import org.apache.calcite.avatica.AvaticaParameter; import org.apache.calcite.avatica.ColumnMetaData; @@ -54,12 +55,20 @@ public ArrowFlightMetaImpl(final AvaticaConnection connection) { setDefaultConnectionProperties(); } - static Signature newSignature(final String sql) { + /** + * Construct a signature. + */ + static Signature newSignature(final String sql, Schema resultSetSchema, Schema parameterSchema) { + List columnMetaData = resultSetSchema == null ? 
+ new ArrayList<>() : ConvertUtils.convertArrowFieldsToColumnMetaDataList(resultSetSchema.getFields()); + List parameters = parameterSchema == null ? + new ArrayList<>() : ConvertUtils.convertArrowFieldsToAvaticaParameters(parameterSchema.getFields()); + return new Signature( - new ArrayList(), + columnMetaData, sql, - Collections.emptyList(), - Collections.emptyMap(), + parameters, + Collections.emptyMap(), null, // unnecessary, as SQL requests use ArrowFlightJdbcCursor StatementType.SELECT ); @@ -84,23 +93,28 @@ public void commit(final ConnectionHandle connectionHandle) { public ExecuteResult execute(final StatementHandle statementHandle, final List typedValues, final long maxRowCount) { Preconditions.checkArgument(connection.id.equals(statementHandle.connectionId), - "Connection IDs are not consistent"); + "Connection IDs are not consistent"); + PreparedStatement preparedStatement = getPreparedStatement(statementHandle); + + if (preparedStatement == null) { + throw new IllegalStateException("Prepared statement not found: " + statementHandle); + } + + + new AvaticaParameterBinder(preparedStatement, ((ArrowFlightConnection) connection).getBufferAllocator()) + .bind(typedValues); + if (statementHandle.signature == null) { // Update query - final StatementHandleKey key = new StatementHandleKey(statementHandle); - PreparedStatement preparedStatement = statementHandlePreparedStatementMap.get(key); - if (preparedStatement == null) { - throw new IllegalStateException("Prepared statement not found: " + statementHandle); - } long updatedCount = preparedStatement.executeUpdate(); return new ExecuteResult(Collections.singletonList(MetaResultSet.count(statementHandle.connectionId, - statementHandle.id, updatedCount))); + statementHandle.id, updatedCount))); } else { // TODO Why is maxRowCount ignored? 
return new ExecuteResult( - Collections.singletonList(MetaResultSet.create( - statementHandle.connectionId, statementHandle.id, - true, statementHandle.signature, null))); + Collections.singletonList(MetaResultSet.create( + statementHandle.connectionId, statementHandle.id, + true, statementHandle.signature, null))); } } @@ -114,7 +128,23 @@ public ExecuteResult execute(final StatementHandle statementHandle, public ExecuteBatchResult executeBatch(final StatementHandle statementHandle, final List> parameterValuesList) throws IllegalStateException { - throw new IllegalStateException("executeBatch not implemented."); + Preconditions.checkArgument(connection.id.equals(statementHandle.connectionId), + "Connection IDs are not consistent"); + PreparedStatement preparedStatement = getPreparedStatement(statementHandle); + + if (preparedStatement == null) { + throw new IllegalStateException("Prepared statement not found: " + statementHandle); + } + + final AvaticaParameterBinder binder = new AvaticaParameterBinder(preparedStatement, + ((ArrowFlightConnection) connection).getBufferAllocator()); + for (int i = 0; i < parameterValuesList.size(); i++) { + binder.bind(parameterValuesList.get(i), i); + } + + // Update query + long[] updatedCounts = {preparedStatement.executeUpdate()}; + return new ExecuteBatchResult(updatedCounts); } @Override @@ -126,18 +156,24 @@ public Frame fetch(final StatementHandle statementHandle, final long offset, * the results. 
*/ throw AvaticaConnection.HELPER.wrap( - format("%s does not use frames.", this), + String.format("%s does not use frames.", this), AvaticaConnection.HELPER.unsupported()); } + private PreparedStatement prepareForHandle(final String query, StatementHandle handle) { + final PreparedStatement preparedStatement = + ((ArrowFlightConnection) connection).getClientHandler().prepare(query); + handle.signature = newSignature(query, preparedStatement.getDataSetSchema(), + preparedStatement.getParameterSchema()); + statementHandlePreparedStatementMap.put(new StatementHandleKey(handle), preparedStatement); + return preparedStatement; + } + @Override public StatementHandle prepare(final ConnectionHandle connectionHandle, final String query, final long maxRowCount) { final StatementHandle handle = super.createStatement(connectionHandle); - handle.signature = newSignature(query); - final PreparedStatement preparedStatement = - ((ArrowFlightConnection) connection).getClientHandler().prepare(query); - statementHandlePreparedStatementMap.put(new StatementHandleKey(handle), preparedStatement); + prepareForHandle(query, handle); return handle; } @@ -157,20 +193,18 @@ public ExecuteResult prepareAndExecute(final StatementHandle handle, final PrepareCallback callback) throws NoSuchStatementException { try { - final PreparedStatement preparedStatement = - ((ArrowFlightConnection) connection).getClientHandler().prepare(query); + PreparedStatement preparedStatement = prepareForHandle(query, handle); final StatementType statementType = preparedStatement.getType(); - statementHandlePreparedStatementMap.put(new StatementHandleKey(handle), preparedStatement); - final Signature signature = newSignature(query); + final long updateCount = statementType.equals(StatementType.UPDATE) ? 
preparedStatement.executeUpdate() : -1; synchronized (callback.getMonitor()) { callback.clear(); - callback.assign(signature, null, updateCount); + callback.assign(handle.signature, null, updateCount); } callback.execute(); final MetaResultSet metaResultSet = MetaResultSet.create(handle.connectionId, handle.id, - false, signature, null); + false, handle.signature, null); return new ExecuteResult(Collections.singletonList(metaResultSet)); } catch (SQLTimeoutException e) { // So far AvaticaStatement(executeInternal) only handles NoSuchStatement and Runtime Exceptions. diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightPreparedStatement.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightPreparedStatement.java index 8784e39840b6a..7203f02daa9a1 100644 --- a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightPreparedStatement.java +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/ArrowFlightPreparedStatement.java @@ -17,15 +17,12 @@ package org.apache.arrow.driver.jdbc; -import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; import org.apache.arrow.driver.jdbc.client.ArrowFlightSqlClientHandler; -import org.apache.arrow.driver.jdbc.utils.ConvertUtils; import org.apache.arrow.flight.FlightInfo; import org.apache.arrow.util.Preconditions; -import org.apache.arrow.vector.types.pojo.Schema; import org.apache.calcite.avatica.AvaticaPreparedStatement; import org.apache.calcite.avatica.Meta.Signature; import org.apache.calcite.avatica.Meta.StatementHandle; @@ -50,36 +47,6 @@ private ArrowFlightPreparedStatement(final ArrowFlightConnection connection, this.preparedStatement = Preconditions.checkNotNull(preparedStatement); } - /** - * Creates a new {@link ArrowFlightPreparedStatement} from the provided information. - * - * @param connection the {@link Connection} to use. 
- * @param statementHandle the {@link StatementHandle} to use. - * @param signature the {@link Signature} to use. - * @param resultSetType the ResultSet type. - * @param resultSetConcurrency the ResultSet concurrency. - * @param resultSetHoldability the ResultSet holdability. - * @return a new {@link PreparedStatement}. - * @throws SQLException on error. - */ - static ArrowFlightPreparedStatement createNewPreparedStatement( - final ArrowFlightConnection connection, - final StatementHandle statementHandle, - final Signature signature, - final int resultSetType, - final int resultSetConcurrency, - final int resultSetHoldability) throws SQLException { - - final ArrowFlightSqlClientHandler.PreparedStatement prepare = connection.getClientHandler().prepare(signature.sql); - final Schema resultSetSchema = prepare.getDataSetSchema(); - - signature.columns.addAll(ConvertUtils.convertArrowFieldsToColumnMetaDataList(resultSetSchema.getFields())); - - return new ArrowFlightPreparedStatement( - connection, prepare, statementHandle, - signature, resultSetType, resultSetConcurrency, resultSetHoldability); - } - static ArrowFlightPreparedStatement newPreparedStatement(final ArrowFlightConnection connection, final ArrowFlightSqlClientHandler.PreparedStatement preparedStmt, final StatementHandle statementHandle, diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/accessor/impl/calendar/ArrowFlightJdbcIntervalVectorAccessor.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/accessor/impl/calendar/ArrowFlightJdbcIntervalVectorAccessor.java index 283dc9160a9e9..21d1c15712cdb 100644 --- a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/accessor/impl/calendar/ArrowFlightJdbcIntervalVectorAccessor.java +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/accessor/impl/calendar/ArrowFlightJdbcIntervalVectorAccessor.java @@ -22,6 +22,8 @@ import static 
org.apache.arrow.vector.util.DateUtility.yearsToMonths; import java.sql.SQLException; +import java.time.Duration; +import java.time.Period; import java.util.function.IntSupplier; import org.apache.arrow.driver.jdbc.accessor.ArrowFlightJdbcAccessor; @@ -31,7 +33,6 @@ import org.apache.arrow.vector.IntervalYearVector; import org.apache.arrow.vector.holders.NullableIntervalDayHolder; import org.apache.arrow.vector.holders.NullableIntervalYearHolder; -import org.joda.time.Period; /** * Accessor for the Arrow type {@link IntervalDayVector}. @@ -62,7 +63,7 @@ public ArrowFlightJdbcIntervalVectorAccessor(IntervalDayVector vector, } else { final int days = holder.days; final int millis = holder.milliseconds; - return formatIntervalDay(new Period().plusDays(days).plusMillis(millis)); + return formatIntervalDay(Duration.ofDays(days).plusMillis(millis)); } }; objectClass = java.time.Duration.class; @@ -89,7 +90,7 @@ public ArrowFlightJdbcIntervalVectorAccessor(IntervalYearVector vector, final int interval = holder.value; final int years = (interval / yearsToMonths); final int months = (interval % yearsToMonths); - return formatIntervalYear(new Period().plusYears(years).plusMonths(months)); + return formatIntervalYear(Period.ofYears(years).plusMonths(months)); } }; objectClass = java.time.Period.class; diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/client/ArrowFlightSqlClientHandler.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/client/ArrowFlightSqlClientHandler.java index bb1d524aca008..54fd17853c00b 100644 --- a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/client/ArrowFlightSqlClientHandler.java +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/client/ArrowFlightSqlClientHandler.java @@ -18,14 +18,15 @@ package org.apache.arrow.driver.jdbc.client; import java.io.IOException; +import java.net.URI; import 
java.security.GeneralSecurityException; import java.sql.SQLException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; -import java.util.stream.Collectors; import org.apache.arrow.driver.jdbc.client.utils.ClientAuthenticationUtils; import org.apache.arrow.flight.CallOption; @@ -35,8 +36,8 @@ import org.apache.arrow.flight.FlightInfo; import org.apache.arrow.flight.FlightRuntimeException; import org.apache.arrow.flight.FlightStatusCode; -import org.apache.arrow.flight.FlightStream; import org.apache.arrow.flight.Location; +import org.apache.arrow.flight.LocationSchemes; import org.apache.arrow.flight.auth2.BearerCredentialWriter; import org.apache.arrow.flight.auth2.ClientBearerHeaderHandler; import org.apache.arrow.flight.auth2.ClientIncomingAuthHeaderMiddleware; @@ -48,6 +49,8 @@ import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.util.AutoCloseables; import org.apache.arrow.util.Preconditions; +import org.apache.arrow.util.VisibleForTesting; +import org.apache.arrow.vector.VectorSchemaRoot; import org.apache.arrow.vector.types.pojo.Schema; import org.apache.calcite.avatica.Meta.StatementType; import org.slf4j.Logger; @@ -58,13 +61,18 @@ */ public final class ArrowFlightSqlClientHandler implements AutoCloseable { private static final Logger LOGGER = LoggerFactory.getLogger(ArrowFlightSqlClientHandler.class); + private final FlightSqlClient sqlClient; private final Set options = new HashSet<>(); + private final Builder builder; ArrowFlightSqlClientHandler(final FlightSqlClient sqlClient, - final Collection options) { - this.options.addAll(options); + final Builder builder, + final Collection credentialOptions) { + this.options.addAll(builder.options); + this.options.addAll(credentialOptions); this.sqlClient = Preconditions.checkNotNull(sqlClient); + this.builder = builder; } /** @@ -75,8 +83,9 @@ public final class ArrowFlightSqlClientHandler 
implements AutoCloseable { * @return a new {@link ArrowFlightSqlClientHandler}. */ public static ArrowFlightSqlClientHandler createNewHandler(final FlightClient client, + final Builder builder, final Collection options) { - return new ArrowFlightSqlClientHandler(new FlightSqlClient(client), options); + return new ArrowFlightSqlClientHandler(new FlightSqlClient(client), builder, options); } /** @@ -95,11 +104,55 @@ private CallOption[] getOptions() { * @param flightInfo The {@link FlightInfo} instance from which to fetch results. * @return a {@code FlightStream} of results. */ - public List getStreams(final FlightInfo flightInfo) { - return flightInfo.getEndpoints().stream() - .map(FlightEndpoint::getTicket) - .map(ticket -> sqlClient.getStream(ticket, getOptions())) - .collect(Collectors.toList()); + public List getStreams(final FlightInfo flightInfo) throws SQLException { + final ArrayList endpoints = + new ArrayList<>(flightInfo.getEndpoints().size()); + + try { + for (FlightEndpoint endpoint : flightInfo.getEndpoints()) { + if (endpoint.getLocations().isEmpty()) { + // Create a stream using the current client only and do not close the client at the end. + endpoints.add(new CloseableEndpointStreamPair( + sqlClient.getStream(endpoint.getTicket(), getOptions()), null)); + } else { + // Clone the builder and then set the new endpoint on it. + // GH-38573: This code currently only tries the first Location and treats a failure as fatal. + // This should be changed to try other Locations that are available. + + // GH-38574: Currently a new FlightClient will be made for each partition that returns a non-empty Location + // then disposed of. It may be better to cache clients because a server may report the same Locations. + // It would also be good to identify when the reported location is the same as the original connection's + // Location and skip creating a FlightClient in that scenario. 
+ final URI endpointUri = endpoint.getLocations().get(0).getUri(); + final Builder builderForEndpoint = new Builder(ArrowFlightSqlClientHandler.this.builder) + .withHost(endpointUri.getHost()) + .withPort(endpointUri.getPort()) + .withEncryption(endpointUri.getScheme().equals(LocationSchemes.GRPC_TLS)); + + final ArrowFlightSqlClientHandler endpointHandler = builderForEndpoint.build(); + try { + endpoints.add(new CloseableEndpointStreamPair( + endpointHandler.sqlClient.getStream(endpoint.getTicket(), + endpointHandler.getOptions()), endpointHandler.sqlClient)); + } catch (Exception ex) { + AutoCloseables.close(endpointHandler); + throw ex; + } + } + } + } catch (Exception outerException) { + try { + AutoCloseables.close(endpoints); + } catch (Exception innerEx) { + outerException.addSuppressed(innerEx); + } + + if (outerException instanceof SQLException) { + throw (SQLException) outerException; + } + throw new SQLException(outerException); + } + return endpoints; } /** @@ -155,6 +208,15 @@ public interface PreparedStatement extends AutoCloseable { */ Schema getDataSetSchema(); + /** + * Gets the {@link Schema} of the parameters for this {@link PreparedStatement}. + * + * @return {@link Schema}. 
+ */ + Schema getParameterSchema(); + + void setParameters(VectorSchemaRoot parameters); + @Override void close(); } @@ -190,6 +252,16 @@ public Schema getDataSetSchema() { return preparedStatement.getResultSetSchema(); } + @Override + public Schema getParameterSchema() { + return preparedStatement.getParameterSchema(); + } + + @Override + public void setParameters(VectorSchemaRoot parameters) { + preparedStatement.setParameters(parameters); + } + @Override public void close() { try { @@ -354,16 +426,94 @@ public static final class Builder { private final Set options = new HashSet<>(); private String host; private int port; - private String username; - private String password; - private String trustStorePath; - private String trustStorePassword; - private String token; - private boolean useEncryption; - private boolean disableCertificateVerification; - private boolean useSystemTrustStore; + + @VisibleForTesting + String username; + + @VisibleForTesting + String password; + + @VisibleForTesting + String trustStorePath; + + @VisibleForTesting + String trustStorePassword; + + @VisibleForTesting + String token; + + @VisibleForTesting + boolean useEncryption = true; + + @VisibleForTesting + boolean disableCertificateVerification; + + @VisibleForTesting + boolean useSystemTrustStore = true; + + @VisibleForTesting + String tlsRootCertificatesPath; + + @VisibleForTesting + String clientCertificatePath; + + @VisibleForTesting + String clientKeyPath; + + @VisibleForTesting private BufferAllocator allocator; + @VisibleForTesting + boolean retainCookies = true; + + @VisibleForTesting + boolean retainAuth = true; + + // These two middlewares are for internal use within build() and should not be exposed by builder APIs. + // Note that these middlewares may not necessarily be registered. 
+ @VisibleForTesting + ClientIncomingAuthHeaderMiddleware.Factory authFactory + = new ClientIncomingAuthHeaderMiddleware.Factory(new ClientBearerHeaderHandler()); + + @VisibleForTesting + ClientCookieMiddleware.Factory cookieFactory = new ClientCookieMiddleware.Factory(); + + public Builder() { + } + + /** + * Copies the builder. + * + * @param original The builder to base this copy off of. + */ + @VisibleForTesting + Builder(Builder original) { + this.middlewareFactories.addAll(original.middlewareFactories); + this.options.addAll(original.options); + this.host = original.host; + this.port = original.port; + this.username = original.username; + this.password = original.password; + this.trustStorePath = original.trustStorePath; + this.trustStorePassword = original.trustStorePassword; + this.token = original.token; + this.useEncryption = original.useEncryption; + this.disableCertificateVerification = original.disableCertificateVerification; + this.useSystemTrustStore = original.useSystemTrustStore; + this.tlsRootCertificatesPath = original.tlsRootCertificatesPath; + this.clientCertificatePath = original.clientCertificatePath; + this.clientKeyPath = original.clientKeyPath; + this.allocator = original.allocator; + + if (original.retainCookies) { + this.cookieFactory = original.cookieFactory; + } + + if (original.retainAuth) { + this.authFactory = original.authFactory; + } + } + /** * Sets the host for this handler. * @@ -464,7 +614,42 @@ public Builder withSystemTrustStore(final boolean useSystemTrustStore) { } /** - * Sets the token used in the token authetication. + * Sets the TLS root certificate path as an alternative to using the System + * or other Trust Store. The path must contain a valid PEM file. + * + * @param tlsRootCertificatesPath the TLS root certificate path (if TLS is required). + * @return this instance. 
+ */ + public Builder withTlsRootCertificates(final String tlsRootCertificatesPath) { + this.tlsRootCertificatesPath = tlsRootCertificatesPath; + return this; + } + + /** + * Sets the mTLS client certificate path (if mTLS is required). + * + * @param clientCertificatePath the mTLS client certificate path (if mTLS is required). + * @return this instance. + */ + public Builder withClientCertificate(final String clientCertificatePath) { + this.clientCertificatePath = clientCertificatePath; + return this; + } + + /** + * Sets the mTLS client certificate private key path (if mTLS is required). + * + * @param clientKeyPath the mTLS client certificate private key path (if mTLS is required). + * @return this instance. + */ + public Builder withClientKey(final String clientKeyPath) { + this.clientKeyPath = clientKeyPath; + return this; + } + + /** + * Sets the token used in the token authentication. + * * @param token the token value. * @return this builder instance. */ @@ -485,6 +670,28 @@ public Builder withBufferAllocator(final BufferAllocator allocator) { return this; } + /** + * Indicates if cookies should be re-used by connections spawned for getStreams() calls. + * @param retainCookies The flag indicating if cookies should be re-used. + * @return this builder instance. + */ + public Builder withRetainCookies(boolean retainCookies) { + this.retainCookies = retainCookies; + return this; + } + + /** + * Indicates if bearer tokens negotiated should be re-used by connections + * spawned for getStreams() calls. + * + * @param retainAuth The flag indicating if auth tokens should be re-used. + * @return this builder instance. + */ + public Builder withRetainAuth(boolean retainAuth) { + this.retainAuth = retainAuth; + return this; + } + /** * Adds the provided {@code factories} to the list of {@link #middlewareFactories} of this handler. * @@ -535,18 +742,20 @@ public Builder withCallOptions(final Collection options) { * @throws SQLException on error. 
*/ public ArrowFlightSqlClientHandler build() throws SQLException { + // Copy middlewares so that the build method doesn't change the state of the builder fields itself. + Set buildTimeMiddlewareFactories = new HashSet<>(this.middlewareFactories); FlightClient client = null; + boolean isUsingUserPasswordAuth = username != null && token == null; + try { - ClientIncomingAuthHeaderMiddleware.Factory authFactory = null; // Token should take priority since some apps pass in a username/password even when a token is provided - if (username != null && token == null) { - authFactory = - new ClientIncomingAuthHeaderMiddleware.Factory(new ClientBearerHeaderHandler()); - withMiddlewareFactories(authFactory); + if (isUsingUserPasswordAuth) { + buildTimeMiddlewareFactories.add(authFactory); } final FlightClient.Builder clientBuilder = FlightClient.builder().allocator(allocator); - withMiddlewareFactories(new ClientCookieMiddleware.Factory()); - middlewareFactories.forEach(clientBuilder::intercept); + + buildTimeMiddlewareFactories.add(new ClientCookieMiddleware.Factory()); + buildTimeMiddlewareFactories.forEach(clientBuilder::intercept); Location location; if (useEncryption) { location = Location.forGrpcTls(host, port); @@ -560,7 +769,10 @@ public ArrowFlightSqlClientHandler build() throws SQLException { if (disableCertificateVerification) { clientBuilder.verifyServer(false); } else { - if (useSystemTrustStore) { + if (tlsRootCertificatesPath != null) { + clientBuilder.trustedCertificates( + ClientAuthenticationUtils.getTlsRootCertificatesStream(tlsRootCertificatesPath)); + } else if (useSystemTrustStore) { clientBuilder.trustedCertificates( ClientAuthenticationUtils.getCertificateInputStreamFromSystem(trustStorePassword)); } else if (trustStorePath != null) { @@ -568,20 +780,34 @@ public ArrowFlightSqlClientHandler build() throws SQLException { ClientAuthenticationUtils.getCertificateStream(trustStorePath, trustStorePassword)); } } + + if (clientCertificatePath != null && 
clientKeyPath != null) { + clientBuilder.clientCertificate( + ClientAuthenticationUtils.getClientCertificateStream(clientCertificatePath), + ClientAuthenticationUtils.getClientKeyStream(clientKeyPath)); + } } client = clientBuilder.build(); - if (authFactory != null) { - options.add( - ClientAuthenticationUtils.getAuthenticate( - client, username, password, authFactory, options.toArray(new CallOption[0]))); + final ArrayList credentialOptions = new ArrayList<>(); + if (isUsingUserPasswordAuth) { + // If the authFactory has already been used for a handshake, use the existing token. + // This can occur if the authFactory is being re-used for a new connection spawned for getStream(). + if (authFactory.getCredentialCallOption() != null) { + credentialOptions.add(authFactory.getCredentialCallOption()); + } else { + // Otherwise do the handshake and get the token if possible. + credentialOptions.add( + ClientAuthenticationUtils.getAuthenticate( + client, username, password, authFactory, options.toArray(new CallOption[0]))); + } } else if (token != null) { - options.add( + credentialOptions.add( ClientAuthenticationUtils.getAuthenticate( client, new CredentialCallOption(new BearerCredentialWriter(token)), options.toArray( new CallOption[0]))); } - return ArrowFlightSqlClientHandler.createNewHandler(client, options); + return ArrowFlightSqlClientHandler.createNewHandler(client, this, credentialOptions); } catch (final IllegalArgumentException | GeneralSecurityException | IOException | FlightRuntimeException e) { final SQLException originalException = new SQLException(e); diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/client/CloseableEndpointStreamPair.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/client/CloseableEndpointStreamPair.java new file mode 100644 index 0000000000000..6c37a5b0c626c --- /dev/null +++ 
b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/client/CloseableEndpointStreamPair.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.driver.jdbc.client; + +import org.apache.arrow.flight.FlightStream; +import org.apache.arrow.flight.sql.FlightSqlClient; +import org.apache.arrow.util.AutoCloseables; +import org.apache.arrow.util.Preconditions; + +/** + * Represents a connection to a {@link org.apache.arrow.flight.FlightEndpoint}. 
+ */ +public class CloseableEndpointStreamPair implements AutoCloseable { + + private final FlightStream stream; + private final FlightSqlClient client; + + public CloseableEndpointStreamPair(FlightStream stream, FlightSqlClient client) { + this.stream = Preconditions.checkNotNull(stream); + this.client = client; + } + + public FlightStream getStream() { + return stream; + } + + @Override + public void close() throws Exception { + AutoCloseables.close(stream, client); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/client/utils/ClientAuthenticationUtils.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/client/utils/ClientAuthenticationUtils.java index 6d9880bd27048..d50dc385a62e1 100644 --- a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/client/utils/ClientAuthenticationUtils.java +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/client/utils/ClientAuthenticationUtils.java @@ -227,14 +227,64 @@ public static InputStream getCertificateStream(final String keyStorePath, final KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType()); try (final InputStream keyStoreStream = Files - .newInputStream(Paths.get(Preconditions.checkNotNull(keyStorePath)))) { - keyStore.load(keyStoreStream, - Preconditions.checkNotNull(keyStorePass).toCharArray()); + .newInputStream(Paths.get(keyStorePath))) { + keyStore.load(keyStoreStream, keyStorePass.toCharArray()); } return getSingleCertificateInputStream(keyStore); } + /** + * Generates an {@link InputStream} that contains certificates for path-based + * TLS Root Certificates. + * + * @param tlsRootsCertificatesPath The path of the TLS Root Certificates. + * @return a new {code InputStream} containing the certificates. + * @throws GeneralSecurityException on error. + * @throws IOException on error. 
+ */ + public static InputStream getTlsRootCertificatesStream(final String tlsRootsCertificatesPath) + throws GeneralSecurityException, IOException { + Preconditions.checkNotNull(tlsRootsCertificatesPath, "TLS Root certificates path cannot be null!"); + + return Files + .newInputStream(Paths.get(tlsRootsCertificatesPath)); + } + + /** + * Generates an {@link InputStream} that contains certificates for a path-based + * mTLS Client Certificate. + * + * @param clientCertificatePath The path of the mTLS Client Certificate. + * @return a new {code InputStream} containing the certificates. + * @throws GeneralSecurityException on error. + * @throws IOException on error. + */ + public static InputStream getClientCertificateStream(final String clientCertificatePath) + throws GeneralSecurityException, IOException { + Preconditions.checkNotNull(clientCertificatePath, "Client certificate path cannot be null!"); + + return Files + .newInputStream(Paths.get(clientCertificatePath)); + } + + /** + * Generates an {@link InputStream} that contains certificates for a path-based + * mTLS Client Key. + * + * @param clientKeyPath The path of the mTLS Client Key. + * @return a new {code InputStream} containing the certificates. + * @throws GeneralSecurityException on error. + * @throws IOException on error. 
+ */ + public static InputStream getClientKeyStream(final String clientKeyPath) + throws GeneralSecurityException, IOException { + Preconditions.checkNotNull(clientKeyPath, "Client key path cannot be null!"); + + return Files + .newInputStream(Paths.get(clientKeyPath)); + } + private static InputStream getSingleCertificateInputStream(KeyStore keyStore) throws KeyStoreException, IOException, CertificateException { final Enumeration aliases = keyStore.aliases(); diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/AvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/AvaticaParameterConverter.java new file mode 100644 index 0000000000000..db34a8c9e1582 --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/AvaticaParameterConverter.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.arrow.driver.jdbc.converter; + +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * Interface for a class in charge of converting between AvaticaParameters and TypedValues and + * Arrow. + */ +public interface AvaticaParameterConverter { + + /** + * Bind a TypedValue to a FieldVector at the given index. + * + * @param vector FieldVector that the parameter should be bound to. + * @param typedValue TypedValue to bind as a parameter. + * @param index Vector index (0-indexed) that the TypedValue should be bound to. + * @return Whether the value was set successfully. + */ + boolean bindParameter(FieldVector vector, TypedValue typedValue, int index); + + /** + * Create an AvaticaParameter from the given Field. + * + * @param field Arrow Field to generate an AvaticaParameter from. + */ + AvaticaParameter createParameter(Field field); +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/BaseAvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/BaseAvaticaParameterConverter.java new file mode 100644 index 0000000000000..f5cf8358b7a14 --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/BaseAvaticaParameterConverter.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import org.apache.arrow.driver.jdbc.converter.AvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.utils.SqlTypes; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.SqlType; + +/** + * Base AvaticaParameterConverter with a generic createParameter method that can be used by most + * Arrow types. + */ +abstract class BaseAvaticaParameterConverter implements AvaticaParameterConverter { + protected AvaticaParameter createParameter(Field field, boolean signed) { + final String name = field.getName(); + final ArrowType arrowType = field.getType(); + final String typeName = arrowType.toString(); + final int precision = 0; // Would have to know about the actual number + final int scale = 0; // According to https://www.postgresql.org/docs/current/datatype-numeric.html + final int jdbcType = SqlTypes.getSqlTypeIdFromArrowType(arrowType); + final String className = SqlType.valueOf(jdbcType).clazz.getCanonicalName(); + return new AvaticaParameter(signed, precision, scale, jdbcType, typeName, className, name); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/BinaryAvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/BinaryAvaticaParameterConverter.java new file mode 100644 index 0000000000000..d244848955e52 --- /dev/null 
+++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/BinaryAvaticaParameterConverter.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.VarBinaryVector; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * AvaticaParameterConverter for Binary Arrow types. 
+ */ +public class BinaryAvaticaParameterConverter extends BaseAvaticaParameterConverter { + + public BinaryAvaticaParameterConverter(ArrowType.Binary type) { + } + + @Override + public boolean bindParameter(FieldVector vector, TypedValue typedValue, int index) { + byte[] value = (byte[]) typedValue.toJdbc(null); + if (vector instanceof VarBinaryVector) { + ((VarBinaryVector) vector).setSafe(index, value); + return true; + } + return false; + } + + @Override + public AvaticaParameter createParameter(Field field) { + return createParameter(field, false); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/BoolAvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/BoolAvaticaParameterConverter.java new file mode 100644 index 0000000000000..6725154d03c25 --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/BoolAvaticaParameterConverter.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import org.apache.arrow.vector.BitVector; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * AvaticaParameterConverter for Bool Arrow types. + */ +public class BoolAvaticaParameterConverter extends BaseAvaticaParameterConverter { + + public BoolAvaticaParameterConverter(ArrowType.Bool type) { + } + + @Override + public boolean bindParameter(FieldVector vector, TypedValue typedValue, int index) { + boolean value = (boolean) typedValue.toLocal(); + if (vector instanceof BitVector) { + ((BitVector) vector).setSafe(index, value ? 1 : 0); + return true; + } + return false; + } + + @Override + public AvaticaParameter createParameter(Field field) { + return createParameter(field, false); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/DateAvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/DateAvaticaParameterConverter.java new file mode 100644 index 0000000000000..0da1dabe43721 --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/DateAvaticaParameterConverter.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import org.apache.arrow.vector.DateDayVector; +import org.apache.arrow.vector.DateMilliVector; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * AvaticaParameterConverter for Date Arrow types. + */ +public class DateAvaticaParameterConverter extends BaseAvaticaParameterConverter { + + public DateAvaticaParameterConverter(ArrowType.Date type) { + } + + @Override + public boolean bindParameter(FieldVector vector, TypedValue typedValue, int index) { + int value = (int) typedValue.toLocal(); + if (vector instanceof DateMilliVector) { + ((DateMilliVector) vector).setSafe(index, value); + return true; + } else if (vector instanceof DateDayVector) { + ((DateDayVector) vector).setSafe(index, value); + return true; + } + return false; + } + + @Override + public AvaticaParameter createParameter(Field field) { + return createParameter(field, false); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/DecimalAvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/DecimalAvaticaParameterConverter.java new file mode 100644 index 0000000000000..fad43e2e06a76 --- /dev/null +++ 
b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/DecimalAvaticaParameterConverter.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import java.math.BigDecimal; + +import org.apache.arrow.vector.DecimalVector; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * AvaticaParameterConverter for Decimal Arrow types. 
+ */ +public class DecimalAvaticaParameterConverter extends BaseAvaticaParameterConverter { + + public DecimalAvaticaParameterConverter(ArrowType.Decimal type) { + } + + @Override + public boolean bindParameter(FieldVector vector, TypedValue typedValue, int index) { + BigDecimal value = (BigDecimal) typedValue.toLocal(); + if (vector instanceof DecimalVector) { + ((DecimalVector) vector).setSafe(index, value); + return true; + } + return false; + } + + @Override + public AvaticaParameter createParameter(Field field) { + return createParameter(field, true); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/DurationAvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/DurationAvaticaParameterConverter.java new file mode 100644 index 0000000000000..89f2fc1d5c12f --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/DurationAvaticaParameterConverter.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * AvaticaParameterConverter for Duration Arrow types. + */ +public class DurationAvaticaParameterConverter extends BaseAvaticaParameterConverter { + + public DurationAvaticaParameterConverter(ArrowType.Duration type) { + } + + @Override + public boolean bindParameter(FieldVector vector, TypedValue typedValue, int index) { + return false; + } + + @Override + public AvaticaParameter createParameter(Field field) { + return createParameter(field, false); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/FixedSizeBinaryAvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/FixedSizeBinaryAvaticaParameterConverter.java new file mode 100644 index 0000000000000..a90434f695ac3 --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/FixedSizeBinaryAvaticaParameterConverter.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.FixedSizeBinaryVector; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * AvaticaParameterConverter for FixedSizeBinary Arrow types. + */ +public class FixedSizeBinaryAvaticaParameterConverter extends BaseAvaticaParameterConverter { + + public FixedSizeBinaryAvaticaParameterConverter(ArrowType.FixedSizeBinary type) { + } + + @Override + public boolean bindParameter(FieldVector vector, TypedValue typedValue, int index) { + byte[] value = (byte[]) typedValue.toJdbc(null); + if (vector instanceof FixedSizeBinaryVector) { + ((FixedSizeBinaryVector) vector).setSafe(index, value); + return true; + } + return false; + } + + @Override + public AvaticaParameter createParameter(Field field) { + return createParameter(field, false); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/FixedSizeListAvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/FixedSizeListAvaticaParameterConverter.java new file mode 100644 index 0000000000000..60231a2460286 --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/FixedSizeListAvaticaParameterConverter.java @@ -0,0 +1,43 @@ +/* + * 
Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * AvaticaParameterConverter for FixedSizeList Arrow types. 
+ */ +public class FixedSizeListAvaticaParameterConverter extends BaseAvaticaParameterConverter { + + public FixedSizeListAvaticaParameterConverter(ArrowType.FixedSizeList type) { + } + + @Override + public boolean bindParameter(FieldVector vector, TypedValue typedValue, int index) { + return false; + } + + @Override + public AvaticaParameter createParameter(Field field) { + return createParameter(field, false); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/FloatingPointAvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/FloatingPointAvaticaParameterConverter.java new file mode 100644 index 0000000000000..9f305a2b6f20d --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/FloatingPointAvaticaParameterConverter.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.Float4Vector; +import org.apache.arrow.vector.Float8Vector; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * AvaticaParameterConverter for FloatingPoint Arrow types. + */ +public class FloatingPointAvaticaParameterConverter extends BaseAvaticaParameterConverter { + + public FloatingPointAvaticaParameterConverter(ArrowType.FloatingPoint type) { + } + + @Override + public boolean bindParameter(FieldVector vector, TypedValue typedValue, int index) { + Number value = (Number) typedValue.value; + if (vector instanceof Float4Vector) { + ((Float4Vector) vector).setSafe(index, value.floatValue()); + return true; + } else if (vector instanceof Float8Vector) { + ((Float8Vector) vector).setSafe(index, value.doubleValue()); + return true; + } + return false; + } + + @Override + public AvaticaParameter createParameter(Field field) { + return createParameter(field, true); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/IntAvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/IntAvaticaParameterConverter.java new file mode 100644 index 0000000000000..6684e8d32c9a9 --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/IntAvaticaParameterConverter.java @@ -0,0 +1,79 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import org.apache.arrow.vector.BigIntVector; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.IntVector; +import org.apache.arrow.vector.SmallIntVector; +import org.apache.arrow.vector.TinyIntVector; +import org.apache.arrow.vector.UInt1Vector; +import org.apache.arrow.vector.UInt2Vector; +import org.apache.arrow.vector.UInt4Vector; +import org.apache.arrow.vector.UInt8Vector; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * AvaticaParameterConverter for Int Arrow types. 
+ */ +public class IntAvaticaParameterConverter extends BaseAvaticaParameterConverter { + private final ArrowType.Int type; + + public IntAvaticaParameterConverter(ArrowType.Int type) { + this.type = type; + } + + @Override + public boolean bindParameter(FieldVector vector, TypedValue typedValue, int index) { + Number value = (Number) typedValue.value; + if (vector instanceof TinyIntVector) { + ((TinyIntVector) vector).setSafe(index, value.intValue()); + return true; + } else if (vector instanceof SmallIntVector) { + ((SmallIntVector) vector).setSafe(index, value.intValue()); + return true; + } else if (vector instanceof IntVector) { + ((IntVector) vector).setSafe(index, value.intValue()); + return true; + } else if (vector instanceof BigIntVector) { + ((BigIntVector) vector).setSafe(index, value.longValue()); + return true; + } else if (vector instanceof UInt1Vector) { + ((UInt1Vector) vector).setSafe(index, value.intValue()); + return true; + } else if (vector instanceof UInt2Vector) { + ((UInt2Vector) vector).setSafe(index, value.intValue()); + return true; + } else if (vector instanceof UInt4Vector) { + ((UInt4Vector) vector).setSafe(index, value.intValue()); + return true; + } else if (vector instanceof UInt8Vector) { + ((UInt8Vector) vector).setSafe(index, value.longValue()); + return true; + } + return false; + } + + @Override + public AvaticaParameter createParameter(Field field) { + return createParameter(field, type.getIsSigned()); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/IntervalAvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/IntervalAvaticaParameterConverter.java new file mode 100644 index 0000000000000..724275d51091e --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/IntervalAvaticaParameterConverter.java @@ -0,0 +1,49 @@ +/* + * Licensed to the 
Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * AvaticaParameterConverter for Interval Arrow types. 
+ */ +public class IntervalAvaticaParameterConverter extends BaseAvaticaParameterConverter { + + public IntervalAvaticaParameterConverter(ArrowType.Interval type) { + } + + @Override + public boolean bindParameter(FieldVector vector, TypedValue typedValue, int index) { + // Object value = typedValue.toLocal(); + // if (vector instanceof IntervalDayVector) { + // ((IntervalDayVector) vector).setSafe(index, () value); + // } else if (vector instanceof IntervalYearVector) { + // ((IntervalYearVector) vector).setSafe(index, () value); + // } + return false; + } + + @Override + public AvaticaParameter createParameter(Field field) { + return createParameter(field, false); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/LargeBinaryAvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/LargeBinaryAvaticaParameterConverter.java new file mode 100644 index 0000000000000..133ec2072d583 --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/LargeBinaryAvaticaParameterConverter.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.LargeVarBinaryVector; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * AvaticaParameterConverter for LargeBinary Arrow types. + */ +public class LargeBinaryAvaticaParameterConverter extends BaseAvaticaParameterConverter { + + public LargeBinaryAvaticaParameterConverter(ArrowType.LargeBinary type) { + } + + @Override + public boolean bindParameter(FieldVector vector, TypedValue typedValue, int index) { + byte[] value = (byte[]) typedValue.toJdbc(null); + if (vector instanceof LargeVarBinaryVector) { + ((LargeVarBinaryVector) vector).setSafe(index, value); + return true; + } + return false; + } + + @Override + public AvaticaParameter createParameter(Field field) { + return createParameter(field, false); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/LargeListAvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/LargeListAvaticaParameterConverter.java new file mode 100644 index 0000000000000..6ef6920474860 --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/LargeListAvaticaParameterConverter.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * AvaticaParameterConverter for LargeList Arrow types. + */ +public class LargeListAvaticaParameterConverter extends BaseAvaticaParameterConverter { + + public LargeListAvaticaParameterConverter(ArrowType.LargeList type) { + } + + @Override + public boolean bindParameter(FieldVector vector, TypedValue typedValue, int index) { + return false; + } + + @Override + public AvaticaParameter createParameter(Field field) { + return createParameter(field, false); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/LargeUtf8AvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/LargeUtf8AvaticaParameterConverter.java new file mode 100644 index 0000000000000..d412ab007ac67 --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/LargeUtf8AvaticaParameterConverter.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.LargeVarCharVector; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.util.Text; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * AvaticaParameterConverter for LargeUtf8 Arrow types. 
+ */ +public class LargeUtf8AvaticaParameterConverter extends BaseAvaticaParameterConverter { + + public LargeUtf8AvaticaParameterConverter(ArrowType.LargeUtf8 type) { + } + + @Override + public boolean bindParameter(FieldVector vector, TypedValue typedValue, int index) { + String value = (String) typedValue.toLocal(); + if (vector instanceof LargeVarCharVector) { + ((LargeVarCharVector) vector).setSafe(index, new Text(value)); + return true; + } + return false; + } + + @Override + public AvaticaParameter createParameter(Field field) { + return createParameter(field, false); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/ListAvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/ListAvaticaParameterConverter.java new file mode 100644 index 0000000000000..aec59cb4d428e --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/ListAvaticaParameterConverter.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * AvaticaParameterConverter for List Arrow types. + */ +public class ListAvaticaParameterConverter extends BaseAvaticaParameterConverter { + + public ListAvaticaParameterConverter(ArrowType.List type) { + } + + @Override + public boolean bindParameter(FieldVector vector, TypedValue typedValue, int index) { + return false; + } + + @Override + public AvaticaParameter createParameter(Field field) { + return createParameter(field, false); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/MapAvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/MapAvaticaParameterConverter.java new file mode 100644 index 0000000000000..feac3794d222b --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/MapAvaticaParameterConverter.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * AvaticaParameterConverter for Map Arrow types. + */ +public class MapAvaticaParameterConverter extends BaseAvaticaParameterConverter { + + public MapAvaticaParameterConverter(ArrowType.Map type) { + } + + @Override + public boolean bindParameter(FieldVector vector, TypedValue typedValue, int index) { + return false; + } + + @Override + public AvaticaParameter createParameter(Field field) { + return createParameter(field, false); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/NullAvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/NullAvaticaParameterConverter.java new file mode 100644 index 0000000000000..e2c184fb11a09 --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/NullAvaticaParameterConverter.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.NullVector; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * AvaticaParameterConverter for Null Arrow types. + */ +public class NullAvaticaParameterConverter extends BaseAvaticaParameterConverter { + + public NullAvaticaParameterConverter(ArrowType.Null type) { + } + + @Override + public boolean bindParameter(FieldVector vector, TypedValue typedValue, int index) { + Object value = typedValue.toLocal(); + if (vector instanceof NullVector) { + if (value != null) { throw new RuntimeException("Can't set non-null value on NullVector"); } + vector.setNull(index); + } + return false; + } + + @Override + public AvaticaParameter createParameter(Field field) { + return createParameter(field, false); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/StructAvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/StructAvaticaParameterConverter.java new file mode 100644 index 0000000000000..5dfe923cb516e --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/StructAvaticaParameterConverter.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one 
or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * AvaticaParameterConverter for Struct Arrow types. 
+ */ +public class StructAvaticaParameterConverter extends BaseAvaticaParameterConverter { + + public StructAvaticaParameterConverter(ArrowType.Struct type) { + } + + @Override + public boolean bindParameter(FieldVector vector, TypedValue typedValue, int index) { + return false; + } + + @Override + public AvaticaParameter createParameter(Field field) { + return createParameter(field, false); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/TimeAvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/TimeAvaticaParameterConverter.java new file mode 100644 index 0000000000000..c6b79537fd435 --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/TimeAvaticaParameterConverter.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.TimeMicroVector; +import org.apache.arrow.vector.TimeMilliVector; +import org.apache.arrow.vector.TimeNanoVector; +import org.apache.arrow.vector.TimeSecVector; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * AvaticaParameterConverter for Time Arrow types. + */ +public class TimeAvaticaParameterConverter extends BaseAvaticaParameterConverter { + + public TimeAvaticaParameterConverter(ArrowType.Time type) { + } + + @Override + public boolean bindParameter(FieldVector vector, TypedValue typedValue, int index) { + int value = (int) typedValue.toLocal(); + if (vector instanceof TimeMicroVector) { + ((TimeMicroVector) vector).setSafe(index, value); + return true; + } else if (vector instanceof TimeMilliVector) { + ((TimeMilliVector) vector).setSafe(index, value); + return true; + } else if (vector instanceof TimeNanoVector) { + ((TimeNanoVector) vector).setSafe(index, value); + return true; + } else if (vector instanceof TimeSecVector) { + ((TimeSecVector) vector).setSafe(index, value); + return true; + } + return false; + } + + @Override + public AvaticaParameter createParameter(Field field) { + return createParameter(field, false); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/TimestampAvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/TimestampAvaticaParameterConverter.java new file mode 100644 index 0000000000000..eb3316a1bdbed --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/TimestampAvaticaParameterConverter.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache 
Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.TimeStampMicroTZVector; +import org.apache.arrow.vector.TimeStampMicroVector; +import org.apache.arrow.vector.TimeStampMilliTZVector; +import org.apache.arrow.vector.TimeStampMilliVector; +import org.apache.arrow.vector.TimeStampNanoTZVector; +import org.apache.arrow.vector.TimeStampNanoVector; +import org.apache.arrow.vector.TimeStampSecTZVector; +import org.apache.arrow.vector.TimeStampSecVector; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * AvaticaParameterConverter for Timestamp Arrow types. 
+ */ +public class TimestampAvaticaParameterConverter extends BaseAvaticaParameterConverter { + + public TimestampAvaticaParameterConverter(ArrowType.Timestamp type) { + } + + @Override + public boolean bindParameter(FieldVector vector, TypedValue typedValue, int index) { + long value = (long) typedValue.toLocal(); + if (vector instanceof TimeStampSecVector) { + ((TimeStampSecVector) vector).setSafe(index, value); + return true; + } else if (vector instanceof TimeStampMicroVector) { + ((TimeStampMicroVector) vector).setSafe(index, value); + return true; + } else if (vector instanceof TimeStampMilliVector) { + ((TimeStampMilliVector) vector).setSafe(index, value); + return true; + } else if (vector instanceof TimeStampNanoVector) { + ((TimeStampNanoVector) vector).setSafe(index, value); + return true; + } else if (vector instanceof TimeStampSecTZVector) { + ((TimeStampSecTZVector) vector).setSafe(index, value); + return true; + } else if (vector instanceof TimeStampMicroTZVector) { + ((TimeStampMicroTZVector) vector).setSafe(index, value); + return true; + } else if (vector instanceof TimeStampMilliTZVector) { + ((TimeStampMilliTZVector) vector).setSafe(index, value); + return true; + } else if (vector instanceof TimeStampNanoTZVector) { + ((TimeStampNanoTZVector) vector).setSafe(index, value); + return true; + } + return false; + } + + @Override + public AvaticaParameter createParameter(Field field) { + return createParameter(field, false); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/UnionAvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/UnionAvaticaParameterConverter.java new file mode 100644 index 0000000000000..6b171e685579a --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/UnionAvaticaParameterConverter.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache 
Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * AvaticaParameterConverter for Union Arrow types. 
+ */ +public class UnionAvaticaParameterConverter extends BaseAvaticaParameterConverter { + + public UnionAvaticaParameterConverter(ArrowType.Union type) { + } + + @Override + public boolean bindParameter(FieldVector vector, TypedValue typedValue, int index) { + return false; + } + + @Override + public AvaticaParameter createParameter(Field field) { + return createParameter(field, false); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/Utf8AvaticaParameterConverter.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/Utf8AvaticaParameterConverter.java new file mode 100644 index 0000000000000..9223e5361d2d5 --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/converter/impl/Utf8AvaticaParameterConverter.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.arrow.driver.jdbc.converter.impl; + +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.VarCharVector; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.util.Text; +import org.apache.calcite.avatica.AvaticaParameter; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * AvaticaParameterConverter for Utf8 Arrow types. + */ +public class Utf8AvaticaParameterConverter extends BaseAvaticaParameterConverter { + + public Utf8AvaticaParameterConverter(ArrowType.Utf8 type) { + } + + @Override + public boolean bindParameter(FieldVector vector, TypedValue typedValue, int index) { + String value = (String) typedValue.toLocal(); + if (vector instanceof VarCharVector) { + ((VarCharVector) vector).setSafe(index, new Text(value)); + return true; + } + return false; + } + + @Override + public AvaticaParameter createParameter(Field field) { + return createParameter(field, false); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/utils/ArrowFlightConnectionConfigImpl.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/utils/ArrowFlightConnectionConfigImpl.java index ac338a85d6292..6237a8b58d68a 100644 --- a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/utils/ArrowFlightConnectionConfigImpl.java +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/utils/ArrowFlightConnectionConfigImpl.java @@ -109,6 +109,18 @@ public boolean useSystemTrustStore() { return ArrowFlightConnectionProperty.USE_SYSTEM_TRUST_STORE.getBoolean(properties); } + public String getTlsRootCertificatesPath() { + return ArrowFlightConnectionProperty.TLS_ROOT_CERTS.getString(properties); + } + + public String getClientCertificatePath() { + return ArrowFlightConnectionProperty.CLIENT_CERTIFICATE.getString(properties); + } + + public 
String getClientKeyPath() { + return ArrowFlightConnectionProperty.CLIENT_KEY.getString(properties); + } + /** * Whether to use TLS encryption. * @@ -131,6 +143,22 @@ public int threadPoolSize() { return ArrowFlightConnectionProperty.THREAD_POOL_SIZE.getInteger(properties); } + /** + * Indicates if sub-connections created for stream retrieval + * should reuse cookies from the main connection. + */ + public boolean retainCookies() { + return ArrowFlightConnectionProperty.RETAIN_COOKIES.getBoolean(properties); + } + + /** + * Indicates if sub-connections created for stream retrieval + * should reuse bearer tokens created from the main connection. + */ + public boolean retainAuth() { + return ArrowFlightConnectionProperty.RETAIN_AUTH.getBoolean(properties); + } + /** * Gets the {@link CallOption}s from this {@link ConnectionConfig}. * @@ -175,8 +203,13 @@ public enum ArrowFlightConnectionProperty implements ConnectionProperty { TRUST_STORE("trustStore", null, Type.STRING, false), TRUST_STORE_PASSWORD("trustStorePassword", null, Type.STRING, false), USE_SYSTEM_TRUST_STORE("useSystemTrustStore", true, Type.BOOLEAN, false), + TLS_ROOT_CERTS("tlsRootCerts", null, Type.STRING, false), + CLIENT_CERTIFICATE("clientCertificate", null, Type.STRING, false), + CLIENT_KEY("clientKey", null, Type.STRING, false), THREAD_POOL_SIZE("threadPoolSize", 1, Type.NUMBER, false), - TOKEN("token", null, Type.STRING, false); + TOKEN("token", null, Type.STRING, false), + RETAIN_COOKIES("retainCookies", true, Type.BOOLEAN, false), + RETAIN_AUTH("retainAuth", true, Type.BOOLEAN, false); private final String camelName; private final Object defaultValue; diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/utils/AvaticaParameterBinder.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/utils/AvaticaParameterBinder.java new file mode 100644 index 0000000000000..9e805fc79bcf8 --- /dev/null +++ 
b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/utils/AvaticaParameterBinder.java @@ -0,0 +1,239 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.driver.jdbc.utils; + +import java.util.List; + +import org.apache.arrow.driver.jdbc.client.ArrowFlightSqlClientHandler.PreparedStatement; +import org.apache.arrow.driver.jdbc.converter.impl.BinaryAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.BoolAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.DateAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.DecimalAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.DurationAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.FixedSizeBinaryAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.FixedSizeListAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.FloatingPointAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.IntAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.IntervalAvaticaParameterConverter; +import 
org.apache.arrow.driver.jdbc.converter.impl.LargeBinaryAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.LargeListAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.LargeUtf8AvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.ListAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.MapAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.NullAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.StructAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.TimeAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.TimestampAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.UnionAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.Utf8AvaticaParameterConverter; +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.calcite.avatica.remote.TypedValue; + +/** + * Convert Avatica PreparedStatement parameters from a list of TypedValue to Arrow and bind them to the + * VectorSchemaRoot representing the PreparedStatement parameters. + *

+ * NOTE: Make sure to close the parameters VectorSchemaRoot once we're done with them. + */ +public class AvaticaParameterBinder { + private final PreparedStatement preparedStatement; + private final VectorSchemaRoot parameters; + + public AvaticaParameterBinder(PreparedStatement preparedStatement, BufferAllocator bufferAllocator) { + this.parameters = VectorSchemaRoot.create(preparedStatement.getParameterSchema(), bufferAllocator); + this.preparedStatement = preparedStatement; + } + + /** + * Bind the given Avatica values to the prepared statement. + * @param typedValues The parameter values. + */ + public void bind(List typedValues) { + bind(typedValues, 0); + } + + /** + * Bind the given Avatica values to the prepared statement at the given index. + * @param typedValues The parameter values. + * @param index index for parameter. + */ + public void bind(List typedValues, int index) { + if (preparedStatement.getParameterSchema().getFields().size() != typedValues.size()) { + throw new IllegalStateException( + String.format("Prepared statement has %s parameters, but only received %s", + preparedStatement.getParameterSchema().getFields().size(), + typedValues.size())); + } + + for (int i = 0; i < typedValues.size(); i++) { + bind(parameters.getVector(i), typedValues.get(i), index); + } + + if (!typedValues.isEmpty()) { + parameters.setRowCount(index + 1); + preparedStatement.setParameters(parameters); + } + } + + /** + * Bind a TypedValue to the given index on the FieldVctor. + * + * @param vector FieldVector to bind to. + * @param typedValue TypedValue to bind to the vector. + * @param index Vector index to bind the value at. 
+ */ + private void bind(FieldVector vector, TypedValue typedValue, int index) { + try { + if (typedValue.value == null) { + if (vector.getField().isNullable()) { + vector.setNull(index); + } else { + throw new UnsupportedOperationException("Can't set null on non-nullable parameter"); + } + } else if (!vector.getField().getType().accept(new BinderVisitor(vector, typedValue, index))) { + throw new UnsupportedOperationException( + String.format("Binding to vector type %s is not yet supported", vector.getClass())); + } + } catch (ClassCastException e) { + throw new UnsupportedOperationException( + String.format("Binding value of type %s is not yet supported for expected Arrow type %s", + typedValue.type, vector.getField().getType())); + } + } + + private static class BinderVisitor implements ArrowType.ArrowTypeVisitor { + private final FieldVector vector; + private final TypedValue typedValue; + private final int index; + + private BinderVisitor(FieldVector vector, TypedValue value, int index) { + this.vector = vector; + this.typedValue = value; + this.index = index; + } + + @Override + public Boolean visit(ArrowType.Null type) { + return new NullAvaticaParameterConverter(type).bindParameter(vector, typedValue, index); + } + + @Override + public Boolean visit(ArrowType.Struct type) { + return new StructAvaticaParameterConverter(type).bindParameter(vector, typedValue, index); + } + + @Override + public Boolean visit(ArrowType.List type) { + return new ListAvaticaParameterConverter(type).bindParameter(vector, typedValue, index); + } + + @Override + public Boolean visit(ArrowType.LargeList type) { + return new LargeListAvaticaParameterConverter(type).bindParameter(vector, typedValue, index); + } + + @Override + public Boolean visit(ArrowType.FixedSizeList type) { + return new FixedSizeListAvaticaParameterConverter(type).bindParameter(vector, typedValue, index); + } + + @Override + public Boolean visit(ArrowType.Union type) { + return new 
UnionAvaticaParameterConverter(type).bindParameter(vector, typedValue, index); + } + + @Override + public Boolean visit(ArrowType.Map type) { + return new MapAvaticaParameterConverter(type).bindParameter(vector, typedValue, index); + } + + @Override + public Boolean visit(ArrowType.Int type) { + return new IntAvaticaParameterConverter(type).bindParameter(vector, typedValue, index); + } + + @Override + public Boolean visit(ArrowType.FloatingPoint type) { + return new FloatingPointAvaticaParameterConverter(type).bindParameter(vector, typedValue, index); + } + + @Override + public Boolean visit(ArrowType.Utf8 type) { + return new Utf8AvaticaParameterConverter(type).bindParameter(vector, typedValue, index); + } + + @Override + public Boolean visit(ArrowType.LargeUtf8 type) { + return new LargeUtf8AvaticaParameterConverter(type).bindParameter(vector, typedValue, index); + } + + @Override + public Boolean visit(ArrowType.Binary type) { + return new BinaryAvaticaParameterConverter(type).bindParameter(vector, typedValue, index); + } + + @Override + public Boolean visit(ArrowType.LargeBinary type) { + return new LargeBinaryAvaticaParameterConverter(type).bindParameter(vector, typedValue, index); + } + + @Override + public Boolean visit(ArrowType.FixedSizeBinary type) { + return new FixedSizeBinaryAvaticaParameterConverter(type).bindParameter(vector, typedValue, index); + } + + @Override + public Boolean visit(ArrowType.Bool type) { + return new BoolAvaticaParameterConverter(type).bindParameter(vector, typedValue, index); + } + + @Override + public Boolean visit(ArrowType.Decimal type) { + return new DecimalAvaticaParameterConverter(type).bindParameter(vector, typedValue, index); + } + + @Override + public Boolean visit(ArrowType.Date type) { + return new DateAvaticaParameterConverter(type).bindParameter(vector, typedValue, index); + } + + @Override + public Boolean visit(ArrowType.Time type) { + return new TimeAvaticaParameterConverter(type).bindParameter(vector, 
typedValue, index); + } + + @Override + public Boolean visit(ArrowType.Timestamp type) { + return new TimestampAvaticaParameterConverter(type).bindParameter(vector, typedValue, index); + } + + @Override + public Boolean visit(ArrowType.Interval type) { + return new IntervalAvaticaParameterConverter(type).bindParameter(vector, typedValue, index); + } + + @Override + public Boolean visit(ArrowType.Duration type) { + return new DurationAvaticaParameterConverter(type).bindParameter(vector, typedValue, index); + } + } + +} diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/utils/ConvertUtils.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/utils/ConvertUtils.java index 324f991ef09e9..b21b03340e9f9 100644 --- a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/utils/ConvertUtils.java +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/utils/ConvertUtils.java @@ -22,15 +22,37 @@ import java.util.stream.Collectors; import java.util.stream.Stream; +import org.apache.arrow.driver.jdbc.converter.impl.BinaryAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.BoolAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.DateAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.DecimalAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.DurationAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.FixedSizeBinaryAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.FixedSizeListAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.FloatingPointAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.IntAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.IntervalAvaticaParameterConverter; +import 
org.apache.arrow.driver.jdbc.converter.impl.LargeBinaryAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.LargeListAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.LargeUtf8AvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.ListAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.MapAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.NullAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.StructAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.TimeAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.TimestampAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.UnionAvaticaParameterConverter; +import org.apache.arrow.driver.jdbc.converter.impl.Utf8AvaticaParameterConverter; import org.apache.arrow.flight.sql.FlightSqlColumnMetadata; import org.apache.arrow.vector.types.pojo.ArrowType; import org.apache.arrow.vector.types.pojo.Field; +import org.apache.calcite.avatica.AvaticaParameter; import org.apache.calcite.avatica.ColumnMetaData; import org.apache.calcite.avatica.proto.Common; import org.apache.calcite.avatica.proto.Common.ColumnMetaData.Builder; /** - * Convert Fields To Column MetaData List functions. + * Convert objects between Arrow and Avatica. */ public final class ConvertUtils { @@ -113,4 +135,134 @@ public static void setOnColumnMetaDataBuilder(final Builder builder, builder.setSearchable(searchable); } } + + /** + * Convert Fields To Avatica Parameters. + * + * @param fields list of {@link Field}. + * @return list of {@link AvaticaParameter}. 
+ */ + public static List convertArrowFieldsToAvaticaParameters(final List fields) { + return fields.stream() + .map(field -> field.getType().accept(new ConverterVisitor(field))) + .collect(Collectors.toList()); + } + + private static class ConverterVisitor implements ArrowType.ArrowTypeVisitor { + private final Field field; + + private ConverterVisitor(Field field) { + this.field = field; + } + + @Override + public AvaticaParameter visit(ArrowType.Null type) { + return new NullAvaticaParameterConverter(type).createParameter(field); + } + + @Override + public AvaticaParameter visit(ArrowType.Struct type) { + return new StructAvaticaParameterConverter(type).createParameter(field); + } + + @Override + public AvaticaParameter visit(ArrowType.List type) { + return new ListAvaticaParameterConverter(type).createParameter(field); + + } + + @Override + public AvaticaParameter visit(ArrowType.LargeList type) { + return new LargeListAvaticaParameterConverter(type).createParameter(field); + + } + + @Override + public AvaticaParameter visit(ArrowType.FixedSizeList type) { + return new FixedSizeListAvaticaParameterConverter(type).createParameter(field); + + } + + @Override + public AvaticaParameter visit(ArrowType.Union type) { + return new UnionAvaticaParameterConverter(type).createParameter(field); + + } + + @Override + public AvaticaParameter visit(ArrowType.Map type) { + return new MapAvaticaParameterConverter(type).createParameter(field); + } + + @Override + public AvaticaParameter visit(ArrowType.Int type) { + return new IntAvaticaParameterConverter(type).createParameter(field); + } + + @Override + public AvaticaParameter visit(ArrowType.FloatingPoint type) { + return new FloatingPointAvaticaParameterConverter(type).createParameter(field); + } + + @Override + public AvaticaParameter visit(ArrowType.Utf8 type) { + return new Utf8AvaticaParameterConverter(type).createParameter(field); + } + + @Override + public AvaticaParameter visit(ArrowType.LargeUtf8 type) { + return new 
LargeUtf8AvaticaParameterConverter(type).createParameter(field); + } + + @Override + public AvaticaParameter visit(ArrowType.Binary type) { + return new BinaryAvaticaParameterConverter(type).createParameter(field); + } + + @Override + public AvaticaParameter visit(ArrowType.LargeBinary type) { + return new LargeBinaryAvaticaParameterConverter(type).createParameter(field); + } + + @Override + public AvaticaParameter visit(ArrowType.FixedSizeBinary type) { + return new FixedSizeBinaryAvaticaParameterConverter(type).createParameter(field); + } + + @Override + public AvaticaParameter visit(ArrowType.Bool type) { + return new BoolAvaticaParameterConverter(type).createParameter(field); + } + + @Override + public AvaticaParameter visit(ArrowType.Decimal type) { + return new DecimalAvaticaParameterConverter(type).createParameter(field); + } + + @Override + public AvaticaParameter visit(ArrowType.Date type) { + return new DateAvaticaParameterConverter(type).createParameter(field); + } + + @Override + public AvaticaParameter visit(ArrowType.Time type) { + return new TimeAvaticaParameterConverter(type).createParameter(field); + } + + @Override + public AvaticaParameter visit(ArrowType.Timestamp type) { + return new TimestampAvaticaParameterConverter(type).createParameter(field); + } + + @Override + public AvaticaParameter visit(ArrowType.Interval type) { + return new IntervalAvaticaParameterConverter(type).createParameter(field); + } + + @Override + public AvaticaParameter visit(ArrowType.Duration type) { + return new DurationAvaticaParameterConverter(type).createParameter(field); + } + } + } diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/utils/FlightStreamQueue.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/utils/FlightEndpointDataQueue.java similarity index 73% rename from java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/utils/FlightStreamQueue.java rename to 
java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/utils/FlightEndpointDataQueue.java index e1d770800e40c..1198d89c40aef 100644 --- a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/utils/FlightStreamQueue.java +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/utils/FlightEndpointDataQueue.java @@ -36,6 +36,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import org.apache.arrow.driver.jdbc.client.CloseableEndpointStreamPair; import org.apache.arrow.flight.CallStatus; import org.apache.arrow.flight.FlightRuntimeException; import org.apache.arrow.flight.FlightStream; @@ -55,28 +56,28 @@ *

  • Repeat from (3) until next() returns null.
  • * */ -public class FlightStreamQueue implements AutoCloseable { - private static final Logger LOGGER = LoggerFactory.getLogger(FlightStreamQueue.class); - private final CompletionService completionService; - private final Set> futures = synchronizedSet(new HashSet<>()); - private final Set allStreams = synchronizedSet(new HashSet<>()); +public class FlightEndpointDataQueue implements AutoCloseable { + private static final Logger LOGGER = LoggerFactory.getLogger(FlightEndpointDataQueue.class); + private final CompletionService completionService; + private final Set> futures = synchronizedSet(new HashSet<>()); + private final Set endpointsToClose = synchronizedSet(new HashSet<>()); private final AtomicBoolean closed = new AtomicBoolean(); /** * Instantiate a new FlightStreamQueue. */ - protected FlightStreamQueue(final CompletionService executorService) { + protected FlightEndpointDataQueue(final CompletionService executorService) { completionService = checkNotNull(executorService); } /** - * Creates a new {@link FlightStreamQueue} from the provided {@link ExecutorService}. + * Creates a new {@link FlightEndpointDataQueue} from the provided {@link ExecutorService}. * * @param service the service from which to create a new queue. * @return a new queue. */ - public static FlightStreamQueue createNewQueue(final ExecutorService service) { - return new FlightStreamQueue(new ExecutorCompletionService<>(service)); + public static FlightEndpointDataQueue createNewQueue(final ExecutorService service) { + return new FlightEndpointDataQueue(new ExecutorCompletionService<>(service)); } /** @@ -92,19 +93,20 @@ public boolean isClosed() { * Auxiliary functional interface for getting ready-to-consume FlightStreams. 
*/ @FunctionalInterface - interface FlightStreamSupplier { - Future get() throws SQLException; + interface EndpointStreamSupplier { + Future get() throws SQLException; } - private FlightStream next(final FlightStreamSupplier flightStreamSupplier) throws SQLException { + private CloseableEndpointStreamPair next(final EndpointStreamSupplier endpointStreamSupplier) throws SQLException { checkOpen(); while (!futures.isEmpty()) { - final Future future = flightStreamSupplier.get(); + final Future future = endpointStreamSupplier.get(); futures.remove(future); try { - final FlightStream stream = future.get(); - if (stream.getRoot().getRowCount() > 0) { - return stream; + final CloseableEndpointStreamPair endpoint = future.get(); + // Get the next FlightStream that has a root with content. + if (endpoint != null) { + return endpoint; } } catch (final ExecutionException | InterruptedException | CancellationException e) { throw AvaticaConnection.HELPER.wrap(e.getMessage(), e); @@ -120,11 +122,11 @@ private FlightStream next(final FlightStreamSupplier flightStreamSupplier) throw * @param timeoutUnit the timeoutValue time unit * @return a FlightStream that is ready to consume or null if all FlightStreams are ended. */ - public FlightStream next(final long timeoutValue, final TimeUnit timeoutUnit) + public CloseableEndpointStreamPair next(final long timeoutValue, final TimeUnit timeoutUnit) throws SQLException { return next(() -> { try { - final Future future = completionService.poll(timeoutValue, timeoutUnit); + final Future future = completionService.poll(timeoutValue, timeoutUnit); if (future != null) { return future; } @@ -142,7 +144,7 @@ public FlightStream next(final long timeoutValue, final TimeUnit timeoutUnit) * * @return a FlightStream that is ready to consume or null if all FlightStreams are ended. 
*/ - public FlightStream next() throws SQLException { + public CloseableEndpointStreamPair next() throws SQLException { return next(() -> { try { return completionService.take(); @@ -162,21 +164,25 @@ public synchronized void checkOpen() { /** * Readily adds given {@link FlightStream}s to the queue. */ - public void enqueue(final Collection flightStreams) { - flightStreams.forEach(this::enqueue); + public void enqueue(final Collection endpointRequests) { + endpointRequests.forEach(this::enqueue); } /** * Adds given {@link FlightStream} to the queue. */ - public synchronized void enqueue(final FlightStream flightStream) { - checkNotNull(flightStream); + public synchronized void enqueue(final CloseableEndpointStreamPair endpointRequest) { + checkNotNull(endpointRequest); checkOpen(); - allStreams.add(flightStream); + endpointsToClose.add(endpointRequest); futures.add(completionService.submit(() -> { // `FlightStream#next` will block until new data can be read or stream is over. - flightStream.next(); - return flightStream; + while (endpointRequest.getStream().next()) { + if (endpointRequest.getStream().getRoot().getRowCount() > 0) { + return endpointRequest; + } + } + return null; })); } @@ -187,14 +193,15 @@ private static boolean isCallStatusCancelled(final Exception e) { @Override public synchronized void close() throws SQLException { - final Set exceptions = new HashSet<>(); if (isClosed()) { return; } + + final Set exceptions = new HashSet<>(); try { - for (final FlightStream flightStream : allStreams) { + for (final CloseableEndpointStreamPair endpointToClose : endpointsToClose) { try { - flightStream.cancel("Cancelling this FlightStream.", null); + endpointToClose.getStream().cancel("Cancelling this FlightStream.", null); } catch (final Exception e) { final String errorMsg = "Failed to cancel a FlightStream."; LOGGER.error(errorMsg, e); @@ -214,9 +221,9 @@ public synchronized void close() throws SQLException { } } }); - for (final FlightStream flightStream : 
allStreams) { + for (final CloseableEndpointStreamPair endpointToClose : endpointsToClose) { try { - flightStream.close(); + endpointToClose.close(); } catch (final Exception e) { final String errorMsg = "Failed to close a FlightStream."; LOGGER.error(errorMsg, e); @@ -224,7 +231,7 @@ public synchronized void close() throws SQLException { } } } finally { - allStreams.clear(); + endpointsToClose.clear(); futures.clear(); closed.set(true); } diff --git a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/utils/IntervalStringUtils.java b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/utils/IntervalStringUtils.java index 05643274ac348..fdf6c508d93b0 100644 --- a/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/utils/IntervalStringUtils.java +++ b/java/flight/flight-sql-jdbc-core/src/main/java/org/apache/arrow/driver/jdbc/utils/IntervalStringUtils.java @@ -17,8 +17,10 @@ package org.apache.arrow.driver.jdbc.utils; +import java.time.Duration; +import java.time.Period; + import org.apache.arrow.vector.util.DateUtility; -import org.joda.time.Period; /** * Utility class to format periods similar to Oracle's representation @@ -36,7 +38,7 @@ private IntervalStringUtils( ) {} * For example, the string "+21-02" defines an interval of 21 years and 2 months. */ public static String formatIntervalYear(final Period p) { - long months = p.getYears() * (long) DateUtility.yearsToMonths + p.getMonths(); + long months = p.toTotalMonths(); boolean neg = false; if (months < 0) { months = -months; @@ -53,8 +55,8 @@ public static String formatIntervalYear(final Period p) { * For example, the string "-001 18:25:16.766" defines an interval of * - 1 day 18 hours 25 minutes 16 seconds and 766 milliseconds. 
*/ - public static String formatIntervalDay(final Period p) { - long millis = p.getDays() * (long) DateUtility.daysToStandardMillis + millisFromPeriod(p); + public static String formatIntervalDay(final Duration d) { + long millis = d.toMillis(); boolean neg = false; if (millis < 0) { @@ -76,9 +78,4 @@ public static String formatIntervalDay(final Period p) { return String.format("%c%03d %02d:%02d:%02d.%03d", neg ? '-' : '+', days, hours, minutes, seconds, millis); } - - public static int millisFromPeriod(Period period) { - return period.getHours() * DateUtility.hoursToMillis + period.getMinutes() * DateUtility.minutesToMillis + - period.getSeconds() * DateUtility.secondsToMillis + period.getMillis(); - } } diff --git a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ArrowFlightPreparedStatementTest.java b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ArrowFlightPreparedStatementTest.java index df2577e955881..b19f049544ada 100644 --- a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ArrowFlightPreparedStatementTest.java +++ b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ArrowFlightPreparedStatementTest.java @@ -20,14 +20,26 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.junit.jupiter.api.Assertions.assertEquals; +import java.nio.charset.StandardCharsets; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; +import java.util.Arrays; +import java.util.Collections; import org.apache.arrow.driver.jdbc.utils.CoreMockedSqlProducers; import org.apache.arrow.driver.jdbc.utils.MockFlightSqlProducer; import org.apache.arrow.flight.sql.FlightSqlUtils; +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.IntVector; +import org.apache.arrow.vector.VectorSchemaRoot; +import 
org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.types.pojo.Schema; +import org.apache.arrow.vector.util.Text; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -73,6 +85,39 @@ public void testSimpleQueryNoParameterBinding() throws SQLException { } } + @Test + public void testQueryWithParameterBinding() throws SQLException { + final String query = "Fake query with parameters"; + final Schema schema = new Schema(Collections.singletonList(Field.nullable("", Types.MinorType.INT.getType()))); + PRODUCER.addSelectQuery(query, schema, + Collections.singletonList(listener -> { + try (final BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE); + final VectorSchemaRoot root = VectorSchemaRoot.create(schema, + allocator)) { + ((IntVector) root.getVector(0)).setSafe(0, 10); + root.setRowCount(1); + listener.start(root); + listener.putNext(); + } catch (final Throwable throwable) { + listener.error(throwable); + } finally { + listener.completed(); + } + })); + + PRODUCER.addExpectedParameters(query, + new Schema(Collections.singletonList(Field.nullable("", ArrowType.Utf8.INSTANCE))), + Collections.singletonList(Collections.singletonList(new Text("foo".getBytes(StandardCharsets.UTF_8))))); + try (final PreparedStatement preparedStatement = connection.prepareStatement(query)) { + preparedStatement.setString(1, "foo"); + try (final ResultSet resultSet = preparedStatement.executeQuery()) { + resultSet.next(); + assert true; + } + } + } + + @Test @Ignore("https://github.com/apache/arrow/issues/34741: flaky test") public void testPreparedStatementExecutionOnce() throws SQLException { @@ -107,4 +152,39 @@ public void testUpdateQuery() throws SQLException { assertEquals(42, updated); } } + + @Test + public void testUpdateQueryWithParameters() throws SQLException { + String query = "Fake update with parameters"; + 
PRODUCER.addUpdateQuery(query, /*updatedRows*/42); + PRODUCER.addExpectedParameters(query, + new Schema(Collections.singletonList(Field.nullable("", ArrowType.Utf8.INSTANCE))), + Collections.singletonList(Collections.singletonList(new Text("foo".getBytes(StandardCharsets.UTF_8))))); + try (final PreparedStatement stmt = connection.prepareStatement(query)) { + // TODO: make sure this is validated on the server too + stmt.setString(1, "foo"); + int updated = stmt.executeUpdate(); + assertEquals(42, updated); + } + } + + @Test + public void testUpdateQueryWithBatchedParameters() throws SQLException { + String query = "Fake update with batched parameters"; + PRODUCER.addUpdateQuery(query, /*updatedRows*/42); + PRODUCER.addExpectedParameters(query, + new Schema(Collections.singletonList(Field.nullable("", ArrowType.Utf8.INSTANCE))), + Arrays.asList( + Collections.singletonList(new Text("foo".getBytes(StandardCharsets.UTF_8))), + Collections.singletonList(new Text("bar".getBytes(StandardCharsets.UTF_8))))); + try (final PreparedStatement stmt = connection.prepareStatement(query)) { + // TODO: make sure this is validated on the server too + stmt.setString(1, "foo"); + stmt.addBatch(); + stmt.setString(1, "bar"); + stmt.addBatch(); + int[] updated = stmt.executeBatch(); + assertEquals(42, updated[0]); + } + } } diff --git a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ConnectionMutualTlsTest.java b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ConnectionMutualTlsTest.java new file mode 100644 index 0000000000000..927b3e426c6ba --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ConnectionMutualTlsTest.java @@ -0,0 +1,435 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.driver.jdbc; + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertThrows; + +import java.io.File; +import java.net.URLEncoder; +import java.sql.Connection; +import java.sql.Driver; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.util.Properties; + +import org.apache.arrow.driver.jdbc.authentication.UserPasswordAuthentication; +import org.apache.arrow.driver.jdbc.client.ArrowFlightSqlClientHandler; +import org.apache.arrow.driver.jdbc.utils.ArrowFlightConnectionConfigImpl.ArrowFlightConnectionProperty; +import org.apache.arrow.driver.jdbc.utils.FlightSqlTestCertificates; +import org.apache.arrow.driver.jdbc.utils.MockFlightSqlProducer; +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.util.AutoCloseables; +import org.apache.calcite.avatica.org.apache.http.auth.UsernamePasswordCredentials; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; + +/** + * Tests encrypted connections. 
+ */ +public class ConnectionMutualTlsTest { + + @ClassRule + public static final FlightServerTestRule FLIGHT_SERVER_TEST_RULE; + private static final String tlsRootCertsPath; + + private static final String serverMTlsCACertPath; + private static final String clientMTlsCertPath; + private static final String badClientMTlsCertPath; + private static final String clientMTlsKeyPath; + private static final String badClientMTlsKeyPath; + private static final MockFlightSqlProducer PRODUCER = new MockFlightSqlProducer(); + private static final String userTest = "user1"; + private static final String passTest = "pass1"; + + static { + final FlightSqlTestCertificates.CertKeyPair + certKey = FlightSqlTestCertificates.exampleTlsCerts().get(0); + + tlsRootCertsPath = certKey.cert.getPath(); + + final File serverMTlsCACert = FlightSqlTestCertificates.exampleCACert(); + + serverMTlsCACertPath = serverMTlsCACert.getPath(); + + final FlightSqlTestCertificates.CertKeyPair + clientMTlsCertKey = FlightSqlTestCertificates.exampleTlsCerts().get(1); + + clientMTlsCertPath = clientMTlsCertKey.cert.getPath(); + clientMTlsKeyPath = clientMTlsCertKey.key.getPath(); + + badClientMTlsCertPath = clientMTlsCertPath + ".bad"; + badClientMTlsKeyPath = clientMTlsKeyPath + ".bad"; + + UserPasswordAuthentication authentication = new UserPasswordAuthentication.Builder() + .user(userTest, passTest) + .build(); + + FLIGHT_SERVER_TEST_RULE = new FlightServerTestRule.Builder() + .authentication(authentication) + .useEncryption(certKey.cert, certKey.key) + .useMTlsClientVerification(serverMTlsCACert) + .producer(PRODUCER) + .build(); + } + + private BufferAllocator allocator; + + @Before + public void setUp() throws Exception { + allocator = new RootAllocator(Long.MAX_VALUE); + } + + @After + public void tearDown() throws Exception { + allocator.getChildAllocators().forEach(BufferAllocator::close); + AutoCloseables.close(allocator); + } + + /** + * Try to instantiate an encrypted FlightClient. 
+ * + * @throws Exception on error. + */ + @Test + public void testGetEncryptedClientAuthenticated() throws Exception { + final UsernamePasswordCredentials credentials = new UsernamePasswordCredentials( + userTest, passTest); + + try (ArrowFlightSqlClientHandler client = + new ArrowFlightSqlClientHandler.Builder() + .withHost(FLIGHT_SERVER_TEST_RULE.getHost()) + .withPort(FLIGHT_SERVER_TEST_RULE.getPort()) + .withUsername(credentials.getUserName()) + .withPassword(credentials.getPassword()) + .withTlsRootCertificates(tlsRootCertsPath) + .withClientCertificate(clientMTlsCertPath) + .withClientKey(clientMTlsKeyPath) + .withBufferAllocator(allocator) + .withEncryption(true) + .build()) { + assertNotNull(client); + } + } + + /** + * Try to instantiate an encrypted FlightClient providing a bad mTLS Cert Path. It's expected to + * receive the SQLException. + */ + @Test + public void testGetEncryptedClientWithBadMTlsCertPath() { + final UsernamePasswordCredentials credentials = new UsernamePasswordCredentials( + userTest, passTest); + + assertThrows(SQLException.class, () -> { + try (ArrowFlightSqlClientHandler handler = new ArrowFlightSqlClientHandler.Builder() + .withHost(FLIGHT_SERVER_TEST_RULE.getHost()) + .withPort(FLIGHT_SERVER_TEST_RULE.getPort()) + .withUsername(credentials.getUserName()) + .withPassword(credentials.getPassword()) + .withTlsRootCertificates(tlsRootCertsPath) + .withClientCertificate(badClientMTlsCertPath) + .withClientKey(clientMTlsKeyPath) + .withBufferAllocator(allocator) + .withEncryption(true) + .build()) { + Assert.fail(); + } + }); + } + + /** + * Try to instantiate an encrypted FlightClient providing a bad mTLS Key Path. It's expected to + * receive the SQLException. 
+ */ + @Test + public void testGetEncryptedClientWithBadMTlsKeyPath() { + final UsernamePasswordCredentials credentials = new UsernamePasswordCredentials( + userTest, passTest); + + assertThrows(SQLException.class, () -> { + try (ArrowFlightSqlClientHandler handler = new ArrowFlightSqlClientHandler.Builder() + .withHost(FLIGHT_SERVER_TEST_RULE.getHost()) + .withPort(FLIGHT_SERVER_TEST_RULE.getPort()) + .withUsername(credentials.getUserName()) + .withPassword(credentials.getPassword()) + .withTlsRootCertificates(tlsRootCertsPath) + .withClientCertificate(clientMTlsCertPath) + .withClientKey(badClientMTlsKeyPath) + .withBufferAllocator(allocator) + .withEncryption(true) + .build()) { + Assert.fail(); + } + }); + } + + /** + * Try to instantiate an encrypted FlightClient without credentials. + * + * @throws Exception on error. + */ + @Test + public void testGetNonAuthenticatedEncryptedClientNoAuth() throws Exception { + try (ArrowFlightSqlClientHandler client = + new ArrowFlightSqlClientHandler.Builder() + .withHost(FLIGHT_SERVER_TEST_RULE.getHost()) + .withTlsRootCertificates(tlsRootCertsPath) + .withClientCertificate(clientMTlsCertPath) + .withClientKey(clientMTlsKeyPath) + .withBufferAllocator(allocator) + .withEncryption(true) + .build()) { + assertNotNull(client); + } + } + + /** + * Check if an encrypted connection can be established successfully when the + * provided valid credentials and a valid TLS Root Certs path. + * + * @throws Exception on error. 
+ */ + @Test + public void testGetEncryptedConnectionWithValidCredentialsAndTlsRootsPath() throws Exception { + final Properties properties = new Properties(); + + properties.put(ArrowFlightConnectionProperty.HOST.camelName(), "localhost"); + properties.put(ArrowFlightConnectionProperty.PORT.camelName(), + FLIGHT_SERVER_TEST_RULE.getPort()); + properties.put(ArrowFlightConnectionProperty.USER.camelName(), + userTest); + properties.put(ArrowFlightConnectionProperty.PASSWORD.camelName(), + passTest); + properties.put(ArrowFlightConnectionProperty.TLS_ROOT_CERTS.camelName(), + tlsRootCertsPath); + properties.put(ArrowFlightConnectionProperty.CLIENT_CERTIFICATE.camelName(), + clientMTlsCertPath); + properties.put(ArrowFlightConnectionProperty.CLIENT_KEY.camelName(), + clientMTlsKeyPath); + + final ArrowFlightJdbcDataSource dataSource = + ArrowFlightJdbcDataSource.createNewDataSource(properties); + try (final Connection connection = dataSource.getConnection()) { + Assert.assertTrue(connection.isValid(300)); + } + } + + /** + * Check if an encrypted connection can be established successfully when not + * providing authentication. + * + * @throws Exception on error. 
+ */ + @Test + public void testGetNonAuthenticatedEncryptedConnection() throws Exception { + final Properties properties = new Properties(); + + properties.put(ArrowFlightConnectionProperty.HOST.camelName(), FLIGHT_SERVER_TEST_RULE.getHost()); + properties.put(ArrowFlightConnectionProperty.PORT.camelName(), FLIGHT_SERVER_TEST_RULE.getPort()); + properties.put(ArrowFlightConnectionProperty.USE_ENCRYPTION.camelName(), true); + properties.put(ArrowFlightConnectionProperty.TLS_ROOT_CERTS.camelName(), tlsRootCertsPath); + properties.put(ArrowFlightConnectionProperty.CLIENT_CERTIFICATE.camelName(), clientMTlsCertPath); + properties.put(ArrowFlightConnectionProperty.CLIENT_KEY.camelName(), clientMTlsKeyPath); + + final ArrowFlightJdbcDataSource dataSource = ArrowFlightJdbcDataSource.createNewDataSource(properties); + try (final Connection connection = dataSource.getConnection()) { + Assert.assertTrue(connection.isValid(300)); + } + } + + /** + * Check if an encrypted connection can be established successfully when connecting through + * the DriverManager using just a connection url. + * + * @throws Exception on error. 
+ */ + @Test + public void testTLSConnectionPropertyTrueCorrectCastUrlWithDriverManager() throws Exception { + final Driver driver = new ArrowFlightJdbcDriver(); + DriverManager.registerDriver(driver); + + final String jdbcUrl = String.format( + "jdbc:arrow-flight-sql://localhost:%s?user=%s&password=%s" + + "&useEncryption=true&%s=%s&%s=%s&%s=%s", + FLIGHT_SERVER_TEST_RULE.getPort(), + userTest, + passTest, + ArrowFlightConnectionProperty.TLS_ROOT_CERTS.camelName(), + URLEncoder.encode(tlsRootCertsPath, "UTF-8"), + ArrowFlightConnectionProperty.CLIENT_CERTIFICATE.camelName(), + URLEncoder.encode(clientMTlsCertPath, "UTF-8"), + ArrowFlightConnectionProperty.CLIENT_KEY.camelName(), + URLEncoder.encode(clientMTlsKeyPath, "UTF-8")); + + try (Connection connection = DriverManager.getConnection(jdbcUrl)) { + Assert.assertTrue(connection.isValid(0)); + } + } + + /** + * Check if an encrypted connection can be established successfully when connecting through the DriverManager using + * a connection url and properties with String K-V pairs. + * + * @throws Exception on error. 
+ */ + @Test + public void testTLSConnectionPropertyTrueCorrectCastUrlAndPropertiesUsingSetPropertyWithDriverManager() + throws Exception { + final Driver driver = new ArrowFlightJdbcDriver(); + DriverManager.registerDriver(driver); + + Properties properties = new Properties(); + + properties.setProperty(ArrowFlightConnectionProperty.USER.camelName(), userTest); + properties.setProperty(ArrowFlightConnectionProperty.PASSWORD.camelName(), passTest); + properties.setProperty(ArrowFlightConnectionProperty.TLS_ROOT_CERTS.camelName(), tlsRootCertsPath); + properties.setProperty(ArrowFlightConnectionProperty.USE_ENCRYPTION.camelName(), "true"); + properties.setProperty(ArrowFlightConnectionProperty.CLIENT_CERTIFICATE.camelName(), clientMTlsCertPath); + properties.setProperty(ArrowFlightConnectionProperty.CLIENT_KEY.camelName(), clientMTlsKeyPath); + + final String jdbcUrl = String.format( + "jdbc:arrow-flight-sql://localhost:%s", + FLIGHT_SERVER_TEST_RULE.getPort()); + + try (Connection connection = DriverManager.getConnection(jdbcUrl, properties)) { + Assert.assertTrue(connection.isValid(0)); + } + } + + /** + * Check if an encrypted connection can be established successfully when connecting through the DriverManager using + * a connection url and properties with Object K-V pairs. + * + * @throws Exception on error. 
+ */ + @Test + public void testTLSConnectionPropertyTrueCorrectCastUrlAndPropertiesUsingPutWithDriverManager() + throws Exception { + final Driver driver = new ArrowFlightJdbcDriver(); + DriverManager.registerDriver(driver); + + Properties properties = new Properties(); + + properties.put(ArrowFlightConnectionProperty.USER.camelName(), userTest); + properties.put(ArrowFlightConnectionProperty.PASSWORD.camelName(), passTest); + properties.put(ArrowFlightConnectionProperty.USE_ENCRYPTION.camelName(), true); + properties.put(ArrowFlightConnectionProperty.TLS_ROOT_CERTS.camelName(), tlsRootCertsPath); + properties.put(ArrowFlightConnectionProperty.CLIENT_CERTIFICATE.camelName(), clientMTlsCertPath); + properties.put(ArrowFlightConnectionProperty.CLIENT_KEY.camelName(), clientMTlsKeyPath); + + final String jdbcUrl = String.format( + "jdbc:arrow-flight-sql://localhost:%s", + FLIGHT_SERVER_TEST_RULE.getPort()); + + try (Connection connection = DriverManager.getConnection(jdbcUrl, properties)) { + Assert.assertTrue(connection.isValid(0)); + } + } + + /** + * Check if an encrypted connection can be established successfully when connecting through the DriverManager using + * just a connection url and using 0 and 1 as ssl values. + * + * @throws Exception on error. 
+ */ + @Test + public void testTLSConnectionPropertyTrueIntegerCorrectCastUrlWithDriverManager() + throws Exception { + final Driver driver = new ArrowFlightJdbcDriver(); + DriverManager.registerDriver(driver); + + final String jdbcUrl = String.format( + "jdbc:arrow-flight-sql://localhost:%s?user=%s&password=%s" + + "&useEncryption=1&useSystemTrustStore=0&%s=%s&%s=%s&%s=%s", + FLIGHT_SERVER_TEST_RULE.getPort(), + userTest, + passTest, + ArrowFlightConnectionProperty.TLS_ROOT_CERTS.camelName(), + URLEncoder.encode(tlsRootCertsPath, "UTF-8"), + ArrowFlightConnectionProperty.CLIENT_CERTIFICATE.camelName(), + URLEncoder.encode(clientMTlsCertPath, "UTF-8"), + ArrowFlightConnectionProperty.CLIENT_KEY.camelName(), + URLEncoder.encode(clientMTlsKeyPath, "UTF-8")); + + try (Connection connection = DriverManager.getConnection(jdbcUrl)) { + Assert.assertTrue(connection.isValid(0)); + } + } + + /** + * Check if an encrypted connection can be established successfully when connecting through the DriverManager using + * a connection url and properties with String K-V pairs and using 0 and 1 as ssl values. + * + * @throws Exception on error. 
+ */ + @Test + public void testTLSConnectionPropertyTrueIntegerCorrectCastUrlAndPropertiesUsingSetPropertyWithDriverManager() + throws Exception { + final Driver driver = new ArrowFlightJdbcDriver(); + DriverManager.registerDriver(driver); + + Properties properties = new Properties(); + + properties.setProperty(ArrowFlightConnectionProperty.USER.camelName(), userTest); + properties.setProperty(ArrowFlightConnectionProperty.PASSWORD.camelName(), passTest); + properties.setProperty(ArrowFlightConnectionProperty.TLS_ROOT_CERTS.camelName(), tlsRootCertsPath); + properties.setProperty(ArrowFlightConnectionProperty.USE_ENCRYPTION.camelName(), "1"); + properties.setProperty(ArrowFlightConnectionProperty.CLIENT_CERTIFICATE.camelName(), clientMTlsCertPath); + properties.setProperty(ArrowFlightConnectionProperty.CLIENT_KEY.camelName(), clientMTlsKeyPath); + + final String jdbcUrl = String.format("jdbc:arrow-flight-sql://localhost:%s", FLIGHT_SERVER_TEST_RULE.getPort()); + + try (Connection connection = DriverManager.getConnection(jdbcUrl, properties)) { + Assert.assertTrue(connection.isValid(0)); + } + } + + /** + * Check if an encrypted connection can be established successfully when connecting through the DriverManager using + * a connection url and properties with Object K-V pairs and using 0 and 1 as ssl values. + * + * @throws Exception on error. 
+ */ + @Test + public void testTLSConnectionPropertyTrueIntegerCorrectCastUrlAndPropertiesUsingPutWithDriverManager() + throws Exception { + final Driver driver = new ArrowFlightJdbcDriver(); + DriverManager.registerDriver(driver); + + Properties properties = new Properties(); + + properties.put(ArrowFlightConnectionProperty.USER.camelName(), userTest); + properties.put(ArrowFlightConnectionProperty.PASSWORD.camelName(), passTest); + properties.put(ArrowFlightConnectionProperty.USE_ENCRYPTION.camelName(), 1); + properties.put(ArrowFlightConnectionProperty.TLS_ROOT_CERTS.camelName(), tlsRootCertsPath); + properties.put(ArrowFlightConnectionProperty.CLIENT_CERTIFICATE.camelName(), clientMTlsCertPath); + properties.put(ArrowFlightConnectionProperty.CLIENT_KEY.camelName(), clientMTlsKeyPath); + + final String jdbcUrl = String.format("jdbc:arrow-flight-sql://localhost:%s", + FLIGHT_SERVER_TEST_RULE.getPort()); + + try (Connection connection = DriverManager.getConnection(jdbcUrl, properties)) { + Assert.assertTrue(connection.isValid(0)); + } + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ConnectionTest.java b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ConnectionTest.java index bec0ff1e59752..357506b3d177c 100644 --- a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ConnectionTest.java +++ b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ConnectionTest.java @@ -20,6 +20,7 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; import java.net.URISyntaxException; import java.sql.Connection; @@ -100,7 +101,7 @@ public void testUnencryptedConnectionShouldOpenSuccessfullyWhenProvidedValidCred try (Connection connection = DriverManager.getConnection( "jdbc:arrow-flight-sql://" + FLIGHT_SERVER_TEST_RULE.getHost() + ":" + 
FLIGHT_SERVER_TEST_RULE.getPort(), properties)) { - assert connection.isValid(300); + Assert.assertTrue(connection.isValid(300)); } } @@ -122,11 +123,13 @@ public void testTokenOverridesUsernameAndPasswordAuth() { properties.put(ArrowFlightConnectionProperty.TOKEN.camelName(), "token"); properties.put("useEncryption", false); - SQLException e = assertThrows(SQLException.class, () -> - DriverManager.getConnection( - "jdbc:arrow-flight-sql://" + FLIGHT_SERVER_TEST_RULE.getHost() + ":" + - FLIGHT_SERVER_TEST_RULE.getPort(), - properties)); + SQLException e = assertThrows(SQLException.class, () -> { + try (Connection conn = DriverManager.getConnection( + "jdbc:arrow-flight-sql://" + FLIGHT_SERVER_TEST_RULE.getHost() + ":" + FLIGHT_SERVER_TEST_RULE.getPort(), + properties)) { + Assert.fail(); + } + }); assertTrue(e.getMessage().contains("UNAUTHENTICATED")); } @@ -145,7 +148,9 @@ public void testUnencryptedConnectionWithEmptyHost() properties.put("password", passTest); final String invalidUrl = "jdbc:arrow-flight-sql://"; - DriverManager.getConnection(invalidUrl, properties); + try (Connection conn = DriverManager.getConnection(invalidUrl, properties)) { + Assert.fail("Expected SQLException."); + } } /** @@ -161,6 +166,7 @@ public void testGetBasicClientAuthenticatedShouldOpenConnection() new ArrowFlightSqlClientHandler.Builder() .withHost(FLIGHT_SERVER_TEST_RULE.getHost()) .withPort(FLIGHT_SERVER_TEST_RULE.getPort()) + .withEncryption(false) .withUsername(userTest) .withPassword(passTest) .withBufferAllocator(allocator) @@ -191,7 +197,9 @@ public void testUnencryptedConnectionProvidingInvalidPort() final String invalidUrl = "jdbc:arrow-flight-sql://" + FLIGHT_SERVER_TEST_RULE.getHost() + ":" + 65537; - DriverManager.getConnection(invalidUrl, properties); + try (Connection conn = DriverManager.getConnection(invalidUrl, properties)) { + fail("Expected SQLException"); + } } /** @@ -206,6 +214,7 @@ public void testGetBasicClientNoAuthShouldOpenConnection() throws Exception 
{ new ArrowFlightSqlClientHandler.Builder() .withHost(FLIGHT_SERVER_TEST_RULE.getHost()) .withBufferAllocator(allocator) + .withEncryption(false) .build()) { assertNotNull(client); } @@ -228,7 +237,7 @@ public void testUnencryptedConnectionShouldOpenSuccessfullyWithoutAuthentication false); try (Connection connection = DriverManager .getConnection("jdbc:arrow-flight-sql://localhost:32010", properties)) { - assert connection.isValid(300); + Assert.assertTrue(connection.isValid(300)); } } @@ -271,14 +280,14 @@ public void testTLSConnectionPropertyFalseCorrectCastUrlWithDriverManager() thro final Driver driver = new ArrowFlightJdbcDriver(); DriverManager.registerDriver(driver); - Connection connection = DriverManager.getConnection( + try (Connection connection = DriverManager.getConnection( String.format( "jdbc:arrow-flight-sql://localhost:%s?user=%s&password=%s&useEncryption=false", FLIGHT_SERVER_TEST_RULE.getPort(), userTest, - passTest)); - Assert.assertTrue(connection.isValid(0)); - connection.close(); + passTest))) { + Assert.assertTrue(connection.isValid(0)); + } } /** @@ -301,13 +310,13 @@ public void testTLSConnectionPropertyFalseCorrectCastUrlAndPropertiesUsingSetPro passTest); properties.setProperty(ArrowFlightConnectionProperty.USE_ENCRYPTION.camelName(), "false"); - Connection connection = DriverManager.getConnection( + try (Connection connection = DriverManager.getConnection( String.format( "jdbc:arrow-flight-sql://localhost:%s", FLIGHT_SERVER_TEST_RULE.getPort()), - properties); - Assert.assertTrue(connection.isValid(0)); - connection.close(); + properties)) { + Assert.assertTrue(connection.isValid(0)); + } } /** @@ -329,13 +338,13 @@ public void testTLSConnectionPropertyFalseCorrectCastUrlAndPropertiesUsingPutWit passTest); properties.put(ArrowFlightConnectionProperty.USE_ENCRYPTION.camelName(), false); - Connection connection = DriverManager.getConnection( + try (Connection connection = DriverManager.getConnection( String.format( 
"jdbc:arrow-flight-sql://localhost:%s", FLIGHT_SERVER_TEST_RULE.getPort()), - properties); - Assert.assertTrue(connection.isValid(0)); - connection.close(); + properties)) { + Assert.assertTrue(connection.isValid(0)); + } } /** @@ -350,14 +359,14 @@ public void testTLSConnectionPropertyFalseIntegerCorrectCastUrlWithDriverManager final Driver driver = new ArrowFlightJdbcDriver(); DriverManager.registerDriver(driver); - Connection connection = DriverManager.getConnection( + try (Connection connection = DriverManager.getConnection( String.format( "jdbc:arrow-flight-sql://localhost:%s?user=%s&password=%s&useEncryption=0", FLIGHT_SERVER_TEST_RULE.getPort(), userTest, - passTest)); - Assert.assertTrue(connection.isValid(0)); - connection.close(); + passTest))) { + Assert.assertTrue(connection.isValid(0)); + } } /** @@ -380,13 +389,13 @@ public void testTLSConnectionPropertyFalseIntegerCorrectCastUrlAndPropertiesUsin passTest); properties.setProperty(ArrowFlightConnectionProperty.USE_ENCRYPTION.camelName(), "0"); - Connection connection = DriverManager.getConnection( + try (Connection connection = DriverManager.getConnection( String.format( "jdbc:arrow-flight-sql://localhost:%s", FLIGHT_SERVER_TEST_RULE.getPort()), - properties); - Assert.assertTrue(connection.isValid(0)); - connection.close(); + properties)) { + Assert.assertTrue(connection.isValid(0)); + } } /** @@ -409,13 +418,13 @@ public void testTLSConnectionPropertyFalseIntegerCorrectCastUrlAndPropertiesUsin passTest); properties.put(ArrowFlightConnectionProperty.USE_ENCRYPTION.camelName(), 0); - Connection connection = DriverManager.getConnection( + try (Connection connection = DriverManager.getConnection( String.format( "jdbc:arrow-flight-sql://localhost:%s", FLIGHT_SERVER_TEST_RULE.getPort()), - properties); - Assert.assertTrue(connection.isValid(0)); - connection.close(); + properties)) { + Assert.assertTrue(connection.isValid(0)); + } } /** @@ -430,15 +439,15 @@ public void 
testThreadPoolSizeConnectionPropertyCorrectCastUrlWithDriverManager( final Driver driver = new ArrowFlightJdbcDriver(); DriverManager.registerDriver(driver); - Connection connection = DriverManager.getConnection( + try (Connection connection = DriverManager.getConnection( String.format( "jdbc:arrow-flight-sql://localhost:%s?user=%s&password=%s&threadPoolSize=1&useEncryption=%s", FLIGHT_SERVER_TEST_RULE.getPort(), userTest, passTest, - false)); - Assert.assertTrue(connection.isValid(0)); - connection.close(); + false))) { + Assert.assertTrue(connection.isValid(0)); + } } /** @@ -462,13 +471,13 @@ public void testThreadPoolSizeConnectionPropertyCorrectCastUrlAndPropertiesUsing properties.setProperty(ArrowFlightConnectionProperty.THREAD_POOL_SIZE.camelName(), "1"); properties.put("useEncryption", false); - Connection connection = DriverManager.getConnection( + try (Connection connection = DriverManager.getConnection( String.format( "jdbc:arrow-flight-sql://localhost:%s", FLIGHT_SERVER_TEST_RULE.getPort()), - properties); - Assert.assertTrue(connection.isValid(0)); - connection.close(); + properties)) { + Assert.assertTrue(connection.isValid(0)); + } } /** @@ -492,13 +501,13 @@ public void testThreadPoolSizeConnectionPropertyCorrectCastUrlAndPropertiesUsing properties.put(ArrowFlightConnectionProperty.THREAD_POOL_SIZE.camelName(), 1); properties.put("useEncryption", false); - Connection connection = DriverManager.getConnection( + try (Connection connection = DriverManager.getConnection( String.format( "jdbc:arrow-flight-sql://localhost:%s", FLIGHT_SERVER_TEST_RULE.getPort()), - properties); - Assert.assertTrue(connection.isValid(0)); - connection.close(); + properties)) { + Assert.assertTrue(connection.isValid(0)); + } } /** @@ -513,15 +522,15 @@ public void testPasswordConnectionPropertyIntegerCorrectCastUrlWithDriverManager final Driver driver = new ArrowFlightJdbcDriver(); DriverManager.registerDriver(driver); - Connection connection = DriverManager.getConnection( + 
try (Connection connection = DriverManager.getConnection( String.format( "jdbc:arrow-flight-sql://localhost:%s?user=%s&password=%s&useEncryption=%s", FLIGHT_SERVER_TEST_RULE.getPort(), userTest, passTest, - false)); - Assert.assertTrue(connection.isValid(0)); - connection.close(); + false))) { + Assert.assertTrue(connection.isValid(0)); + } } /** @@ -544,13 +553,13 @@ public void testPasswordConnectionPropertyIntegerCorrectCastUrlAndPropertiesUsin passTest); properties.put("useEncryption", false); - Connection connection = DriverManager.getConnection( + try (Connection connection = DriverManager.getConnection( String.format( "jdbc:arrow-flight-sql://localhost:%s", FLIGHT_SERVER_TEST_RULE.getPort()), - properties); - Assert.assertTrue(connection.isValid(0)); - connection.close(); + properties)) { + Assert.assertTrue(connection.isValid(0)); + } } /** @@ -573,12 +582,12 @@ public void testPasswordConnectionPropertyIntegerCorrectCastUrlAndPropertiesUsin passTest); properties.put("useEncryption", false); - Connection connection = DriverManager.getConnection( + try (Connection connection = DriverManager.getConnection( String.format( "jdbc:arrow-flight-sql://localhost:%s", FLIGHT_SERVER_TEST_RULE.getPort()), - properties); - Assert.assertTrue(connection.isValid(0)); - connection.close(); + properties)) { + Assert.assertTrue(connection.isValid(0)); + } } } diff --git a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ConnectionTlsRootCertsTest.java b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ConnectionTlsRootCertsTest.java new file mode 100644 index 0000000000000..5579cf0cf5f54 --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ConnectionTlsRootCertsTest.java @@ -0,0 +1,356 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.driver.jdbc; + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertThrows; + +import java.net.URLEncoder; +import java.sql.Connection; +import java.sql.Driver; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.util.Properties; + +import org.apache.arrow.driver.jdbc.authentication.UserPasswordAuthentication; +import org.apache.arrow.driver.jdbc.client.ArrowFlightSqlClientHandler; +import org.apache.arrow.driver.jdbc.utils.ArrowFlightConnectionConfigImpl.ArrowFlightConnectionProperty; +import org.apache.arrow.driver.jdbc.utils.FlightSqlTestCertificates; +import org.apache.arrow.driver.jdbc.utils.MockFlightSqlProducer; +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.util.AutoCloseables; +import org.apache.calcite.avatica.org.apache.http.auth.UsernamePasswordCredentials; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; + + +/** + * Tests encrypted connections. 
+ */ +public class ConnectionTlsRootCertsTest { + + @ClassRule + public static final FlightServerTestRule FLIGHT_SERVER_TEST_RULE; + private static final String tlsRootCertsPath; + private static final String badTlsRootCertsPath; + private static final MockFlightSqlProducer PRODUCER = new MockFlightSqlProducer(); + private static final String userTest = "user1"; + private static final String passTest = "pass1"; + + static { + final FlightSqlTestCertificates.CertKeyPair + certKey = FlightSqlTestCertificates.exampleTlsCerts().get(0); + + tlsRootCertsPath = certKey.cert.getPath(); + + badTlsRootCertsPath = certKey.cert.getPath() + ".bad"; + + UserPasswordAuthentication authentication = new UserPasswordAuthentication.Builder() + .user(userTest, passTest) + .build(); + + FLIGHT_SERVER_TEST_RULE = new FlightServerTestRule.Builder() + .authentication(authentication) + .useEncryption(certKey.cert, certKey.key) + .producer(PRODUCER) + .build(); + } + + private BufferAllocator allocator; + + @Before + public void setUp() throws Exception { + allocator = new RootAllocator(Long.MAX_VALUE); + } + + @After + public void tearDown() throws Exception { + allocator.getChildAllocators().forEach(BufferAllocator::close); + AutoCloseables.close(allocator); + } + + /** + * Try to instantiate an encrypted FlightClient. + * + * @throws Exception on error. 
+ */ + @Test + public void testGetEncryptedClientAuthenticated() throws Exception { + final UsernamePasswordCredentials credentials = new UsernamePasswordCredentials( + userTest, passTest); + + try (ArrowFlightSqlClientHandler client = + new ArrowFlightSqlClientHandler.Builder() + .withHost(FLIGHT_SERVER_TEST_RULE.getHost()) + .withPort(FLIGHT_SERVER_TEST_RULE.getPort()) + .withUsername(credentials.getUserName()) + .withPassword(credentials.getPassword()) + .withTlsRootCertificates(tlsRootCertsPath) + .withBufferAllocator(allocator) + .withEncryption(true) + .build()) { + assertNotNull(client); + } + } + + /** + * Try to instantiate an encrypted FlightClient providing a bad TLS Root Certs Path. It's expected to + * receive the SQLException. + */ + @Test + public void testGetEncryptedClientWithNoCertificateOnKeyStore() { + assertThrows(SQLException.class, () -> { + try (ArrowFlightSqlClientHandler handler = new ArrowFlightSqlClientHandler.Builder() + .withHost(FLIGHT_SERVER_TEST_RULE.getHost()) + .withTlsRootCertificates(badTlsRootCertsPath) + .withBufferAllocator(allocator) + .withEncryption(true) + .build()) { + Assert.fail(); + } + }); + } + + /** + * Try to instantiate an encrypted FlightClient without credentials. + * + * @throws Exception on error. + */ + @Test + public void testGetNonAuthenticatedEncryptedClientNoAuth() throws Exception { + try (ArrowFlightSqlClientHandler client = + new ArrowFlightSqlClientHandler.Builder() + .withHost(FLIGHT_SERVER_TEST_RULE.getHost()) + .withTlsRootCertificates(tlsRootCertsPath) + .withBufferAllocator(allocator) + .withEncryption(true) + .build()) { + assertNotNull(client); + } + } + + /** + * Check if an encrypted connection can be established successfully when the + * provided valid credentials and a valid TLS Root Certs path. + * + * @throws Exception on error. 
+ */ + @Test + public void testGetEncryptedConnectionWithValidCredentialsAndTlsRootsPath() throws Exception { + final Properties properties = new Properties(); + + properties.put(ArrowFlightConnectionProperty.HOST.camelName(), "localhost"); + properties.put(ArrowFlightConnectionProperty.PORT.camelName(), + FLIGHT_SERVER_TEST_RULE.getPort()); + properties.put(ArrowFlightConnectionProperty.USER.camelName(), + userTest); + properties.put(ArrowFlightConnectionProperty.PASSWORD.camelName(), + passTest); + properties.put(ArrowFlightConnectionProperty.TLS_ROOT_CERTS.camelName(), + tlsRootCertsPath); + + final ArrowFlightJdbcDataSource dataSource = + ArrowFlightJdbcDataSource.createNewDataSource(properties); + try (final Connection connection = dataSource.getConnection()) { + Assert.assertTrue(connection.isValid(300)); + } + } + + /** + * Check if an encrypted connection can be established successfully when not + * providing authentication. + * + * @throws Exception on error. + */ + @Test + public void testGetNonAuthenticatedEncryptedConnection() throws Exception { + final Properties properties = new Properties(); + + properties.put(ArrowFlightConnectionProperty.HOST.camelName(), FLIGHT_SERVER_TEST_RULE.getHost()); + properties.put(ArrowFlightConnectionProperty.PORT.camelName(), FLIGHT_SERVER_TEST_RULE.getPort()); + properties.put(ArrowFlightConnectionProperty.USE_ENCRYPTION.camelName(), true); + properties.put(ArrowFlightConnectionProperty.TLS_ROOT_CERTS.camelName(), tlsRootCertsPath); + + final ArrowFlightJdbcDataSource dataSource = ArrowFlightJdbcDataSource.createNewDataSource(properties); + try (final Connection connection = dataSource.getConnection()) { + Assert.assertTrue(connection.isValid(300)); + } + } + + /** + * Check if an encrypted connection can be established successfully when connecting through + * the DriverManager using just a connection url. + * + * @throws Exception on error. 
+ */ + @Test + public void testTLSConnectionPropertyTrueCorrectCastUrlWithDriverManager() throws Exception { + final Driver driver = new ArrowFlightJdbcDriver(); + DriverManager.registerDriver(driver); + + try (final Connection connection = DriverManager.getConnection( + String.format( + "jdbc:arrow-flight-sql://localhost:%s?user=%s&password=%s" + + "&useEncryption=true&%s=%s", + FLIGHT_SERVER_TEST_RULE.getPort(), + userTest, + passTest, + ArrowFlightConnectionProperty.TLS_ROOT_CERTS.camelName(), + URLEncoder.encode(tlsRootCertsPath, "UTF-8")))) { + Assert.assertTrue(connection.isValid(0)); + } + } + + /** + * Check if an encrypted connection can be established successfully when connecting through the DriverManager using + * a connection url and properties with String K-V pairs. + * + * @throws Exception on error. + */ + @Test + public void testTLSConnectionPropertyTrueCorrectCastUrlAndPropertiesUsingSetPropertyWithDriverManager() + throws Exception { + final Driver driver = new ArrowFlightJdbcDriver(); + DriverManager.registerDriver(driver); + + Properties properties = new Properties(); + + properties.setProperty(ArrowFlightConnectionProperty.USER.camelName(), userTest); + properties.setProperty(ArrowFlightConnectionProperty.PASSWORD.camelName(), passTest); + properties.setProperty(ArrowFlightConnectionProperty.TLS_ROOT_CERTS.camelName(), tlsRootCertsPath); + properties.setProperty(ArrowFlightConnectionProperty.USE_ENCRYPTION.camelName(), "true"); + + try (final Connection connection = DriverManager.getConnection( + String.format( + "jdbc:arrow-flight-sql://localhost:%s", + FLIGHT_SERVER_TEST_RULE.getPort()), + properties)) { + Assert.assertTrue(connection.isValid(0)); + } + } + + /** + * Check if an encrypted connection can be established successfully when connecting through the DriverManager using + * a connection url and properties with Object K-V pairs. + * + * @throws Exception on error. 
+ */ + @Test + public void testTLSConnectionPropertyTrueCorrectCastUrlAndPropertiesUsingPutWithDriverManager() + throws Exception { + final Driver driver = new ArrowFlightJdbcDriver(); + DriverManager.registerDriver(driver); + + Properties properties = new Properties(); + + properties.put(ArrowFlightConnectionProperty.USER.camelName(), userTest); + properties.put(ArrowFlightConnectionProperty.PASSWORD.camelName(), passTest); + properties.put(ArrowFlightConnectionProperty.USE_ENCRYPTION.camelName(), true); + properties.put(ArrowFlightConnectionProperty.TLS_ROOT_CERTS.camelName(), tlsRootCertsPath); + + try (final Connection connection = DriverManager.getConnection( + String.format( + "jdbc:arrow-flight-sql://localhost:%s", + FLIGHT_SERVER_TEST_RULE.getPort()), + properties)) { + Assert.assertTrue(connection.isValid(0)); + } + } + + /** + * Check if an encrypted connection can be established successfully when connecting through the DriverManager using + * just a connection url and using 0 and 1 as ssl values. + * + * @throws Exception on error. + */ + @Test + public void testTLSConnectionPropertyTrueIntegerCorrectCastUrlWithDriverManager() + throws Exception { + final Driver driver = new ArrowFlightJdbcDriver(); + DriverManager.registerDriver(driver); + + try (final Connection connection = DriverManager.getConnection( + String.format( + "jdbc:arrow-flight-sql://localhost:%s?user=%s&password=%s" + + "&useEncryption=1&useSystemTrustStore=0&%s=%s", + FLIGHT_SERVER_TEST_RULE.getPort(), + userTest, + passTest, + ArrowFlightConnectionProperty.TLS_ROOT_CERTS.camelName(), + URLEncoder.encode(tlsRootCertsPath, "UTF-8")))) { + Assert.assertTrue(connection.isValid(0)); + } + } + + /** + * Check if an encrypted connection can be established successfully when connecting through the DriverManager using + * a connection url and properties with String K-V pairs and using 0 and 1 as ssl values. + * + * @throws Exception on error. 
+ */ + @Test + public void testTLSConnectionPropertyTrueIntegerCorrectCastUrlAndPropertiesUsingSetPropertyWithDriverManager() + throws Exception { + final Driver driver = new ArrowFlightJdbcDriver(); + DriverManager.registerDriver(driver); + + Properties properties = new Properties(); + + properties.setProperty(ArrowFlightConnectionProperty.USER.camelName(), userTest); + properties.setProperty(ArrowFlightConnectionProperty.PASSWORD.camelName(), passTest); + properties.setProperty(ArrowFlightConnectionProperty.TLS_ROOT_CERTS.camelName(), tlsRootCertsPath); + properties.setProperty(ArrowFlightConnectionProperty.USE_ENCRYPTION.camelName(), "1"); + + try (final Connection connection = DriverManager.getConnection( + String.format("jdbc:arrow-flight-sql://localhost:%s", FLIGHT_SERVER_TEST_RULE.getPort()), + properties)) { + Assert.assertTrue(connection.isValid(0)); + } + } + + /** + * Check if an encrypted connection can be established successfully when connecting through the DriverManager using + * a connection url and properties with Object K-V pairs and using 0 and 1 as ssl values. + * + * @throws Exception on error. 
+ */ + @Test + public void testTLSConnectionPropertyTrueIntegerCorrectCastUrlAndPropertiesUsingPutWithDriverManager() + throws Exception { + final Driver driver = new ArrowFlightJdbcDriver(); + DriverManager.registerDriver(driver); + + Properties properties = new Properties(); + + properties.put(ArrowFlightConnectionProperty.USER.camelName(), userTest); + properties.put(ArrowFlightConnectionProperty.PASSWORD.camelName(), passTest); + properties.put(ArrowFlightConnectionProperty.USE_ENCRYPTION.camelName(), 1); + properties.put(ArrowFlightConnectionProperty.TLS_ROOT_CERTS.camelName(), tlsRootCertsPath); + + try (final Connection connection = DriverManager.getConnection( + String.format("jdbc:arrow-flight-sql://localhost:%s", + FLIGHT_SERVER_TEST_RULE.getPort()), + properties)) { + Assert.assertTrue(connection.isValid(0)); + } + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ConnectionTlsTest.java b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ConnectionTlsTest.java index 95d591766a836..7e160f3f0c385 100644 --- a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ConnectionTlsTest.java +++ b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ConnectionTlsTest.java @@ -127,6 +127,7 @@ public void testGetEncryptedClientAuthenticated() throws Exception { new ArrowFlightSqlClientHandler.Builder() .withHost(FLIGHT_SERVER_TEST_RULE.getHost()) .withPort(FLIGHT_SERVER_TEST_RULE.getPort()) + .withSystemTrustStore(false) .withUsername(credentials.getUserName()) .withPassword(credentials.getPassword()) .withTrustStorePath(trustStorePath) @@ -153,6 +154,7 @@ public void testGetEncryptedClientWithNoCertificateOnKeyStore() throws Exception .withHost(FLIGHT_SERVER_TEST_RULE.getHost()) .withTrustStorePath(noCertificateKeyStorePath) .withTrustStorePassword(noCertificateKeyStorePassword) + .withSystemTrustStore(false) .withBufferAllocator(allocator) 
.withEncryption(true) .build()) { @@ -170,6 +172,7 @@ public void testGetNonAuthenticatedEncryptedClientNoAuth() throws Exception { try (ArrowFlightSqlClientHandler client = new ArrowFlightSqlClientHandler.Builder() .withHost(FLIGHT_SERVER_TEST_RULE.getHost()) + .withSystemTrustStore(false) .withTrustStorePath(trustStorePath) .withTrustStorePassword(trustStorePass) .withBufferAllocator(allocator) @@ -192,6 +195,7 @@ public void testGetEncryptedClientWithKeyStoreBadPasswordAndNoAuth() throws Exce try (ArrowFlightSqlClientHandler ignored = new ArrowFlightSqlClientHandler.Builder() .withHost(FLIGHT_SERVER_TEST_RULE.getHost()) + .withSystemTrustStore(false) .withTrustStorePath(trustStorePath) .withTrustStorePassword(keyStoreBadPassword) .withBufferAllocator(allocator) @@ -225,7 +229,7 @@ public void testGetEncryptedConnectionWithValidCredentialsAndKeyStore() throws E final ArrowFlightJdbcDataSource dataSource = ArrowFlightJdbcDataSource.createNewDataSource(properties); try (final Connection connection = dataSource.getConnection()) { - assert connection.isValid(300); + Assert.assertTrue(connection.isValid(300)); } } @@ -276,7 +280,7 @@ public void testGetNonAuthenticatedEncryptedConnection() throws Exception { final ArrowFlightJdbcDataSource dataSource = ArrowFlightJdbcDataSource.createNewDataSource(properties); try (final Connection connection = dataSource.getConnection()) { - assert connection.isValid(300); + Assert.assertTrue(connection.isValid(300)); } } @@ -291,7 +295,7 @@ public void testTLSConnectionPropertyTrueCorrectCastUrlWithDriverManager() throw final Driver driver = new ArrowFlightJdbcDriver(); DriverManager.registerDriver(driver); - final Connection connection = DriverManager.getConnection( + try (final Connection connection = DriverManager.getConnection( String.format( "jdbc:arrow-flight-sql://localhost:%s?user=%s&password=%s" + "&useEncryption=true&useSystemTrustStore=false&%s=%s&%s=%s", @@ -301,9 +305,9 @@ public void 
testTLSConnectionPropertyTrueCorrectCastUrlWithDriverManager() throw ArrowFlightConnectionProperty.TRUST_STORE.camelName(), URLEncoder.encode(trustStorePath, "UTF-8"), ArrowFlightConnectionProperty.TRUST_STORE_PASSWORD.camelName(), - URLEncoder.encode(trustStorePass, "UTF-8"))); - Assert.assertTrue(connection.isValid(0)); - connection.close(); + URLEncoder.encode(trustStorePass, "UTF-8")))) { + Assert.assertTrue(connection.isValid(0)); + } } /** @@ -327,13 +331,13 @@ public void testTLSConnectionPropertyTrueCorrectCastUrlAndPropertiesUsingSetProp properties.setProperty(ArrowFlightConnectionProperty.USE_ENCRYPTION.camelName(), "true"); properties.setProperty(ArrowFlightConnectionProperty.USE_SYSTEM_TRUST_STORE.camelName(), "false"); - final Connection connection = DriverManager.getConnection( + try (final Connection connection = DriverManager.getConnection( String.format( "jdbc:arrow-flight-sql://localhost:%s", FLIGHT_SERVER_TEST_RULE.getPort()), - properties); - Assert.assertTrue(connection.isValid(0)); - connection.close(); + properties)) { + Assert.assertTrue(connection.isValid(0)); + } } /** @@ -357,13 +361,13 @@ public void testTLSConnectionPropertyTrueCorrectCastUrlAndPropertiesUsingPutWith properties.put(ArrowFlightConnectionProperty.TRUST_STORE.camelName(), trustStorePath); properties.put(ArrowFlightConnectionProperty.TRUST_STORE_PASSWORD.camelName(), trustStorePass); - final Connection connection = DriverManager.getConnection( + try (final Connection connection = DriverManager.getConnection( String.format( "jdbc:arrow-flight-sql://localhost:%s", FLIGHT_SERVER_TEST_RULE.getPort()), - properties); - Assert.assertTrue(connection.isValid(0)); - connection.close(); + properties)) { + Assert.assertTrue(connection.isValid(0)); + } } /** @@ -378,7 +382,7 @@ public void testTLSConnectionPropertyTrueIntegerCorrectCastUrlWithDriverManager( final Driver driver = new ArrowFlightJdbcDriver(); DriverManager.registerDriver(driver); - final Connection connection = 
DriverManager.getConnection( + try (final Connection connection = DriverManager.getConnection( String.format( "jdbc:arrow-flight-sql://localhost:%s?user=%s&password=%s" + "&useEncryption=1&useSystemTrustStore=0&%s=%s&%s=%s", @@ -388,9 +392,9 @@ public void testTLSConnectionPropertyTrueIntegerCorrectCastUrlWithDriverManager( ArrowFlightConnectionProperty.TRUST_STORE.camelName(), URLEncoder.encode(trustStorePath, "UTF-8"), ArrowFlightConnectionProperty.TRUST_STORE_PASSWORD.camelName(), - URLEncoder.encode(trustStorePass, "UTF-8"))); - Assert.assertTrue(connection.isValid(0)); - connection.close(); + URLEncoder.encode(trustStorePass, "UTF-8")))) { + Assert.assertTrue(connection.isValid(0)); + } } /** @@ -414,11 +418,11 @@ public void testTLSConnectionPropertyTrueIntegerCorrectCastUrlAndPropertiesUsing properties.setProperty(ArrowFlightConnectionProperty.USE_ENCRYPTION.camelName(), "1"); properties.setProperty(ArrowFlightConnectionProperty.USE_SYSTEM_TRUST_STORE.camelName(), "0"); - final Connection connection = DriverManager.getConnection( + try (final Connection connection = DriverManager.getConnection( String.format("jdbc:arrow-flight-sql://localhost:%s", FLIGHT_SERVER_TEST_RULE.getPort()), - properties); - Assert.assertTrue(connection.isValid(0)); - connection.close(); + properties)) { + Assert.assertTrue(connection.isValid(0)); + } } /** @@ -442,11 +446,11 @@ public void testTLSConnectionPropertyTrueIntegerCorrectCastUrlAndPropertiesUsing properties.put(ArrowFlightConnectionProperty.TRUST_STORE.camelName(), trustStorePath); properties.put(ArrowFlightConnectionProperty.TRUST_STORE_PASSWORD.camelName(), trustStorePass); - final Connection connection = DriverManager.getConnection( + try (final Connection connection = DriverManager.getConnection( String.format("jdbc:arrow-flight-sql://localhost:%s", FLIGHT_SERVER_TEST_RULE.getPort()), - properties); - Assert.assertTrue(connection.isValid(0)); - connection.close(); + properties)) { + 
Assert.assertTrue(connection.isValid(0)); + } } } diff --git a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/FlightServerTestRule.java b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/FlightServerTestRule.java index 733145892ec3e..39eb0a29866f1 100644 --- a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/FlightServerTestRule.java +++ b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/FlightServerTestRule.java @@ -55,6 +55,9 @@ * and interact with it. */ public class FlightServerTestRule implements TestRule, AutoCloseable { + public static final String DEFAULT_USER = "flight-test-user"; + public static final String DEFAULT_PASSWORD = "flight-test-password"; + private static final Logger LOGGER = LoggerFactory.getLogger(FlightServerTestRule.class); private final Properties properties; @@ -63,6 +66,7 @@ public class FlightServerTestRule implements TestRule, AutoCloseable { private final FlightSqlProducer producer; private final Authentication authentication; private final CertKeyPair certKeyPair; + private final File mTlsCACert; private final MiddlewareCookie.Factory middlewareCookieFactory = new MiddlewareCookie.Factory(); @@ -71,13 +75,15 @@ private FlightServerTestRule(final Properties properties, final BufferAllocator allocator, final FlightSqlProducer producer, final Authentication authentication, - final CertKeyPair certKeyPair) { + final CertKeyPair certKeyPair, + final File mTlsCACert) { this.properties = Preconditions.checkNotNull(properties); this.config = Preconditions.checkNotNull(config); this.allocator = Preconditions.checkNotNull(allocator); this.producer = Preconditions.checkNotNull(producer); this.authentication = authentication; this.certKeyPair = certKeyPair; + this.mTlsCACert = mTlsCACert; } /** @@ -89,7 +95,7 @@ private FlightServerTestRule(final Properties properties, public static FlightServerTestRule createStandardTestRule(final 
FlightSqlProducer producer) { UserPasswordAuthentication authentication = new UserPasswordAuthentication.Builder() - .user("flight-test-user", "flight-test-password") + .user(DEFAULT_USER, DEFAULT_PASSWORD) .build(); return new Builder() @@ -142,6 +148,9 @@ private FlightServer initiateServer(Location location) throws IOException { if (certKeyPair != null) { builder.useTls(certKeyPair.cert, certKeyPair.key); } + if (mTlsCACert != null) { + builder.useMTlsClientVerification(mTlsCACert); + } return builder.build(); } @@ -212,6 +221,7 @@ public static final class Builder { private FlightSqlProducer producer; private Authentication authentication; private CertKeyPair certKeyPair; + private File mTlsCACert; public Builder() { this.properties = new Properties(); @@ -254,6 +264,17 @@ public Builder useEncryption(final File certChain, final File key) { return this; } + /** + * Enable Client Verification via mTLS on the server. + * + * @param mTlsCACert The CA certificate to use for client verification. + * @return the Builder. + */ + public Builder useMTlsClientVerification(final File mTlsCACert) { + this.mTlsCACert = mTlsCACert; + return this; + } + /** * Builds the {@link FlightServerTestRule} using the provided values. 
* @@ -262,7 +283,7 @@ public Builder useEncryption(final File certChain, final File key) { public FlightServerTestRule build() { authentication.populateProperties(properties); return new FlightServerTestRule(properties, new ArrowFlightConnectionConfigImpl(properties), - new RootAllocator(Long.MAX_VALUE), producer, authentication, certKeyPair); + new RootAllocator(Long.MAX_VALUE), producer, authentication, certKeyPair, mTlsCACert); } } diff --git a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ResultSetTest.java b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ResultSetTest.java index b3002ec58416e..52910812fb4fb 100644 --- a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ResultSetTest.java +++ b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ResultSetTest.java @@ -19,6 +19,7 @@ import static java.lang.String.format; import static java.util.Collections.synchronizedSet; +import static org.apache.arrow.flight.Location.forGrpcInsecure; import static org.hamcrest.CoreMatchers.allOf; import static org.hamcrest.CoreMatchers.anyOf; import static org.hamcrest.CoreMatchers.containsString; @@ -29,16 +30,32 @@ import static org.junit.Assert.fail; import java.sql.Connection; +import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.SQLTimeoutException; import java.sql.Statement; +import java.util.ArrayList; +import java.util.Arrays; import java.util.HashSet; +import java.util.List; import java.util.Random; import java.util.Set; import java.util.concurrent.CountDownLatch; import org.apache.arrow.driver.jdbc.utils.CoreMockedSqlProducers; +import org.apache.arrow.driver.jdbc.utils.PartitionedFlightSqlProducer; +import org.apache.arrow.flight.FlightEndpoint; +import org.apache.arrow.flight.FlightProducer; +import org.apache.arrow.flight.FlightServer; +import org.apache.arrow.flight.Ticket; +import 
org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.IntVector; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.types.pojo.Schema; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.ClassRule; @@ -144,9 +161,10 @@ public void testShouldRunSelectQuerySettingMaxRowLimit() throws Exception { @Test(expected = SQLException.class) public void testShouldThrowExceptionUponAttemptingToExecuteAnInvalidSelectQuery() throws Exception { - Statement statement = connection.createStatement(); - statement.executeQuery("SELECT * FROM SHOULD-FAIL"); - fail(); + try (Statement statement = connection.createStatement(); + ResultSet result = statement.executeQuery("SELECT * FROM SHOULD-FAIL")) { + fail(); + } } /** @@ -200,14 +218,15 @@ public void testColumnCountShouldRemainConsistentForResultSetThroughoutEntireDur */ @Test public void testShouldCloseStatementWhenIsCloseOnCompletion() throws Exception { - Statement statement = connection.createStatement(); - ResultSet resultSet = statement.executeQuery(CoreMockedSqlProducers.LEGACY_REGULAR_SQL_CMD); + try (Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(CoreMockedSqlProducers.LEGACY_REGULAR_SQL_CMD)) { - statement.closeOnCompletion(); + statement.closeOnCompletion(); - resultSetNextUntilDone(resultSet); + resultSetNextUntilDone(resultSet); - collector.checkThat(statement.isClosed(), is(true)); + collector.checkThat(statement.isClosed(), is(true)); + } } /** @@ -218,16 +237,17 @@ public void testShouldCloseStatementWhenIsCloseOnCompletion() throws Exception { */ @Test public void testShouldCloseStatementWhenIsCloseOnCompletionWithMaxRowsLimit() throws Exception { - Statement statement = connection.createStatement(); - ResultSet resultSet = 
statement.executeQuery(CoreMockedSqlProducers.LEGACY_REGULAR_SQL_CMD); + try (Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(CoreMockedSqlProducers.LEGACY_REGULAR_SQL_CMD)) { - final long maxRowsLimit = 3; - statement.setLargeMaxRows(maxRowsLimit); - statement.closeOnCompletion(); + final long maxRowsLimit = 3; + statement.setLargeMaxRows(maxRowsLimit); + statement.closeOnCompletion(); - resultSetNextUntilDone(resultSet); + resultSetNextUntilDone(resultSet); - collector.checkThat(statement.isClosed(), is(true)); + collector.checkThat(statement.isClosed(), is(true)); + } } /** @@ -368,9 +388,85 @@ public void testFlightStreamsQueryShouldNotTimeout() throws SQLException { final int timeoutValue = 5; try (Statement statement = connection.createStatement()) { statement.setQueryTimeout(timeoutValue); - ResultSet resultSet = statement.executeQuery(query); - CoreMockedSqlProducers.assertLegacyRegularSqlResultSet(resultSet, collector); - resultSet.close(); + try (ResultSet resultSet = statement.executeQuery(query)) { + CoreMockedSqlProducers.assertLegacyRegularSqlResultSet(resultSet, collector); + } + } + } + + @Test + public void testPartitionedFlightServer() throws Exception { + // Arrange + final Schema schema = new Schema( + Arrays.asList(Field.nullablePrimitive("int_column", new ArrowType.Int(32, true)))); + try (BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE); + VectorSchemaRoot firstPartition = VectorSchemaRoot.create(schema, allocator); + VectorSchemaRoot secondPartition = VectorSchemaRoot.create(schema, allocator)) { + firstPartition.setRowCount(1); + ((IntVector) firstPartition.getVector(0)).set(0, 1); + secondPartition.setRowCount(1); + ((IntVector) secondPartition.getVector(0)).set(0, 2); + + // Construct the data-only nodes first. 
+ FlightProducer firstProducer = new PartitionedFlightSqlProducer.DataOnlyFlightSqlProducer( + new Ticket("first".getBytes()), firstPartition); + FlightProducer secondProducer = new PartitionedFlightSqlProducer.DataOnlyFlightSqlProducer( + new Ticket("second".getBytes()), secondPartition); + + final FlightServer.Builder firstBuilder = FlightServer.builder( + allocator, forGrpcInsecure("localhost", 0), firstProducer); + + final FlightServer.Builder secondBuilder = FlightServer.builder( + allocator, forGrpcInsecure("localhost", 0), secondProducer); + + // Run the data-only nodes so that we can get the Locations they are running at. + try (FlightServer firstServer = firstBuilder.build(); + FlightServer secondServer = secondBuilder.build()) { + firstServer.start(); + secondServer.start(); + final FlightEndpoint firstEndpoint = + new FlightEndpoint(new Ticket("first".getBytes()), firstServer.getLocation()); + + final FlightEndpoint secondEndpoint = + new FlightEndpoint(new Ticket("second".getBytes()), secondServer.getLocation()); + + // Finally start the root node. 
+ try (final PartitionedFlightSqlProducer rootProducer = new PartitionedFlightSqlProducer( + schema, firstEndpoint, secondEndpoint); + FlightServer rootServer = FlightServer.builder( + allocator, forGrpcInsecure("localhost", 0), rootProducer) + .build() + .start(); + Connection newConnection = DriverManager.getConnection(String.format( + "jdbc:arrow-flight-sql://%s:%d/?useEncryption=false", + rootServer.getLocation().getUri().getHost(), rootServer.getPort())); + Statement newStatement = newConnection.createStatement(); + // Act + ResultSet result = newStatement.executeQuery("Select partitioned_data")) { + List resultData = new ArrayList<>(); + while (result.next()) { + resultData.add(result.getInt(1)); + } + + // Assert + assertEquals(firstPartition.getRowCount() + secondPartition.getRowCount(), resultData.size()); + assertTrue(resultData.contains(((IntVector) firstPartition.getVector(0)).get(0))); + assertTrue(resultData.contains(((IntVector) secondPartition.getVector(0)).get(0))); + } + } + } + } + + @Test + public void testShouldRunSelectQueryWithEmptyVectorsEmbedded() throws Exception { + try (Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery( + CoreMockedSqlProducers.LEGACY_REGULAR_WITH_EMPTY_SQL_CMD)) { + long rowCount = 0; + while (resultSet.next()) { + ++rowCount; + } + assertEquals(2, rowCount); } } } diff --git a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/accessor/impl/calendar/ArrowFlightJdbcIntervalVectorAccessorTest.java b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/accessor/impl/calendar/ArrowFlightJdbcIntervalVectorAccessorTest.java index ea228692202a7..322b7d40bd6e1 100644 --- a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/accessor/impl/calendar/ArrowFlightJdbcIntervalVectorAccessorTest.java +++ 
b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/accessor/impl/calendar/ArrowFlightJdbcIntervalVectorAccessorTest.java @@ -21,7 +21,6 @@ import static org.apache.arrow.driver.jdbc.utils.IntervalStringUtils.formatIntervalYear; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; -import static org.joda.time.Period.parse; import java.time.Duration; import java.time.Period; @@ -142,57 +141,62 @@ private String getStringOnVector(ValueVector vector, int index) { if (object == null) { return null; } else if (vector instanceof IntervalDayVector) { - return formatIntervalDay(parse(object)); + return formatIntervalDay(Duration.parse(object)); } else if (vector instanceof IntervalYearVector) { - return formatIntervalYear(parse(object)); + return formatIntervalYear(Period.parse(object)); } return null; } @Test public void testShouldGetIntervalYear( ) { - Assert.assertEquals("-002-00", formatIntervalYear(parse("P-2Y"))); - Assert.assertEquals("-001-01", formatIntervalYear(parse("P-1Y-1M"))); - Assert.assertEquals("-001-02", formatIntervalYear(parse("P-1Y-2M"))); - Assert.assertEquals("-002-03", formatIntervalYear(parse("P-2Y-3M"))); - Assert.assertEquals("-002-04", formatIntervalYear(parse("P-2Y-4M"))); - Assert.assertEquals("-011-01", formatIntervalYear(parse("P-11Y-1M"))); - Assert.assertEquals("+002-00", formatIntervalYear(parse("P+2Y"))); - Assert.assertEquals("+001-01", formatIntervalYear(parse("P+1Y1M"))); - Assert.assertEquals("+001-02", formatIntervalYear(parse("P+1Y2M"))); - Assert.assertEquals("+002-03", formatIntervalYear(parse("P+2Y3M"))); - Assert.assertEquals("+002-04", formatIntervalYear(parse("P+2Y4M"))); - Assert.assertEquals("+011-01", formatIntervalYear(parse("P+11Y1M"))); + Assert.assertEquals("-002-00", formatIntervalYear(Period.parse("P-2Y"))); + Assert.assertEquals("-001-01", formatIntervalYear(Period.parse("P-1Y-1M"))); + Assert.assertEquals("-001-02", 
formatIntervalYear(Period.parse("P-1Y-2M"))); + Assert.assertEquals("-002-03", formatIntervalYear(Period.parse("P-2Y-3M"))); + Assert.assertEquals("-002-04", formatIntervalYear(Period.parse("P-2Y-4M"))); + Assert.assertEquals("-011-01", formatIntervalYear(Period.parse("P-11Y-1M"))); + Assert.assertEquals("+002-00", formatIntervalYear(Period.parse("P+2Y"))); + Assert.assertEquals("+001-01", formatIntervalYear(Period.parse("P+1Y1M"))); + Assert.assertEquals("+001-02", formatIntervalYear(Period.parse("P+1Y2M"))); + Assert.assertEquals("+002-03", formatIntervalYear(Period.parse("P+2Y3M"))); + Assert.assertEquals("+002-04", formatIntervalYear(Period.parse("P+2Y4M"))); + Assert.assertEquals("+011-01", formatIntervalYear(Period.parse("P+11Y1M"))); } @Test public void testShouldGetIntervalDay( ) { - Assert.assertEquals("-001 00:00:00.000", formatIntervalDay(parse("PT-24H"))); - Assert.assertEquals("+001 00:00:00.000", formatIntervalDay(parse("PT+24H"))); - Assert.assertEquals("-000 01:00:00.000", formatIntervalDay(parse("PT-1H"))); - Assert.assertEquals("-000 01:00:00.001", formatIntervalDay(parse("PT-1H-0M-00.001S"))); - Assert.assertEquals("-000 01:01:01.000", formatIntervalDay(parse("PT-1H-1M-1S"))); - Assert.assertEquals("-000 02:02:02.002", formatIntervalDay(parse("PT-2H-2M-02.002S"))); - Assert.assertEquals("-000 23:59:59.999", formatIntervalDay(parse("PT-23H-59M-59.999S"))); - Assert.assertEquals("-000 11:59:00.100", formatIntervalDay(parse("PT-11H-59M-00.100S"))); - Assert.assertEquals("-000 05:02:03.000", formatIntervalDay(parse("PT-5H-2M-3S"))); - Assert.assertEquals("-000 22:22:22.222", formatIntervalDay(parse("PT-22H-22M-22.222S"))); - Assert.assertEquals("+000 01:00:00.000", formatIntervalDay(parse("PT+1H"))); - Assert.assertEquals("+000 01:00:00.001", formatIntervalDay(parse("PT+1H0M00.001S"))); - Assert.assertEquals("+000 01:01:01.000", formatIntervalDay(parse("PT+1H1M1S"))); - Assert.assertEquals("+000 02:02:02.002", 
formatIntervalDay(parse("PT+2H2M02.002S"))); - Assert.assertEquals("+000 23:59:59.999", formatIntervalDay(parse("PT+23H59M59.999S"))); - Assert.assertEquals("+000 11:59:00.100", formatIntervalDay(parse("PT+11H59M00.100S"))); - Assert.assertEquals("+000 05:02:03.000", formatIntervalDay(parse("PT+5H2M3S"))); - Assert.assertEquals("+000 22:22:22.222", formatIntervalDay(parse("PT+22H22M22.222S"))); + Assert.assertEquals("-001 00:00:00.000", formatIntervalDay(Duration.parse("PT-24H"))); + Assert.assertEquals("+001 00:00:00.000", formatIntervalDay(Duration.parse("PT+24H"))); + Assert.assertEquals("-000 01:00:00.000", formatIntervalDay(Duration.parse("PT-1H"))); + // "JDK-8054978: java.time.Duration.parse() fails for negative duration with 0 seconds and nanos" not fixed on JDK8 + //Assert.assertEquals("-000 01:00:00.001", formatIntervalDay(Duration.parse("PT-1H-0M-00.001S"))); + Assert.assertEquals("-000 01:00:00.001", formatIntervalDay(Duration.ofHours(-1).minusMillis(1))); + Assert.assertEquals("-000 01:01:01.000", formatIntervalDay(Duration.parse("PT-1H-1M-1S"))); + Assert.assertEquals("-000 02:02:02.002", formatIntervalDay(Duration.parse("PT-2H-2M-02.002S"))); + Assert.assertEquals("-000 23:59:59.999", formatIntervalDay(Duration.parse("PT-23H-59M-59.999S"))); + // "JDK-8054978: java.time.Duration.parse() fails for negative duration with 0 seconds and nanos" not fixed on JDK8 + //Assert.assertEquals("-000 11:59:00.100", formatIntervalDay(Duration.parse("PT-11H-59M-00.100S"))); + Assert.assertEquals("-000 11:59:00.100", + formatIntervalDay(Duration.ofHours(-11).minusMinutes(59).minusMillis(100))); + Assert.assertEquals("-000 05:02:03.000", formatIntervalDay(Duration.parse("PT-5H-2M-3S"))); + Assert.assertEquals("-000 22:22:22.222", formatIntervalDay(Duration.parse("PT-22H-22M-22.222S"))); + Assert.assertEquals("+000 01:00:00.000", formatIntervalDay(Duration.parse("PT+1H"))); + Assert.assertEquals("+000 01:00:00.001", formatIntervalDay(Duration.parse("PT+1H0M00.001S"))); 
+ Assert.assertEquals("+000 01:01:01.000", formatIntervalDay(Duration.parse("PT+1H1M1S"))); + Assert.assertEquals("+000 02:02:02.002", formatIntervalDay(Duration.parse("PT+2H2M02.002S"))); + Assert.assertEquals("+000 23:59:59.999", formatIntervalDay(Duration.parse("PT+23H59M59.999S"))); + Assert.assertEquals("+000 11:59:00.100", formatIntervalDay(Duration.parse("PT+11H59M00.100S"))); + Assert.assertEquals("+000 05:02:03.000", formatIntervalDay(Duration.parse("PT+5H2M3S"))); + Assert.assertEquals("+000 22:22:22.222", formatIntervalDay(Duration.parse("PT+22H22M22.222S"))); } @Test public void testIntervalDayWithJodaPeriodObject() { Assert.assertEquals("+1567 00:00:00.000", - formatIntervalDay(new org.joda.time.Period().plusDays(1567))); + formatIntervalDay(Duration.ofDays(1567))); Assert.assertEquals("-1567 00:00:00.000", - formatIntervalDay(new org.joda.time.Period().minusDays(1567))); + formatIntervalDay(Duration.ofDays(-1567))); } @Test diff --git a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/client/ArrowFlightSqlClientHandlerBuilderTest.java b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/client/ArrowFlightSqlClientHandlerBuilderTest.java new file mode 100644 index 0000000000000..6565a85ddf99f --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/client/ArrowFlightSqlClientHandlerBuilderTest.java @@ -0,0 +1,143 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.driver.jdbc.client; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotSame; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; + +import org.apache.arrow.driver.jdbc.FlightServerTestRule; +import org.apache.arrow.driver.jdbc.utils.CoreMockedSqlProducers; +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.memory.RootAllocator; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; + +/** + * Test the behavior of ArrowFlightSqlClientHandler.Builder + */ +public class ArrowFlightSqlClientHandlerBuilderTest { + @ClassRule + public static final FlightServerTestRule FLIGHT_SERVER_TEST_RULE = FlightServerTestRule + .createStandardTestRule(CoreMockedSqlProducers.getLegacyProducer()); + + private static BufferAllocator allocator; + + @BeforeClass + public static void setup() { + allocator = new RootAllocator(Long.MAX_VALUE); + } + + @AfterClass + public static void tearDown() { + allocator.close(); + } + + @Test + public void testRetainCookiesOnAuthOff() throws Exception { + // Arrange + final ArrowFlightSqlClientHandler.Builder rootBuilder = new ArrowFlightSqlClientHandler.Builder() + .withHost(FLIGHT_SERVER_TEST_RULE.getHost()) + .withPort(FLIGHT_SERVER_TEST_RULE.getPort()) + .withBufferAllocator(allocator) + .withUsername(FlightServerTestRule.DEFAULT_USER) + .withPassword(FlightServerTestRule.DEFAULT_PASSWORD) + 
.withEncryption(false) + .withRetainCookies(true) + .withRetainAuth(false); + + try (ArrowFlightSqlClientHandler rootHandler = rootBuilder.build()) { + // Act + final ArrowFlightSqlClientHandler.Builder testBuilder = new ArrowFlightSqlClientHandler.Builder(rootBuilder); + + // Assert + assertSame(rootBuilder.cookieFactory, testBuilder.cookieFactory); + assertNotSame(rootBuilder.authFactory, testBuilder.authFactory); + } + } + + @Test + public void testRetainCookiesOffAuthOff() throws Exception { + // Arrange + final ArrowFlightSqlClientHandler.Builder rootBuilder = new ArrowFlightSqlClientHandler.Builder() + .withHost(FLIGHT_SERVER_TEST_RULE.getHost()) + .withPort(FLIGHT_SERVER_TEST_RULE.getPort()) + .withBufferAllocator(allocator) + .withUsername(FlightServerTestRule.DEFAULT_USER) + .withPassword(FlightServerTestRule.DEFAULT_PASSWORD) + .withEncryption(false) + .withRetainCookies(false) + .withRetainAuth(false); + + try (ArrowFlightSqlClientHandler rootHandler = rootBuilder.build()) { + // Act + final ArrowFlightSqlClientHandler.Builder testBuilder = new ArrowFlightSqlClientHandler.Builder(rootBuilder); + + // Assert + assertNotSame(rootBuilder.cookieFactory, testBuilder.cookieFactory); + assertNotSame(rootBuilder.authFactory, testBuilder.authFactory); + } + } + + @Test + public void testRetainCookiesOnAuthOn() throws Exception { + // Arrange + final ArrowFlightSqlClientHandler.Builder rootBuilder = new ArrowFlightSqlClientHandler.Builder() + .withHost(FLIGHT_SERVER_TEST_RULE.getHost()) + .withPort(FLIGHT_SERVER_TEST_RULE.getPort()) + .withBufferAllocator(allocator) + .withUsername(FlightServerTestRule.DEFAULT_USER) + .withPassword(FlightServerTestRule.DEFAULT_PASSWORD) + .withEncryption(false) + .withRetainCookies(true) + .withRetainAuth(true); + + try (ArrowFlightSqlClientHandler rootHandler = rootBuilder.build()) { + // Act + final ArrowFlightSqlClientHandler.Builder testBuilder = new ArrowFlightSqlClientHandler.Builder(rootBuilder); + + // Assert + 
assertSame(rootBuilder.cookieFactory, testBuilder.cookieFactory); + assertSame(rootBuilder.authFactory, testBuilder.authFactory); + } + } + + @Test + public void testDefaults() { + final ArrowFlightSqlClientHandler.Builder builder = new ArrowFlightSqlClientHandler.Builder(); + + // Validate all non-mandatory fields against defaults in ArrowFlightConnectionProperty. + assertNull(builder.username); + assertNull(builder.password); + assertTrue(builder.useEncryption); + assertFalse(builder.disableCertificateVerification); + assertNull(builder.trustStorePath); + assertNull(builder.trustStorePassword); + assertTrue(builder.useSystemTrustStore); + assertNull(builder.token); + assertTrue(builder.retainAuth); + assertTrue(builder.retainCookies); + assertNull(builder.tlsRootCertificatesPath); + assertNull(builder.clientCertificatePath); + assertNull(builder.clientKeyPath); + } +} diff --git a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/utils/CoreMockedSqlProducers.java b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/utils/CoreMockedSqlProducers.java index cf359849a7105..a8e2e7f2e4ce5 100644 --- a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/utils/CoreMockedSqlProducers.java +++ b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/utils/CoreMockedSqlProducers.java @@ -64,6 +64,7 @@ public final class CoreMockedSqlProducers { public static final String LEGACY_REGULAR_SQL_CMD = "SELECT * FROM TEST"; public static final String LEGACY_METADATA_SQL_CMD = "SELECT * FROM METADATA"; public static final String LEGACY_CANCELLATION_SQL_CMD = "SELECT * FROM TAKES_FOREVER"; + public static final String LEGACY_REGULAR_WITH_EMPTY_SQL_CMD = "SELECT * FROM TEST_EMPTIES"; private CoreMockedSqlProducers() { // Prevent instantiation. 
@@ -80,9 +81,44 @@ public static MockFlightSqlProducer getLegacyProducer() { addLegacyRegularSqlCmdSupport(producer); addLegacyMetadataSqlCmdSupport(producer); addLegacyCancellationSqlCmdSupport(producer); + addQueryWithEmbeddedEmptyRoot(producer); return producer; } + private static void addQueryWithEmbeddedEmptyRoot(final MockFlightSqlProducer producer) { + final Schema querySchema = new Schema(ImmutableList.of( + new Field( + "ID", + new FieldType(true, new ArrowType.Int(64, true), + null), + null) + )); + + final List> resultProducers = new ArrayList<>(); + Consumer dataRoot = listener -> { + try (final BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE); + final VectorSchemaRoot root = VectorSchemaRoot.create(querySchema, allocator)) { + root.allocateNew(); + root.setRowCount(0); + listener.start(root); + listener.putNext(); // empty root + ((BigIntVector) root.getVector("ID")).setSafe(0, 100L); + root.setRowCount(1); + listener.putNext(); // data root + root.clear(); + root.setRowCount(0); + listener.putNext(); // empty root + ((BigIntVector) root.getVector("ID")).setSafe(0, 100L); + root.setRowCount(1); + listener.putNext(); // data root + } finally { + listener.completed(); + } + }; + resultProducers.add(dataRoot); + producer.addSelectQuery(LEGACY_REGULAR_WITH_EMPTY_SQL_CMD, querySchema, resultProducers); + } + private static void addLegacyRegularSqlCmdSupport(final MockFlightSqlProducer producer) { final Schema querySchema = new Schema(ImmutableList.of( new Field( diff --git a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/utils/FlightStreamQueueTest.java b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/utils/FlightEndpointDataQueueTest.java similarity index 85% rename from java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/utils/FlightStreamQueueTest.java rename to 
java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/utils/FlightEndpointDataQueueTest.java index b474da55a7f1f..05325faa18ef3 100644 --- a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/utils/FlightStreamQueueTest.java +++ b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/utils/FlightEndpointDataQueueTest.java @@ -23,7 +23,7 @@ import java.util.concurrent.CompletionService; -import org.apache.arrow.flight.FlightStream; +import org.apache.arrow.driver.jdbc.client.CloseableEndpointStreamPair; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -33,20 +33,20 @@ import org.mockito.junit.MockitoJUnitRunner; /** - * Tests for {@link FlightStreamQueue}. + * Tests for {@link FlightEndpointDataQueue}. */ @RunWith(MockitoJUnitRunner.class) -public class FlightStreamQueueTest { +public class FlightEndpointDataQueueTest { @Rule public final ErrorCollector collector = new ErrorCollector(); @Mock - private CompletionService mockedService; - private FlightStreamQueue queue; + private CompletionService mockedService; + private FlightEndpointDataQueue queue; @Before public void setUp() { - queue = new FlightStreamQueue(mockedService); + queue = new FlightEndpointDataQueue(mockedService); } @Test @@ -64,7 +64,7 @@ public void testNextShouldThrowExceptionUponClose() throws Exception { public void testEnqueueShouldThrowExceptionUponClose() throws Exception { queue.close(); ThrowableAssertionUtils.simpleAssertThrowableClass(IllegalStateException.class, - () -> queue.enqueue(mock(FlightStream.class))); + () -> queue.enqueue(mock(CloseableEndpointStreamPair.class))); } @Test diff --git a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/utils/FlightSqlTestCertificates.java b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/utils/FlightSqlTestCertificates.java index a2b1864c02657..89e27be9f53da 100644 --- 
a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/utils/FlightSqlTestCertificates.java +++ b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/utils/FlightSqlTestCertificates.java @@ -51,6 +51,16 @@ static Path getFlightTestDataRoot() { return getTestDataRoot().resolve("flight"); } + /** + * Create File object with the CA certificate. + * + * @return A File containing the Root CA certificate. + */ + public static File exampleCACert() { + final Path root = getFlightTestDataRoot(); + return root.resolve("root-ca.pem").toFile(); + } + /** * Create CertKeyPair object with the certificates and keys. * diff --git a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/utils/MockFlightSqlProducer.java b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/utils/MockFlightSqlProducer.java index 0299eeb46d93b..2b65f8f5a07ba 100644 --- a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/utils/MockFlightSqlProducer.java +++ b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/utils/MockFlightSqlProducer.java @@ -34,6 +34,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Objects; import java.util.UUID; import java.util.function.BiConsumer; import java.util.function.Consumer; @@ -75,6 +76,7 @@ import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.memory.RootAllocator; import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.VectorSchemaRoot; import org.apache.arrow.vector.ipc.WriteChannel; import org.apache.arrow.vector.ipc.message.MessageSerializer; import org.apache.arrow.vector.types.pojo.Schema; @@ -97,12 +99,14 @@ public final class MockFlightSqlProducer implements FlightSqlProducer { private final Map>> updateResultProviders = new HashMap<>(); - private SqlInfoBuilder sqlInfoBuilder = new SqlInfoBuilder(); + private final SqlInfoBuilder 
sqlInfoBuilder = new SqlInfoBuilder(); + private final Map parameterSchemas = new HashMap<>(); + private final Map>> expectedParameterValues = new HashMap<>(); private final Map actionTypeCounter = new HashMap<>(); - private static FlightInfo getFightInfoExportedAndImportedKeys(final Message message, - final FlightDescriptor descriptor) { + private static FlightInfo getFlightInfoExportedAndImportedKeys(final Message message, + final FlightDescriptor descriptor) { return getFlightInfo(message, Schemas.GET_IMPORTED_KEYS_SCHEMA, descriptor); } @@ -192,6 +196,12 @@ void addUpdateQuery(final String sqlCommand, format("Attempted to overwrite pre-existing query: <%s>.", sqlCommand)); } + /** Registers parameters expected to be provided with a prepared statement. */ + public void addExpectedParameters(String query, Schema parameterSchema, List> expectedValues) { + parameterSchemas.put(query, parameterSchema); + expectedParameterValues.put(query, expectedValues); + } + @Override public void createPreparedStatement(final ActionCreatePreparedStatementRequest request, final CallContext callContext, @@ -223,6 +233,13 @@ public void createPreparedStatement(final ActionCreatePreparedStatementRequest r return; } + final Schema parameterSchema = parameterSchemas.get(query); + if (parameterSchema != null) { + final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); + MessageSerializer.serialize(new WriteChannel(Channels.newChannel(outputStream)), parameterSchema); + resultBuilder.setParameterSchema(ByteString.copyFrom(outputStream.toByteArray())); + } + listener.onNext(new Result(pack(resultBuilder.build()).toByteArray())); } catch (final Throwable t) { listener.onError(t); @@ -330,6 +347,51 @@ public Runnable acceptPutStatement(final CommandStatementUpdate commandStatement }; } + private boolean validateParameters(String query, + FlightStream flightStream, + StreamListener streamListener) { + final List> expectedValues = expectedParameterValues.get(query); + if 
(expectedValues != null) { + int index = 0; + while (flightStream.next()) { + final VectorSchemaRoot root = flightStream.getRoot(); + for (int i = 0; i < root.getRowCount(); i++) { + if (index >= expectedValues.size()) { + streamListener.onError(CallStatus.INVALID_ARGUMENT + .withDescription("More parameter rows provided than expected") + .toRuntimeException()); + return true; + } + List expectedRow = expectedValues.get(index++); + if (root.getFieldVectors().size() != expectedRow.size()) { + streamListener.onError(CallStatus.INVALID_ARGUMENT + .withDescription("Parameter count mismatch") + .toRuntimeException()); + return true; + } + + for (int paramIndex = 0; paramIndex < expectedRow.size(); paramIndex++) { + Object expected = expectedRow.get(paramIndex); + Object actual = root.getVector(paramIndex).getObject(i); + if (!Objects.equals(expected, actual)) { + streamListener.onError(CallStatus.INVALID_ARGUMENT + .withDescription(String.format("Parameter mismatch. Expected: %s Actual: %s", expected, actual)) + .toRuntimeException()); + return true; + } + } + } + } + if (index < expectedValues.size()) { + streamListener.onError(CallStatus.INVALID_ARGUMENT + .withDescription("Fewer parameter rows provided than expected") + .toRuntimeException()); + return true; + } + } + return false; + } + @Override public Runnable acceptPutPreparedStatementUpdate( final CommandPreparedStatementUpdate commandPreparedStatementUpdate, @@ -339,6 +401,11 @@ public Runnable acceptPutPreparedStatementUpdate( final String query = Preconditions.checkNotNull( preparedStatements.get(handle), format("No query registered under handle: <%s>.", handle)); + + if (validateParameters(query, flightStream, streamListener)) { + return () -> { }; + } + return acceptPutStatement( CommandStatementUpdate.newBuilder().setQuery(query).build(), callContext, flightStream, streamListener); @@ -349,8 +416,16 @@ public Runnable acceptPutPreparedStatementQuery( final CommandPreparedStatementQuery 
commandPreparedStatementQuery, final CallContext callContext, final FlightStream flightStream, final StreamListener streamListener) { - // TODO Implement this method. - throw CallStatus.UNIMPLEMENTED.toRuntimeException(); + final ByteString handle = commandPreparedStatementQuery.getPreparedStatementHandle(); + final String query = Preconditions.checkNotNull( + preparedStatements.get(handle), + format("No query registered under handle: <%s>.", handle)); + + if (validateParameters(query, flightStream, streamListener)) { + return () -> { }; + } + + return streamListener::onCompleted; } @Override @@ -454,14 +529,14 @@ public void getStreamPrimaryKeys(final CommandGetPrimaryKeys commandGetPrimaryKe public FlightInfo getFlightInfoExportedKeys(final CommandGetExportedKeys commandGetExportedKeys, final CallContext callContext, final FlightDescriptor flightDescriptor) { - return getFightInfoExportedAndImportedKeys(commandGetExportedKeys, flightDescriptor); + return getFlightInfoExportedAndImportedKeys(commandGetExportedKeys, flightDescriptor); } @Override public FlightInfo getFlightInfoImportedKeys(final CommandGetImportedKeys commandGetImportedKeys, final CallContext callContext, final FlightDescriptor flightDescriptor) { - return getFightInfoExportedAndImportedKeys(commandGetImportedKeys, flightDescriptor); + return getFlightInfoExportedAndImportedKeys(commandGetImportedKeys, flightDescriptor); } @Override @@ -469,7 +544,7 @@ public FlightInfo getFlightInfoCrossReference( final CommandGetCrossReference commandGetCrossReference, final CallContext callContext, final FlightDescriptor flightDescriptor) { - return getFightInfoExportedAndImportedKeys(commandGetCrossReference, flightDescriptor); + return getFlightInfoExportedAndImportedKeys(commandGetCrossReference, flightDescriptor); } @Override diff --git a/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/utils/PartitionedFlightSqlProducer.java 
b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/utils/PartitionedFlightSqlProducer.java new file mode 100644 index 0000000000000..3230ce626fac6 --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/utils/PartitionedFlightSqlProducer.java @@ -0,0 +1,115 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.arrow.driver.jdbc.utils; + +import static com.google.protobuf.Any.pack; + +import java.util.Arrays; +import java.util.List; + +import org.apache.arrow.flight.CallStatus; +import org.apache.arrow.flight.FlightDescriptor; +import org.apache.arrow.flight.FlightEndpoint; +import org.apache.arrow.flight.FlightInfo; +import org.apache.arrow.flight.NoOpFlightProducer; +import org.apache.arrow.flight.Result; +import org.apache.arrow.flight.Ticket; +import org.apache.arrow.flight.sql.BasicFlightSqlProducer; +import org.apache.arrow.flight.sql.impl.FlightSql; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.apache.arrow.vector.types.pojo.Schema; + +import com.google.protobuf.ByteString; +import com.google.protobuf.Message; + +public class PartitionedFlightSqlProducer extends BasicFlightSqlProducer { + + /** + * A minimal FlightProducer intended to just serve data when given the correct Ticket. + */ + public static class DataOnlyFlightSqlProducer extends NoOpFlightProducer { + private final Ticket ticket; + private final VectorSchemaRoot data; + + public DataOnlyFlightSqlProducer(Ticket ticket, VectorSchemaRoot data) { + this.ticket = ticket; + this.data = data; + } + + @Override + public void getStream(CallContext context, Ticket ticket, ServerStreamListener listener) { + if (!Arrays.equals(ticket.getBytes(), this.ticket.getBytes())) { + listener.error(CallStatus.INVALID_ARGUMENT.withDescription("Illegal ticket.").toRuntimeException()); + return; + } + + listener.start(data); + listener.putNext(); + listener.completed(); + } + } + + private final List endpoints; + + private final Schema schema; + + public PartitionedFlightSqlProducer(Schema schema, FlightEndpoint... 
endpoints) { + this.schema = schema; + this.endpoints = Arrays.asList(endpoints); + } + + @Override + protected List determineEndpoints( + T request, FlightDescriptor flightDescriptor, Schema schema) { + return endpoints; + } + + @Override + public void createPreparedStatement(FlightSql.ActionCreatePreparedStatementRequest request, + CallContext context, StreamListener listener) { + final FlightSql.ActionCreatePreparedStatementResult.Builder resultBuilder = + FlightSql.ActionCreatePreparedStatementResult.newBuilder() + .setPreparedStatementHandle(ByteString.EMPTY); + + final ByteString datasetSchemaBytes = ByteString.copyFrom(schema.serializeAsMessage()); + + resultBuilder.setDatasetSchema(datasetSchemaBytes); + listener.onNext(new Result(pack(resultBuilder.build()).toByteArray())); + listener.onCompleted(); + } + + @Override + public FlightInfo getFlightInfoStatement( + FlightSql.CommandStatementQuery command, CallContext context, FlightDescriptor descriptor) { + return FlightInfo.builder(schema, descriptor, endpoints).build(); + } + + @Override + public FlightInfo getFlightInfoPreparedStatement(FlightSql.CommandPreparedStatementQuery command, + CallContext context, FlightDescriptor descriptor) { + return FlightInfo.builder(schema, descriptor, endpoints).build(); + } + + @Override + public void closePreparedStatement(FlightSql.ActionClosePreparedStatementRequest request, + CallContext context, StreamListener listener) { + listener.onCompleted(); + } + + // Note -- getStream() is intentionally not implemented. 
+} diff --git a/java/flight/flight-sql-jdbc-driver/pom.xml b/java/flight/flight-sql-jdbc-driver/pom.xml index 853f1e12274b9..263538ba48b37 100644 --- a/java/flight/flight-sql-jdbc-driver/pom.xml +++ b/java/flight/flight-sql-jdbc-driver/pom.xml @@ -16,7 +16,7 @@ arrow-flight org.apache.arrow - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT ../pom.xml 4.0.0 diff --git a/java/flight/flight-sql/pom.xml b/java/flight/flight-sql/pom.xml index 573eb676fd834..09100d9731ad5 100644 --- a/java/flight/flight-sql/pom.xml +++ b/java/flight/flight-sql/pom.xml @@ -14,7 +14,7 @@ arrow-flight org.apache.arrow - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT ../pom.xml diff --git a/java/flight/flight-sql/src/main/java/org/apache/arrow/flight/sql/FlightSqlClient.java b/java/flight/flight-sql/src/main/java/org/apache/arrow/flight/sql/FlightSqlClient.java index e72354513013e..93d933f00f38f 100644 --- a/java/flight/flight-sql/src/main/java/org/apache/arrow/flight/sql/FlightSqlClient.java +++ b/java/flight/flight-sql/src/main/java/org/apache/arrow/flight/sql/FlightSqlClient.java @@ -951,12 +951,11 @@ public static class PreparedStatement implements AutoCloseable { * {@code PreparedStatement} setters. */ public void setParameters(final VectorSchemaRoot parameterBindingRoot) { - if (this.parameterBindingRoot != null) { - if (this.parameterBindingRoot.equals(parameterBindingRoot)) { - return; - } - this.parameterBindingRoot.close(); + if (parameterBindingRoot == this.parameterBindingRoot) { + // Nothing to do if we're attempting to set the same parameters again. + return; } + clearParameters(); this.parameterBindingRoot = parameterBindingRoot; } @@ -1038,19 +1037,25 @@ public FlightInfo execute(final CallOption... 
options) { .toByteArray()); if (parameterBindingRoot != null && parameterBindingRoot.getRowCount() > 0) { - final SyncPutListener putListener = new SyncPutListener(); - - FlightClient.ClientStreamListener listener = - client.startPut(descriptor, parameterBindingRoot, putListener, options); - - listener.putNext(); - listener.completed(); - listener.getResult(); + putParameters(descriptor, options); } return client.getInfo(descriptor, options); } + private SyncPutListener putParameters(FlightDescriptor descriptor, CallOption... options) { + final SyncPutListener putListener = new SyncPutListener(); + + FlightClient.ClientStreamListener listener = + client.startPut(descriptor, parameterBindingRoot, putListener, options); + + listener.putNext(); + listener.completed(); + listener.getResult(); + + return putListener; + } + /** * Checks whether this client is open. * @@ -1074,11 +1079,8 @@ public long executeUpdate(final CallOption... options) { .build()) .toByteArray()); setParameters(parameterBindingRoot == null ? VectorSchemaRoot.of() : parameterBindingRoot); - final SyncPutListener putListener = new SyncPutListener(); - final FlightClient.ClientStreamListener listener = - client.startPut(descriptor, parameterBindingRoot, putListener, options); - listener.putNext(); - listener.completed(); + SyncPutListener putListener = putParameters(descriptor, options); + try { final PutResult read = putListener.read(); try (final ArrowBuf metadata = read.getApplicationMetadata()) { @@ -1112,9 +1114,7 @@ public void close(final CallOption... 
options) { final Iterator closePreparedStatementResults = client.doAction(action, options); closePreparedStatementResults.forEachRemaining(result -> { }); - if (parameterBindingRoot != null) { - parameterBindingRoot.close(); - } + clearParameters(); } @Override diff --git a/java/flight/flight-sql/src/test/java/org/apache/arrow/flight/TestFlightSqlStreams.java b/java/flight/flight-sql/src/test/java/org/apache/arrow/flight/TestFlightSqlStreams.java index 11d00742fd243..1dd925eb53add 100644 --- a/java/flight/flight-sql/src/test/java/org/apache/arrow/flight/TestFlightSqlStreams.java +++ b/java/flight/flight-sql/src/test/java/org/apache/arrow/flight/TestFlightSqlStreams.java @@ -46,7 +46,6 @@ import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import com.google.common.collect.ImmutableList; @@ -209,10 +208,13 @@ public static void setUp() throws Exception { @AfterAll public static void tearDown() throws Exception { - close(sqlClient, server, allocator); + close(sqlClient, server); + + // Manually close all child allocators. 
+ allocator.getChildAllocators().forEach(BufferAllocator::close); + close(allocator); } - @Disabled("Memory leak GH-38268") @Test public void testGetTablesResultNoSchema() throws Exception { try (final FlightStream stream = @@ -232,7 +234,6 @@ public void testGetTablesResultNoSchema() throws Exception { } } - @Disabled("Memory leak GH-38268") @Test public void testGetTableTypesResult() throws Exception { try (final FlightStream stream = @@ -251,7 +252,6 @@ public void testGetTableTypesResult() throws Exception { } } - @Disabled("Memory leak GH-38268") @Test public void testGetSqlInfoResults() throws Exception { final FlightInfo info = sqlClient.getSqlInfo(); @@ -263,7 +263,6 @@ public void testGetSqlInfoResults() throws Exception { } } - @Disabled("Memory leak GH-38268") @Test public void testGetTypeInfo() throws Exception { FlightInfo flightInfo = sqlClient.getXdbcTypeInfo(); @@ -280,7 +279,6 @@ public void testGetTypeInfo() throws Exception { } } - @Disabled("Memory leak GH-38268") @Test public void testExecuteQuery() throws Exception { try (final FlightStream stream = sqlClient diff --git a/java/flight/pom.xml b/java/flight/pom.xml index b6b84278c1960..7ddda94f77b49 100644 --- a/java/flight/pom.xml +++ b/java/flight/pom.xml @@ -15,7 +15,7 @@ arrow-java-root org.apache.arrow - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT 4.0.0 diff --git a/java/format/pom.xml b/java/format/pom.xml index c8e83336c0ff3..3f581311e20ea 100644 --- a/java/format/pom.xml +++ b/java/format/pom.xml @@ -15,7 +15,7 @@ arrow-java-root org.apache.arrow - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT arrow-format diff --git a/java/format/src/main/java/org/apache/arrow/flatbuf/BinaryView.java b/java/format/src/main/java/org/apache/arrow/flatbuf/BinaryView.java new file mode 100644 index 0000000000000..56a8d329532c0 --- /dev/null +++ b/java/format/src/main/java/org/apache/arrow/flatbuf/BinaryView.java @@ -0,0 +1,57 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license 
agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// automatically generated by the FlatBuffers compiler, do not modify + +package org.apache.arrow.flatbuf; + +import java.nio.*; +import java.lang.*; +import java.util.*; +import com.google.flatbuffers.*; + +@SuppressWarnings("unused") +/** + * Logically the same as Binary, but the internal representation uses a view + * struct that contains the string length and either the string's entire data + * inline (for small strings) or an inlined prefix, an index of another buffer, + * and an offset pointing to a slice in that buffer (for non-small strings). + * + * Since it uses a variable number of data buffers, each Field with this type + * must have a corresponding entry in `variadicBufferCounts`. 
+ */ +public final class BinaryView extends Table { + public static void ValidateVersion() { Constants.FLATBUFFERS_1_12_0(); } + public static BinaryView getRootAsBinaryView(ByteBuffer _bb) { return getRootAsBinaryView(_bb, new BinaryView()); } + public static BinaryView getRootAsBinaryView(ByteBuffer _bb, BinaryView obj) { _bb.order(ByteOrder.LITTLE_ENDIAN); return (obj.__assign(_bb.getInt(_bb.position()) + _bb.position(), _bb)); } + public void __init(int _i, ByteBuffer _bb) { __reset(_i, _bb); } + public BinaryView __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; } + + + public static void startBinaryView(FlatBufferBuilder builder) { builder.startTable(0); } + public static int endBinaryView(FlatBufferBuilder builder) { + int o = builder.endTable(); + return o; + } + + public static final class Vector extends BaseVector { + public Vector __assign(int _vector, int _element_size, ByteBuffer _bb) { __reset(_vector, _element_size, _bb); return this; } + + public BinaryView get(int j) { return get(new BinaryView(), j); } + public BinaryView get(BinaryView obj, int j) { return obj.__assign(__indirect(__element(j), bb), bb); } + } +} + diff --git a/java/format/src/main/java/org/apache/arrow/flatbuf/BodyCompression.java b/java/format/src/main/java/org/apache/arrow/flatbuf/BodyCompression.java index 650454eb154b4..ed8ce0939a044 100644 --- a/java/format/src/main/java/org/apache/arrow/flatbuf/BodyCompression.java +++ b/java/format/src/main/java/org/apache/arrow/flatbuf/BodyCompression.java @@ -37,7 +37,8 @@ public final class BodyCompression extends Table { public BodyCompression __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; } /** - * Compressor library + * Compressor library. + * For LZ4_FRAME, each compressed buffer must consist of a single frame. */ public byte codec() { int o = __offset(4); return o != 0 ? 
bb.get(o + bb_pos) : 0; } /** diff --git a/java/format/src/main/java/org/apache/arrow/flatbuf/Date.java b/java/format/src/main/java/org/apache/arrow/flatbuf/Date.java index b2fcc9e39e38d..ac6e389835a43 100644 --- a/java/format/src/main/java/org/apache/arrow/flatbuf/Date.java +++ b/java/format/src/main/java/org/apache/arrow/flatbuf/Date.java @@ -25,8 +25,8 @@ @SuppressWarnings("unused") /** - * Date is either a 32-bit or 64-bit type representing elapsed time since UNIX - * epoch (1970-01-01), stored in either of two units: + * Date is either a 32-bit or 64-bit signed integer type representing an + * elapsed time since UNIX epoch (1970-01-01), stored in either of two units: * * * Milliseconds (64 bits) indicating UNIX time elapsed since the epoch (no * leap seconds), where the values are evenly divisible by 86400000 diff --git a/java/format/src/main/java/org/apache/arrow/flatbuf/LargeListView.java b/java/format/src/main/java/org/apache/arrow/flatbuf/LargeListView.java new file mode 100644 index 0000000000000..08c31c23a943f --- /dev/null +++ b/java/format/src/main/java/org/apache/arrow/flatbuf/LargeListView.java @@ -0,0 +1,52 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// automatically generated by the FlatBuffers compiler, do not modify + +package org.apache.arrow.flatbuf; + +import java.nio.*; +import java.lang.*; +import java.util.*; +import com.google.flatbuffers.*; + +@SuppressWarnings("unused") +/** + * Same as ListView, but with 64-bit offsets and sizes, allowing to represent + * extremely large data values. + */ +public final class LargeListView extends Table { + public static void ValidateVersion() { Constants.FLATBUFFERS_1_12_0(); } + public static LargeListView getRootAsLargeListView(ByteBuffer _bb) { return getRootAsLargeListView(_bb, new LargeListView()); } + public static LargeListView getRootAsLargeListView(ByteBuffer _bb, LargeListView obj) { _bb.order(ByteOrder.LITTLE_ENDIAN); return (obj.__assign(_bb.getInt(_bb.position()) + _bb.position(), _bb)); } + public void __init(int _i, ByteBuffer _bb) { __reset(_i, _bb); } + public LargeListView __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; } + + + public static void startLargeListView(FlatBufferBuilder builder) { builder.startTable(0); } + public static int endLargeListView(FlatBufferBuilder builder) { + int o = builder.endTable(); + return o; + } + + public static final class Vector extends BaseVector { + public Vector __assign(int _vector, int _element_size, ByteBuffer _bb) { __reset(_vector, _element_size, _bb); return this; } + + public LargeListView get(int j) { return get(new LargeListView(), j); } + public LargeListView get(LargeListView obj, int j) { return obj.__assign(__indirect(__element(j), bb), bb); } + } +} + diff --git a/java/format/src/main/java/org/apache/arrow/flatbuf/ListView.java b/java/format/src/main/java/org/apache/arrow/flatbuf/ListView.java new file mode 100644 index 0000000000000..2c9ad4c13d884 --- /dev/null +++ b/java/format/src/main/java/org/apache/arrow/flatbuf/ListView.java @@ -0,0 +1,53 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. 
See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// automatically generated by the FlatBuffers compiler, do not modify + +package org.apache.arrow.flatbuf; + +import java.nio.*; +import java.lang.*; +import java.util.*; +import com.google.flatbuffers.*; + +@SuppressWarnings("unused") +/** + * Represents the same logical types that List can, but contains offsets and + * sizes allowing for writes in any order and sharing of child values among + * list values. 
+ */ +public final class ListView extends Table { + public static void ValidateVersion() { Constants.FLATBUFFERS_1_12_0(); } + public static ListView getRootAsListView(ByteBuffer _bb) { return getRootAsListView(_bb, new ListView()); } + public static ListView getRootAsListView(ByteBuffer _bb, ListView obj) { _bb.order(ByteOrder.LITTLE_ENDIAN); return (obj.__assign(_bb.getInt(_bb.position()) + _bb.position(), _bb)); } + public void __init(int _i, ByteBuffer _bb) { __reset(_i, _bb); } + public ListView __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; } + + + public static void startListView(FlatBufferBuilder builder) { builder.startTable(0); } + public static int endListView(FlatBufferBuilder builder) { + int o = builder.endTable(); + return o; + } + + public static final class Vector extends BaseVector { + public Vector __assign(int _vector, int _element_size, ByteBuffer _bb) { __reset(_vector, _element_size, _bb); return this; } + + public ListView get(int j) { return get(new ListView(), j); } + public ListView get(ListView obj, int j) { return obj.__assign(__indirect(__element(j), bb), bb); } + } +} + diff --git a/java/format/src/main/java/org/apache/arrow/flatbuf/RecordBatch.java b/java/format/src/main/java/org/apache/arrow/flatbuf/RecordBatch.java index eb814e07dcceb..ce907ee0fdcda 100644 --- a/java/format/src/main/java/org/apache/arrow/flatbuf/RecordBatch.java +++ b/java/format/src/main/java/org/apache/arrow/flatbuf/RecordBatch.java @@ -67,27 +67,54 @@ public final class RecordBatch extends Table { */ public org.apache.arrow.flatbuf.BodyCompression compression() { return compression(new org.apache.arrow.flatbuf.BodyCompression()); } public org.apache.arrow.flatbuf.BodyCompression compression(org.apache.arrow.flatbuf.BodyCompression obj) { int o = __offset(10); return o != 0 ? obj.__assign(__indirect(o + bb_pos), bb) : null; } + /** + * Some types such as Utf8View are represented using a variable number of buffers. 
+ * For each such Field in the pre-ordered flattened logical schema, there will be + * an entry in variadicBufferCounts to indicate the number of number of variadic + * buffers which belong to that Field in the current RecordBatch. + * + * For example, the schema + * col1: Struct + * col2: Utf8View + * contains two Fields with variadic buffers so variadicBufferCounts will have + * two entries, the first counting the variadic buffers of `col1.beta` and the + * second counting `col2`'s. + * + * This field may be omitted if and only if the schema contains no Fields with + * a variable number of buffers, such as BinaryView and Utf8View. + */ + public long variadicBufferCounts(int j) { int o = __offset(12); return o != 0 ? bb.getLong(__vector(o) + j * 8) : 0; } + public int variadicBufferCountsLength() { int o = __offset(12); return o != 0 ? __vector_len(o) : 0; } + public LongVector variadicBufferCountsVector() { return variadicBufferCountsVector(new LongVector()); } + public LongVector variadicBufferCountsVector(LongVector obj) { int o = __offset(12); return o != 0 ? 
obj.__assign(__vector(o), bb) : null; } + public ByteBuffer variadicBufferCountsAsByteBuffer() { return __vector_as_bytebuffer(12, 8); } + public ByteBuffer variadicBufferCountsInByteBuffer(ByteBuffer _bb) { return __vector_in_bytebuffer(_bb, 12, 8); } public static int createRecordBatch(FlatBufferBuilder builder, long length, int nodesOffset, int buffersOffset, - int compressionOffset) { - builder.startTable(4); + int compressionOffset, + int variadicBufferCountsOffset) { + builder.startTable(5); RecordBatch.addLength(builder, length); + RecordBatch.addVariadicBufferCounts(builder, variadicBufferCountsOffset); RecordBatch.addCompression(builder, compressionOffset); RecordBatch.addBuffers(builder, buffersOffset); RecordBatch.addNodes(builder, nodesOffset); return RecordBatch.endRecordBatch(builder); } - public static void startRecordBatch(FlatBufferBuilder builder) { builder.startTable(4); } + public static void startRecordBatch(FlatBufferBuilder builder) { builder.startTable(5); } public static void addLength(FlatBufferBuilder builder, long length) { builder.addLong(0, length, 0L); } public static void addNodes(FlatBufferBuilder builder, int nodesOffset) { builder.addOffset(1, nodesOffset, 0); } public static void startNodesVector(FlatBufferBuilder builder, int numElems) { builder.startVector(16, numElems, 8); } public static void addBuffers(FlatBufferBuilder builder, int buffersOffset) { builder.addOffset(2, buffersOffset, 0); } public static void startBuffersVector(FlatBufferBuilder builder, int numElems) { builder.startVector(16, numElems, 8); } public static void addCompression(FlatBufferBuilder builder, int compressionOffset) { builder.addOffset(3, compressionOffset, 0); } + public static void addVariadicBufferCounts(FlatBufferBuilder builder, int variadicBufferCountsOffset) { builder.addOffset(4, variadicBufferCountsOffset, 0); } + public static int createVariadicBufferCountsVector(FlatBufferBuilder builder, long[] data) { builder.startVector(8, data.length, 
8); for (int i = data.length - 1; i >= 0; i--) builder.addLong(data[i]); return builder.endVector(); } + public static void startVariadicBufferCountsVector(FlatBufferBuilder builder, int numElems) { builder.startVector(8, numElems, 8); } public static int endRecordBatch(FlatBufferBuilder builder) { int o = builder.endTable(); return o; diff --git a/java/format/src/main/java/org/apache/arrow/flatbuf/RunEndEncoded.java b/java/format/src/main/java/org/apache/arrow/flatbuf/RunEndEncoded.java new file mode 100644 index 0000000000000..d48733ef0c826 --- /dev/null +++ b/java/format/src/main/java/org/apache/arrow/flatbuf/RunEndEncoded.java @@ -0,0 +1,55 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// automatically generated by the FlatBuffers compiler, do not modify + +package org.apache.arrow.flatbuf; + +import java.nio.*; +import java.lang.*; +import java.util.*; +import com.google.flatbuffers.*; + +@SuppressWarnings("unused") +/** + * Contains two child arrays, run_ends and values. + * The run_ends child array must be a 16/32/64-bit integer array + * which encodes the indices at which the run with the value in + * each corresponding index in the values child array ends. 
+ * Like list/struct types, the value array can be of any type. + */ +public final class RunEndEncoded extends Table { + public static void ValidateVersion() { Constants.FLATBUFFERS_1_12_0(); } + public static RunEndEncoded getRootAsRunEndEncoded(ByteBuffer _bb) { return getRootAsRunEndEncoded(_bb, new RunEndEncoded()); } + public static RunEndEncoded getRootAsRunEndEncoded(ByteBuffer _bb, RunEndEncoded obj) { _bb.order(ByteOrder.LITTLE_ENDIAN); return (obj.__assign(_bb.getInt(_bb.position()) + _bb.position(), _bb)); } + public void __init(int _i, ByteBuffer _bb) { __reset(_i, _bb); } + public RunEndEncoded __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; } + + + public static void startRunEndEncoded(FlatBufferBuilder builder) { builder.startTable(0); } + public static int endRunEndEncoded(FlatBufferBuilder builder) { + int o = builder.endTable(); + return o; + } + + public static final class Vector extends BaseVector { + public Vector __assign(int _vector, int _element_size, ByteBuffer _bb) { __reset(_vector, _element_size, _bb); return this; } + + public RunEndEncoded get(int j) { return get(new RunEndEncoded(), j); } + public RunEndEncoded get(RunEndEncoded obj, int j) { return obj.__assign(__indirect(__element(j), bb), bb); } + } +} + diff --git a/java/format/src/main/java/org/apache/arrow/flatbuf/Time.java b/java/format/src/main/java/org/apache/arrow/flatbuf/Time.java index 596d403a3eae9..9acc3fc7a5ea1 100644 --- a/java/format/src/main/java/org/apache/arrow/flatbuf/Time.java +++ b/java/format/src/main/java/org/apache/arrow/flatbuf/Time.java @@ -25,9 +25,20 @@ @SuppressWarnings("unused") /** - * Time type. The physical storage type depends on the unit - * - SECOND and MILLISECOND: 32 bits - * - MICROSECOND and NANOSECOND: 64 bits + * Time is either a 32-bit or 64-bit signed integer type representing an + * elapsed time since midnight, stored in either of four units: seconds, + * milliseconds, microseconds or nanoseconds. 
+ * + * The integer `bitWidth` depends on the `unit` and must be one of the following: + * * SECOND and MILLISECOND: 32 bits + * * MICROSECOND and NANOSECOND: 64 bits + * + * The allowed values are between 0 (inclusive) and 86400 (=24*60*60) seconds + * (exclusive), adjusted for the time unit (for example, up to 86400000 + * exclusive for the MILLISECOND unit). + * This definition doesn't allow for leap seconds. Time values from + * measurements with leap seconds will need to be corrected when ingesting + * into Arrow (for example by replacing the value 86400 with 86399). */ public final class Time extends Table { public static void ValidateVersion() { Constants.FLATBUFFERS_1_12_0(); } diff --git a/java/format/src/main/java/org/apache/arrow/flatbuf/Timestamp.java b/java/format/src/main/java/org/apache/arrow/flatbuf/Timestamp.java index 041452607c9e1..fe0c6aaea24fa 100644 --- a/java/format/src/main/java/org/apache/arrow/flatbuf/Timestamp.java +++ b/java/format/src/main/java/org/apache/arrow/flatbuf/Timestamp.java @@ -25,37 +25,111 @@ @SuppressWarnings("unused") /** - * Time elapsed from the Unix epoch, 00:00:00.000 on 1 January 1970, excluding - * leap seconds, as a 64-bit integer. Note that UNIX time does not include - * leap seconds. + * Timestamp is a 64-bit signed integer representing an elapsed time since a + * fixed epoch, stored in either of four units: seconds, milliseconds, + * microseconds or nanoseconds, and is optionally annotated with a timezone. + * + * Timestamp values do not include any leap seconds (in other words, all + * days are considered 86400 seconds long). + * + * Timestamps with a non-empty timezone + * ------------------------------------ + * + * If a Timestamp column has a non-empty timezone value, its epoch is + * 1970-01-01 00:00:00 (January 1st 1970, midnight) in the *UTC* timezone + * (the Unix epoch), regardless of the Timestamp's own timezone. 
+ * + * Therefore, timestamp values with a non-empty timezone correspond to + * physical points in time together with some additional information about + * how the data was obtained and/or how to display it (the timezone). + * + * For example, the timestamp value 0 with the timezone string "Europe/Paris" + * corresponds to "January 1st 1970, 00h00" in the UTC timezone, but the + * application may prefer to display it as "January 1st 1970, 01h00" in + * the Europe/Paris timezone (which is the same physical point in time). + * + * One consequence is that timestamp values with a non-empty timezone + * can be compared and ordered directly, since they all share the same + * well-known point of reference (the Unix epoch). + * + * Timestamps with an unset / empty timezone + * ----------------------------------------- + * + * If a Timestamp column has no timezone value, its epoch is + * 1970-01-01 00:00:00 (January 1st 1970, midnight) in an *unknown* timezone. + * + * Therefore, timestamp values without a timezone cannot be meaningfully + * interpreted as physical points in time, but only as calendar / clock + * indications ("wall clock time") in an unspecified timezone. + * + * For example, the timestamp value 0 with an empty timezone string + * corresponds to "January 1st 1970, 00h00" in an unknown timezone: there + * is not enough information to interpret it as a well-defined physical + * point in time. + * + * One consequence is that timestamp values without a timezone cannot + * be reliably compared or ordered, since they may have different points of + * reference. In particular, it is *not* possible to interpret an unset + * or empty timezone as the same as "UTC". 
+ * + * Conversion between timezones + * ---------------------------- + * + * If a Timestamp column has a non-empty timezone, changing the timezone + * to a different non-empty value is a metadata-only operation: + * the timestamp values need not change as their point of reference remains + * the same (the Unix epoch). + * + * However, if a Timestamp column has no timezone value, changing it to a + * non-empty value requires to think about the desired semantics. + * One possibility is to assume that the original timestamp values are + * relative to the epoch of the timezone being set; timestamp values should + * then adjusted to the Unix epoch (for example, changing the timezone from + * empty to "Europe/Paris" would require converting the timestamp values + * from "Europe/Paris" to "UTC", which seems counter-intuitive but is + * nevertheless correct). + * + * Guidelines for encoding data from external libraries + * ---------------------------------------------------- * * Date & time libraries often have multiple different data types for temporal - * data. In order to ease interoperability between different implementations the + * data. In order to ease interoperability between different implementations the * Arrow project has some recommendations for encoding these types into a Timestamp * column. * - * An "instant" represents a single moment in time that has no meaningful time zone - * or the time zone is unknown. A column of instants can also contain values from - * multiple time zones. To encode an instant set the timezone string to "UTC". - * - * A "zoned date-time" represents a single moment in time that has a meaningful - * reference time zone. To encode a zoned date-time as a Timestamp set the timezone - * string to the name of the timezone. There is some ambiguity between an instant - * and a zoned date-time with the UTC time zone. Both of these are stored the same. - * Typically, this distinction does not matter. 
If it does, then an application should - * use custom metadata or an extension type to distinguish between the two cases. - * - * An "offset date-time" represents a single moment in time combined with a meaningful - * offset from UTC. To encode an offset date-time as a Timestamp set the timezone string - * to the numeric time zone offset string (e.g. "+03:00"). - * - * A "local date-time" does not represent a single moment in time. It represents a wall - * clock time combined with a date. Because of daylight savings time there may multiple - * instants that correspond to a single local date-time in any given time zone. A - * local date-time is often stored as a struct or a Date32/Time64 pair. However, it can - * also be encoded into a Timestamp column. To do so the value should be the the time - * elapsed from the Unix epoch so that a wall clock in UTC would display the desired time. - * The timezone string should be set to null or the empty string. + * An "instant" represents a physical point in time that has no relevant timezone + * (for example, astronomical data). To encode an instant, use a Timestamp with + * the timezone string set to "UTC", and make sure the Timestamp values + * are relative to the UTC epoch (January 1st 1970, midnight). + * + * A "zoned date-time" represents a physical point in time annotated with an + * informative timezone (for example, the timezone in which the data was + * recorded). To encode a zoned date-time, use a Timestamp with the timezone + * string set to the name of the timezone, and make sure the Timestamp values + * are relative to the UTC epoch (January 1st 1970, midnight). + * + * (There is some ambiguity between an instant and a zoned date-time with the + * UTC timezone. Both of these are stored the same in Arrow. Typically, + * this distinction does not matter. If it does, then an application should + * use custom metadata or an extension type to distinguish between the two cases.) 
+ * + * An "offset date-time" represents a physical point in time combined with an + * explicit offset from UTC. To encode an offset date-time, use a Timestamp + * with the timezone string set to the numeric timezone offset string + * (e.g. "+03:00"), and make sure the Timestamp values are relative to + * the UTC epoch (January 1st 1970, midnight). + * + * A "naive date-time" (also called "local date-time" in some libraries) + * represents a wall clock time combined with a calendar date, but with + * no indication of how to map this information to a physical point in time. + * Naive date-times must be handled with care because of this missing + * information, and also because daylight saving time (DST) may make + * some values ambiguous or non-existent. A naive date-time may be + * stored as a struct with Date and Time fields. However, it may also be + * encoded into a Timestamp column with an empty timezone. The timestamp + * values should be computed "as if" the timezone of the date-time values + * was UTC; for example, the naive date-time "January 1st 1970, 00h00" would + * be encoded as timestamp value 0. */ public final class Timestamp extends Table { public static void ValidateVersion() { Constants.FLATBUFFERS_1_12_0(); } @@ -66,24 +140,16 @@ public final class Timestamp extends Table { public short unit() { int o = __offset(4); return o != 0 ? bb.getShort(o + bb_pos) : 0; } /** - * The time zone is a string indicating the name of a time zone, one of: + * The timezone is an optional string indicating the name of a timezone, + * one of: * - * * As used in the Olson time zone database (the "tz database" or - * "tzdata"), such as "America/New_York" - * * An absolute time zone offset of the form +XX:XX or -XX:XX, such as +07:30 + * * As used in the Olson timezone database (the "tz database" or + * "tzdata"), such as "America/New_York". + * * An absolute timezone offset of the form "+XX:XX" or "-XX:XX", + * such as "+07:30". 
* * Whether a timezone string is present indicates different semantics about - * the data: - * - * * If the time zone is null or an empty string, the data is a local date-time - * and does not represent a single moment in time. Instead it represents a wall clock - * time and care should be taken to avoid interpreting it semantically as an instant. - * - * * If the time zone is set to a valid value, values can be displayed as - * "localized" to that time zone, even though the underlying 64-bit - * integers are identical to the same data stored in UTC. Converting - * between time zones is a metadata-only operation and does not change the - * underlying values + * the data (see above). */ public String timezone() { int o = __offset(6); return o != 0 ? __string(o + bb_pos) : null; } public ByteBuffer timezoneAsByteBuffer() { return __vector_as_bytebuffer(6, 1); } diff --git a/java/format/src/main/java/org/apache/arrow/flatbuf/Type.java b/java/format/src/main/java/org/apache/arrow/flatbuf/Type.java index 5f1a550cffff4..29248bb23c303 100644 --- a/java/format/src/main/java/org/apache/arrow/flatbuf/Type.java +++ b/java/format/src/main/java/org/apache/arrow/flatbuf/Type.java @@ -47,8 +47,13 @@ private Type() { } public static final byte LargeBinary = 19; public static final byte LargeUtf8 = 20; public static final byte LargeList = 21; + public static final byte RunEndEncoded = 22; + public static final byte BinaryView = 23; + public static final byte Utf8View = 24; + public static final byte ListView = 25; + public static final byte LargeListView = 26; - public static final String[] names = { "NONE", "Null", "Int", "FloatingPoint", "Binary", "Utf8", "Bool", "Decimal", "Date", "Time", "Timestamp", "Interval", "List", "Struct_", "Union", "FixedSizeBinary", "FixedSizeList", "Map", "Duration", "LargeBinary", "LargeUtf8", "LargeList", }; + public static final String[] names = { "NONE", "Null", "Int", "FloatingPoint", "Binary", "Utf8", "Bool", "Decimal", "Date", "Time", 
"Timestamp", "Interval", "List", "Struct_", "Union", "FixedSizeBinary", "FixedSizeList", "Map", "Duration", "LargeBinary", "LargeUtf8", "LargeList", "RunEndEncoded", "BinaryView", "Utf8View", "ListView", "LargeListView", }; public static String name(int e) { return names[e]; } } diff --git a/java/format/src/main/java/org/apache/arrow/flatbuf/Utf8View.java b/java/format/src/main/java/org/apache/arrow/flatbuf/Utf8View.java new file mode 100644 index 0000000000000..035c977576e43 --- /dev/null +++ b/java/format/src/main/java/org/apache/arrow/flatbuf/Utf8View.java @@ -0,0 +1,57 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// automatically generated by the FlatBuffers compiler, do not modify + +package org.apache.arrow.flatbuf; + +import java.nio.*; +import java.lang.*; +import java.util.*; +import com.google.flatbuffers.*; + +@SuppressWarnings("unused") +/** + * Logically the same as Utf8, but the internal representation uses a view + * struct that contains the string length and either the string's entire data + * inline (for small strings) or an inlined prefix, an index of another buffer, + * and an offset pointing to a slice in that buffer (for non-small strings). 
+ * + * Since it uses a variable number of data buffers, each Field with this type + * must have a corresponding entry in `variadicBufferCounts`. + */ +public final class Utf8View extends Table { + public static void ValidateVersion() { Constants.FLATBUFFERS_1_12_0(); } + public static Utf8View getRootAsUtf8View(ByteBuffer _bb) { return getRootAsUtf8View(_bb, new Utf8View()); } + public static Utf8View getRootAsUtf8View(ByteBuffer _bb, Utf8View obj) { _bb.order(ByteOrder.LITTLE_ENDIAN); return (obj.__assign(_bb.getInt(_bb.position()) + _bb.position(), _bb)); } + public void __init(int _i, ByteBuffer _bb) { __reset(_i, _bb); } + public Utf8View __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; } + + + public static void startUtf8View(FlatBufferBuilder builder) { builder.startTable(0); } + public static int endUtf8View(FlatBufferBuilder builder) { + int o = builder.endTable(); + return o; + } + + public static final class Vector extends BaseVector { + public Vector __assign(int _vector, int _element_size, ByteBuffer _bb) { __reset(_vector, _element_size, _bb); return this; } + + public Utf8View get(int j) { return get(new Utf8View(), j); } + public Utf8View get(Utf8View obj, int j) { return obj.__assign(__indirect(__element(j), bb), bb); } + } +} + diff --git a/java/gandiva/pom.xml b/java/gandiva/pom.xml index c9ca3ef615d0e..cfda0b0c527a9 100644 --- a/java/gandiva/pom.xml +++ b/java/gandiva/pom.xml @@ -14,7 +14,7 @@ org.apache.arrow arrow-java-root - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT org.apache.arrow.gandiva diff --git a/java/memory/memory-core/pom.xml b/java/memory/memory-core/pom.xml index 62d74bd0925c9..8f28699045383 100644 --- a/java/memory/memory-core/pom.xml +++ b/java/memory/memory-core/pom.xml @@ -13,7 +13,7 @@ arrow-memory org.apache.arrow - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT 4.0.0 diff --git a/java/memory/memory-netty/pom.xml b/java/memory/memory-netty/pom.xml index 626452dba8777..307f6ad81a0f1 100644 --- a/java/memory/memory-netty/pom.xml +++ 
b/java/memory/memory-netty/pom.xml @@ -13,7 +13,7 @@ arrow-memory org.apache.arrow - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT 4.0.0 diff --git a/java/memory/memory-unsafe/pom.xml b/java/memory/memory-unsafe/pom.xml index 33e2058d5f2da..9f8137308195d 100644 --- a/java/memory/memory-unsafe/pom.xml +++ b/java/memory/memory-unsafe/pom.xml @@ -13,7 +13,7 @@ arrow-memory org.apache.arrow - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT 4.0.0 diff --git a/java/memory/pom.xml b/java/memory/pom.xml index 0ddbf63ef38b9..c10263b97f1bf 100644 --- a/java/memory/pom.xml +++ b/java/memory/pom.xml @@ -14,7 +14,7 @@ org.apache.arrow arrow-java-root - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT arrow-memory Arrow Memory diff --git a/java/performance/pom.xml b/java/performance/pom.xml index c6dc572935cb1..102832491ec05 100644 --- a/java/performance/pom.xml +++ b/java/performance/pom.xml @@ -14,7 +14,7 @@ arrow-java-root org.apache.arrow - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT arrow-performance jar diff --git a/java/pom.xml b/java/pom.xml index 417bdd5da375a..2a9997b7012b7 100644 --- a/java/pom.xml +++ b/java/pom.xml @@ -20,7 +20,7 @@ org.apache.arrow arrow-java-root - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT pom Apache Arrow Java Root POM @@ -33,7 +33,7 @@ 5.9.0 1.7.25 31.1-jre - 4.1.96.Final + 4.1.100.Final 1.56.0 3.23.1 2.15.1 diff --git a/java/tools/pom.xml b/java/tools/pom.xml index b139b3876e4ae..128825c224369 100644 --- a/java/tools/pom.xml +++ b/java/tools/pom.xml @@ -14,7 +14,7 @@ org.apache.arrow arrow-java-root - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT arrow-tools Arrow Tools diff --git a/java/vector/pom.xml b/java/vector/pom.xml index 015d089e9d028..4c8bf1e594aa4 100644 --- a/java/vector/pom.xml +++ b/java/vector/pom.xml @@ -14,7 +14,7 @@ org.apache.arrow arrow-java-root - 14.0.0-SNAPSHOT + 15.0.0-SNAPSHOT arrow-vector Arrow Vectors @@ -61,10 +61,6 @@ arrow-memory-unsafe test - - io.netty - netty-common - com.google.flatbuffers flatbuffers-java @@ -74,6 +70,11 @@ org.slf4j slf4j-api + + org.eclipse.collections + 
eclipse-collections + 11.1.0 + diff --git a/java/vector/src/main/codegen/templates/AbstractFieldWriter.java b/java/vector/src/main/codegen/templates/AbstractFieldWriter.java index bb4ee45eaa073..6c2368117f7c2 100644 --- a/java/vector/src/main/codegen/templates/AbstractFieldWriter.java +++ b/java/vector/src/main/codegen/templates/AbstractFieldWriter.java @@ -27,6 +27,9 @@ /* * This class is generated using freemarker and the ${.template_name} template. + * Note that changes to the AbstractFieldWriter template should also get reflected in the + * AbstractPromotableFieldWriter, ComplexWriters, UnionFixedSizeListWriter, UnionListWriter + * and UnionWriter templates and the PromotableWriter concrete code. */ @SuppressWarnings("unused") abstract class AbstractFieldWriter extends AbstractBaseWriter implements FieldWriter { @@ -125,19 +128,19 @@ public void write(${name}Holder holder) { <#if minor.class?ends_with("VarBinary")> - public void writeTo${minor.class}(byte[] value) { + public void write${minor.class}(byte[] value) { fail("${name}"); } - public void writeTo${minor.class}(byte[] value, int offset, int length) { + public void write${minor.class}(byte[] value, int offset, int length) { fail("${name}"); } - public void writeTo${minor.class}(ByteBuffer value) { + public void write${minor.class}(ByteBuffer value) { fail("${name}"); } - public void writeTo${minor.class}(ByteBuffer value, int offset, int length) { + public void write${minor.class}(ByteBuffer value, int offset, int length) { fail("${name}"); } diff --git a/java/vector/src/main/codegen/templates/AbstractPromotableFieldWriter.java b/java/vector/src/main/codegen/templates/AbstractPromotableFieldWriter.java index 2f963a9df0d05..59f9fb5b8098d 100644 --- a/java/vector/src/main/codegen/templates/AbstractPromotableFieldWriter.java +++ b/java/vector/src/main/codegen/templates/AbstractPromotableFieldWriter.java @@ -221,6 +221,38 @@ public void write(${name}Holder holder) { } + <#if 
minor.class?ends_with("VarBinary")> + @Override + public void write${minor.class}(byte[] value) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(value); + } + + @Override + public void write${minor.class}(byte[] value, int offset, int length) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(value, offset, length); + } + + @Override + public void write${minor.class}(ByteBuffer value) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(value); + } + + @Override + public void write${minor.class}(ByteBuffer value, int offset, int length) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(value, offset, length); + } + <#elseif minor.class?ends_with("VarChar")> + @Override + public void write${minor.class}(Text value) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(value); + } + + @Override + public void write${minor.class}(String value) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(value); + } + + public void writeNull() { } diff --git a/java/vector/src/main/codegen/templates/ComplexWriters.java b/java/vector/src/main/codegen/templates/ComplexWriters.java index 51a52a6e3070d..2e3caae1f0f22 100644 --- a/java/vector/src/main/codegen/templates/ComplexWriters.java +++ b/java/vector/src/main/codegen/templates/ComplexWriters.java @@ -194,22 +194,22 @@ public void writeNull() { <#if minor.class?ends_with("VarBinary")> - public void writeTo${minor.class}(byte[] value) { + public void write${minor.class}(byte[] value) { vector.setSafe(idx(), value); vector.setValueCount(idx() + 1); } - public void writeTo${minor.class}(byte[] value, int offset, int length) { + public void write${minor.class}(byte[] value, int offset, int length) { vector.setSafe(idx(), value, offset, length); vector.setValueCount(idx() + 1); } - public void writeTo${minor.class}(ByteBuffer value) { + public void write${minor.class}(ByteBuffer value) { vector.setSafe(idx(), value, 0, value.remaining()); 
vector.setValueCount(idx() + 1); } - public void writeTo${minor.class}(ByteBuffer value, int offset, int length) { + public void write${minor.class}(ByteBuffer value, int offset, int length) { vector.setSafe(idx(), value, offset, length); vector.setValueCount(idx() + 1); } @@ -259,13 +259,13 @@ public interface ${eName}Writer extends BaseWriter { <#if minor.class?ends_with("VarBinary")> - public void writeTo${minor.class}(byte[] value); + public void write${minor.class}(byte[] value); - public void writeTo${minor.class}(byte[] value, int offset, int length); + public void write${minor.class}(byte[] value, int offset, int length); - public void writeTo${minor.class}(ByteBuffer value); + public void write${minor.class}(ByteBuffer value); - public void writeTo${minor.class}(ByteBuffer value, int offset, int length); + public void write${minor.class}(ByteBuffer value, int offset, int length); <#if minor.class?ends_with("VarChar")> diff --git a/java/vector/src/main/codegen/templates/UnionFixedSizeListWriter.java b/java/vector/src/main/codegen/templates/UnionFixedSizeListWriter.java index 55c661bfc6023..3436e3a967651 100644 --- a/java/vector/src/main/codegen/templates/UnionFixedSizeListWriter.java +++ b/java/vector/src/main/codegen/templates/UnionFixedSizeListWriter.java @@ -295,6 +295,62 @@ public void writeBigEndianBytesToDecimal256(byte[] value, ArrowType arrowType) { <#assign name = minor.class?cap_first /> <#assign fields = minor.fields!type.fields /> <#assign uncappedName = name?uncap_first/> + <#if minor.class?ends_with("VarBinary")> + @Override + public void write${minor.class}(byte[] value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write${minor.class}(byte[] value, int offset, int length) { + if (writer.idx() >= (idx() + 1) * 
listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write${minor.class}(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write${minor.class}(ByteBuffer value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write${minor.class}(ByteBuffer value, int offset, int length) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write${minor.class}(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + <#elseif minor.class?ends_with("VarChar")> + @Override + public void write${minor.class}(Text value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write${minor.class}(String value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + <#if !minor.typeParams?? 
> @Override public void write${name}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, ) { diff --git a/java/vector/src/main/codegen/templates/UnionListWriter.java b/java/vector/src/main/codegen/templates/UnionListWriter.java index fac75a9ce5637..5c0565ee27175 100644 --- a/java/vector/src/main/codegen/templates/UnionListWriter.java +++ b/java/vector/src/main/codegen/templates/UnionListWriter.java @@ -274,6 +274,43 @@ public void write(${name}Holder holder) { writer.write${name}(<#list fields as field>holder.${field.name}<#if field_has_next>, ); writer.setPosition(writer.idx()+1); } + + + <#if minor.class?ends_with("VarBinary")> + @Override + public void write${minor.class}(byte[] value) { + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write${minor.class}(byte[] value, int offset, int length) { + writer.write${minor.class}(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write${minor.class}(ByteBuffer value) { + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write${minor.class}(ByteBuffer value, int offset, int length) { + writer.write${minor.class}(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + <#elseif minor.class?ends_with("VarChar")> + @Override + public void write${minor.class}(Text value) { + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + public void write${minor.class}(String value) { + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } diff --git a/java/vector/src/main/codegen/templates/UnionWriter.java b/java/vector/src/main/codegen/templates/UnionWriter.java index 4efd1026cac4a..08dbf24324b17 100644 --- a/java/vector/src/main/codegen/templates/UnionWriter.java +++ b/java/vector/src/main/codegen/templates/UnionWriter.java @@ -302,6 +302,42 @@ public void write(${name}Holder holder) { 
get${name}Writer(arrowType).setPosition(idx()); get${name}Writer(arrowType).writeBigEndianBytesTo${name}(value, arrowType); } + <#elseif minor.class?ends_with("VarBinary")> + @Override + public void write${minor.class}(byte[] value) { + get${name}Writer().setPosition(idx()); + get${name}Writer().write${minor.class}(value); + } + + @Override + public void write${minor.class}(byte[] value, int offset, int length) { + get${name}Writer().setPosition(idx()); + get${name}Writer().write${minor.class}(value, offset, length); + } + + @Override + public void write${minor.class}(ByteBuffer value) { + get${name}Writer().setPosition(idx()); + get${name}Writer().write${minor.class}(value); + } + + @Override + public void write${minor.class}(ByteBuffer value, int offset, int length) { + get${name}Writer().setPosition(idx()); + get${name}Writer().write${minor.class}(value, offset, length); + } + <#elseif minor.class?ends_with("VarChar")> + @Override + public void write${minor.class}(${friendlyType} value) { + get${name}Writer().setPosition(idx()); + get${name}Writer().write${minor.class}(value); + } + + @Override + public void write${minor.class}(String value) { + get${name}Writer().setPosition(idx()); + get${name}Writer().write${minor.class}(value); + } diff --git a/java/vector/src/main/java/org/apache/arrow/vector/complex/MapVector.java b/java/vector/src/main/java/org/apache/arrow/vector/complex/MapVector.java index c1913574bab19..e082b2f43be64 100644 --- a/java/vector/src/main/java/org/apache/arrow/vector/complex/MapVector.java +++ b/java/vector/src/main/java/org/apache/arrow/vector/complex/MapVector.java @@ -146,6 +146,11 @@ public TransferPair getTransferPair(String ref, BufferAllocator allocator, CallB return new TransferImpl(ref, allocator, callBack); } + @Override + public TransferPair getTransferPair(Field field, BufferAllocator allocator, CallBack callBack) { + return new TransferImpl(field, allocator, callBack); + } + @Override public TransferPair 
makeTransferPair(ValueVector target) { return new MapVector.TransferImpl((MapVector) target); diff --git a/java/vector/src/main/java/org/apache/arrow/vector/complex/StructVector.java b/java/vector/src/main/java/org/apache/arrow/vector/complex/StructVector.java index d947249fd3cdd..d0304a6fd2504 100644 --- a/java/vector/src/main/java/org/apache/arrow/vector/complex/StructVector.java +++ b/java/vector/src/main/java/org/apache/arrow/vector/complex/StructVector.java @@ -246,6 +246,15 @@ public TransferPair getTransferPair(Field field, BufferAllocator allocator) { allowConflictPolicyChanges), false); } + @Override + public TransferPair getTransferPair(Field field, BufferAllocator allocator, CallBack callBack) { + return new NullableStructTransferPair(this, new StructVector(field, + allocator, + callBack, + getConflictPolicy(), + allowConflictPolicyChanges), false); + } + /** * {@link TransferPair} for this (nullable) {@link StructVector}. */ diff --git a/java/vector/src/main/java/org/apache/arrow/vector/complex/impl/PromotableWriter.java b/java/vector/src/main/java/org/apache/arrow/vector/complex/impl/PromotableWriter.java index d99efceae3eca..f7be277f592a6 100644 --- a/java/vector/src/main/java/org/apache/arrow/vector/complex/impl/PromotableWriter.java +++ b/java/vector/src/main/java/org/apache/arrow/vector/complex/impl/PromotableWriter.java @@ -18,6 +18,7 @@ package org.apache.arrow.vector.complex.impl; import java.math.BigDecimal; +import java.nio.ByteBuffer; import org.apache.arrow.memory.ArrowBuf; import org.apache.arrow.vector.FieldVector; @@ -37,6 +38,7 @@ import org.apache.arrow.vector.types.pojo.ArrowType; import org.apache.arrow.vector.types.pojo.Field; import org.apache.arrow.vector.types.pojo.FieldType; +import org.apache.arrow.vector.util.Text; import org.apache.arrow.vector.util.TransferPair; /** @@ -378,7 +380,66 @@ public void writeBigEndianBytesToDecimal256(byte[] value, ArrowType arrowType) { /*bitWidth=*/256)).writeBigEndianBytesToDecimal256(value, 
arrowType); } - + @Override + public void writeVarBinary(byte[] value) { + getWriter(MinorType.VARBINARY).writeVarBinary(value); + } + + @Override + public void writeVarBinary(byte[] value, int offset, int length) { + getWriter(MinorType.VARBINARY).writeVarBinary(value, offset, length); + } + + @Override + public void writeVarBinary(ByteBuffer value) { + getWriter(MinorType.VARBINARY).writeVarBinary(value); + } + + @Override + public void writeVarBinary(ByteBuffer value, int offset, int length) { + getWriter(MinorType.VARBINARY).writeVarBinary(value, offset, length); + } + + @Override + public void writeLargeVarBinary(byte[] value) { + getWriter(MinorType.LARGEVARBINARY).writeLargeVarBinary(value); + } + + @Override + public void writeLargeVarBinary(byte[] value, int offset, int length) { + getWriter(MinorType.LARGEVARBINARY).writeLargeVarBinary(value, offset, length); + } + + @Override + public void writeLargeVarBinary(ByteBuffer value) { + getWriter(MinorType.LARGEVARBINARY).writeLargeVarBinary(value); + } + + @Override + public void writeLargeVarBinary(ByteBuffer value, int offset, int length) { + getWriter(MinorType.LARGEVARBINARY).writeLargeVarBinary(value, offset, length); + } + + @Override + public void writeVarChar(Text value) { + getWriter(MinorType.VARCHAR).writeVarChar(value); + } + + @Override + public void writeVarChar(String value) { + getWriter(MinorType.VARCHAR).writeVarChar(value); + } + + @Override + public void writeLargeVarChar(Text value) { + getWriter(MinorType.LARGEVARCHAR).writeLargeVarChar(value); + } + + @Override + public void writeLargeVarChar(String value) { + getWriter(MinorType.LARGEVARCHAR).writeLargeVarChar(value); + } + @Override public void allocate() { getWriter().allocate(); diff --git a/java/vector/src/main/java/org/apache/arrow/vector/util/MapWithOrdinalImpl.java b/java/vector/src/main/java/org/apache/arrow/vector/util/MapWithOrdinalImpl.java index 3612d677ed5a9..7c9c0e9408860 100644 --- 
a/java/vector/src/main/java/org/apache/arrow/vector/util/MapWithOrdinalImpl.java +++ b/java/vector/src/main/java/org/apache/arrow/vector/util/MapWithOrdinalImpl.java @@ -25,15 +25,11 @@ import java.util.Map; import java.util.Set; import java.util.stream.Collectors; -import java.util.stream.StreamSupport; -import org.apache.arrow.util.Preconditions; +import org.eclipse.collections.impl.map.mutable.primitive.IntObjectHashMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import io.netty.util.collection.IntObjectHashMap; -import io.netty.util.collection.IntObjectMap; - /** * An implementation of map that supports constant time look-up by a generic key or an ordinal. * @@ -133,9 +129,7 @@ public Set keySet() { @Override public Collection values() { - return StreamSupport.stream(secondary.entries().spliterator(), false) - .map((IntObjectMap.PrimitiveEntry t) -> Preconditions.checkNotNull(t).value()) - .collect(Collectors.toList()); + return secondary.values(); } @Override diff --git a/java/vector/src/main/java/org/apache/arrow/vector/util/MultiMapWithOrdinal.java b/java/vector/src/main/java/org/apache/arrow/vector/util/MultiMapWithOrdinal.java index 5fbb45a7ac6ac..f722a8a86772c 100644 --- a/java/vector/src/main/java/org/apache/arrow/vector/util/MultiMapWithOrdinal.java +++ b/java/vector/src/main/java/org/apache/arrow/vector/util/MultiMapWithOrdinal.java @@ -25,7 +25,7 @@ import java.util.Set; import java.util.stream.Collectors; -import io.netty.util.collection.IntObjectHashMap; +import org.eclipse.collections.impl.map.mutable.primitive.IntObjectHashMap; /** * An implementation of a multimap that supports constant time look-up by a generic key or an ordinal. 
diff --git a/java/vector/src/test/java/org/apache/arrow/vector/TestFixedSizeListVector.java b/java/vector/src/test/java/org/apache/arrow/vector/TestFixedSizeListVector.java index 9d7e413a739ad..0023b1dddb8e7 100644 --- a/java/vector/src/test/java/org/apache/arrow/vector/TestFixedSizeListVector.java +++ b/java/vector/src/test/java/org/apache/arrow/vector/TestFixedSizeListVector.java @@ -24,6 +24,7 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import java.math.BigDecimal; +import java.nio.ByteBuffer; import java.util.Arrays; import java.util.List; @@ -37,6 +38,7 @@ import org.apache.arrow.vector.types.Types.MinorType; import org.apache.arrow.vector.types.pojo.ArrowType; import org.apache.arrow.vector.types.pojo.FieldType; +import org.apache.arrow.vector.util.Text; import org.apache.arrow.vector.util.TransferPair; import org.junit.After; import org.junit.Assert; @@ -457,18 +459,98 @@ public void testVectorWithNulls() { assertEquals(4, vector1.getValueCount()); - List realValue1 = vector1.getObject(0); + List realValue1 = vector1.getObject(0); assertEquals(values1, realValue1); - List realValue2 = vector1.getObject(1); + List realValue2 = vector1.getObject(1); assertEquals(values2, realValue2); - List realValue3 = vector1.getObject(2); + List realValue3 = vector1.getObject(2); assertEquals(values3, realValue3); - List realValue4 = vector1.getObject(3); + List realValue4 = vector1.getObject(3); assertEquals(values4, realValue4); } } - private int[] convertListToIntArray(List list) { + @Test + public void testWriteVarCharHelpers() throws Exception { + try (final FixedSizeListVector vector = FixedSizeListVector.empty("vector", /*listSize=*/4, allocator)) { + + UnionFixedSizeListWriter writer = vector.getWriter(); + writer.allocate(); + + writer.startList(); + writer.writeVarChar("row1,1"); + writer.writeVarChar(new Text("row1,2")); + writer.writeNull(); + writer.writeNull(); + writer.endList(); + + assertEquals("row1,1", 
vector.getObject(0).get(0).toString()); + assertEquals("row1,2", vector.getObject(0).get(1).toString()); + } + } + + @Test + public void testWriteLargeVarCharHelpers() throws Exception { + try (final FixedSizeListVector vector = FixedSizeListVector.empty("vector", /*listSize=*/4, allocator)) { + + UnionFixedSizeListWriter writer = vector.getWriter(); + writer.allocate(); + + writer.startList(); + writer.writeLargeVarChar("row1,1"); + writer.writeLargeVarChar(new Text("row1,2")); + writer.writeNull(); + writer.writeNull(); + writer.endList(); + + assertEquals("row1,1", vector.getObject(0).get(0).toString()); + assertEquals("row1,2", vector.getObject(0).get(1).toString()); + } + } + + @Test + public void testWriteVarBinaryHelpers() throws Exception { + try (final FixedSizeListVector vector = FixedSizeListVector.empty("vector", /*listSize=*/4, allocator)) { + + UnionFixedSizeListWriter writer = vector.getWriter(); + writer.allocate(); + + writer.startList(); + writer.writeVarBinary("row1,1".getBytes()); + writer.writeVarBinary("row1,2".getBytes(), 0, "row1,2".getBytes().length); + writer.writeVarBinary(ByteBuffer.wrap("row1,3".getBytes())); + writer.writeVarBinary(ByteBuffer.wrap("row1,4".getBytes()), 0, "row1,4".getBytes().length); + writer.endList(); + + assertEquals("row1,1", new String((byte[]) (vector.getObject(0).get(0)))); + assertEquals("row1,2", new String((byte[]) (vector.getObject(0).get(1)))); + assertEquals("row1,3", new String((byte[]) (vector.getObject(0).get(2)))); + assertEquals("row1,4", new String((byte[]) (vector.getObject(0).get(3)))); + } + } + + @Test + public void testWriteLargeVarBinaryHelpers() throws Exception { + try (final FixedSizeListVector vector = FixedSizeListVector.empty("vector", /*listSize=*/4, allocator)) { + + UnionFixedSizeListWriter writer = vector.getWriter(); + writer.allocate(); + + writer.startList(); + writer.writeLargeVarBinary("row1,1".getBytes()); + writer.writeLargeVarBinary("row1,2".getBytes(), 0, 
"row1,2".getBytes().length); + writer.writeLargeVarBinary(ByteBuffer.wrap("row1,3".getBytes())); + writer.writeLargeVarBinary(ByteBuffer.wrap("row1,4".getBytes()), 0, "row1,4".getBytes().length); + writer.endList(); + + assertEquals("row1,1", new String((byte[]) (vector.getObject(0).get(0)))); + assertEquals("row1,2", new String((byte[]) (vector.getObject(0).get(1)))); + assertEquals("row1,3", new String((byte[]) (vector.getObject(0).get(2)))); + assertEquals("row1,4", new String((byte[]) (vector.getObject(0).get(3)))); + } + } + + private int[] convertListToIntArray(List list) { int[] values = new int[list.size()]; for (int i = 0; i < list.size(); i++) { values[i] = (int) list.get(i); diff --git a/java/vector/src/test/java/org/apache/arrow/vector/TestMapVector.java b/java/vector/src/test/java/org/apache/arrow/vector/TestMapVector.java index 5c8fd55ec98dc..1db55198e4bb3 100644 --- a/java/vector/src/test/java/org/apache/arrow/vector/TestMapVector.java +++ b/java/vector/src/test/java/org/apache/arrow/vector/TestMapVector.java @@ -1155,4 +1155,27 @@ public void testGetTransferPairWithField() { toVector.clear(); } } + + @Test + public void testGetTransferPairWithFieldAndCallBack() { + SchemaChangeCallBack callBack = new SchemaChangeCallBack(); + try (MapVector mapVector = MapVector.empty("mapVector", allocator, false)) { + + FieldType type = new FieldType(false, ArrowType.Struct.INSTANCE, null, null); + AddOrGetResult addResult = mapVector.addOrGetVector(type); + FieldType keyType = new FieldType(false, MinorType.BIGINT.getType(), null, null); + FieldType valueType = FieldType.nullable(MinorType.FLOAT8.getType()); + addResult.getVector().addOrGet(MapVector.KEY_NAME, keyType, BigIntVector.class); + addResult.getVector().addOrGet(MapVector.VALUE_NAME, valueType, Float8Vector.class); + mapVector.allocateNew(); + mapVector.setValueCount(0); + + assertEquals(-1, mapVector.getLastSet()); + TransferPair tp = mapVector.getTransferPair(mapVector.getField(), allocator, 
callBack); + tp.transfer(); + MapVector toVector = (MapVector) tp.getTo(); + assertSame(toVector.getField(), mapVector.getField()); + toVector.clear(); + } + } } diff --git a/java/vector/src/test/java/org/apache/arrow/vector/TestStructVector.java b/java/vector/src/test/java/org/apache/arrow/vector/TestStructVector.java index ee34f203b6320..68f5e14dabb9b 100644 --- a/java/vector/src/test/java/org/apache/arrow/vector/TestStructVector.java +++ b/java/vector/src/test/java/org/apache/arrow/vector/TestStructVector.java @@ -307,6 +307,18 @@ public void testGetTransferPair() { } } + @Test + public void testGetTransferPairWithFieldAndCallBack() { + SchemaChangeCallBack callBack = new SchemaChangeCallBack(); + try (final StructVector fromVector = simpleStructVector("s1", allocator)) { + TransferPair tp = fromVector.getTransferPair(fromVector.getField(), allocator, callBack); + final StructVector toVector = (StructVector) tp.getTo(); + // Field inside a new vector created by reusing a field should be the same in memory as the original field. 
+ assertSame(toVector.getField(), fromVector.getField()); + toVector.clear(); + } + } + private StructVector simpleStructVector(String name, BufferAllocator allocator) { final String INT_COL = "struct_int_child"; final String FLT_COL = "struct_flt_child"; diff --git a/java/vector/src/test/java/org/apache/arrow/vector/complex/impl/TestPromotableWriter.java b/java/vector/src/test/java/org/apache/arrow/vector/complex/impl/TestPromotableWriter.java index 1068f7c030eb5..4c8c96a0d74d3 100644 --- a/java/vector/src/test/java/org/apache/arrow/vector/complex/impl/TestPromotableWriter.java +++ b/java/vector/src/test/java/org/apache/arrow/vector/complex/impl/TestPromotableWriter.java @@ -28,6 +28,10 @@ import org.apache.arrow.memory.ArrowBuf; import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.vector.DirtyRootAllocator; +import org.apache.arrow.vector.LargeVarBinaryVector; +import org.apache.arrow.vector.LargeVarCharVector; +import org.apache.arrow.vector.VarBinaryVector; +import org.apache.arrow.vector.VarCharVector; import org.apache.arrow.vector.complex.ListVector; import org.apache.arrow.vector.complex.NonNullableStructVector; import org.apache.arrow.vector.complex.StructVector; @@ -43,6 +47,7 @@ import org.apache.arrow.vector.types.pojo.ArrowType.ArrowTypeID; import org.apache.arrow.vector.types.pojo.Field; import org.apache.arrow.vector.types.pojo.FieldType; +import org.apache.arrow.vector.util.Text; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -392,4 +397,196 @@ public void testNoPromoteFixedSizeBinaryToUnionWithNull() throws Exception { buf.close(); } } + + @Test + public void testPromoteLargeVarCharHelpersOnStruct() throws Exception { + try (final NonNullableStructVector container = NonNullableStructVector.empty(EMPTY_SCHEMA_PATH, allocator); + final StructVector v = container.addOrGetStruct("test"); + final PromotableWriter writer = new PromotableWriter(v, container)) { + container.allocateNew(); + + writer.start(); + 
writer.setPosition(0); + writer.largeVarChar("c").writeLargeVarChar(new Text("foo")); + writer.setPosition(1); + writer.largeVarChar("c").writeLargeVarChar("foo2"); + writer.end(); + + final LargeVarCharVector uv = v.getChild("c", LargeVarCharVector.class); + assertEquals("foo", uv.getObject(0).toString()); + assertEquals("foo2", uv.getObject(1).toString()); + } + } + + @Test + public void testPromoteVarCharHelpersOnStruct() throws Exception { + try (final NonNullableStructVector container = NonNullableStructVector.empty(EMPTY_SCHEMA_PATH, allocator); + final StructVector v = container.addOrGetStruct("test"); + final PromotableWriter writer = new PromotableWriter(v, container)) { + container.allocateNew(); + + writer.start(); + writer.setPosition(0); + writer.varChar("c").writeVarChar(new Text("foo")); + writer.setPosition(1); + writer.varChar("c").writeVarChar("foo2"); + writer.end(); + + final VarCharVector uv = v.getChild("c", VarCharVector.class); + assertEquals("foo", uv.getObject(0).toString()); + assertEquals("foo2", uv.getObject(1).toString()); + } + } + + @Test + public void testPromoteVarCharHelpersDirect() throws Exception { + try (final NonNullableStructVector container = NonNullableStructVector.empty(EMPTY_SCHEMA_PATH, allocator); + final StructVector v = container.addOrGetStruct("test"); + final PromotableWriter writer = new PromotableWriter(v, container)) { + container.allocateNew(); + + writer.start(); + writer.setPosition(0); + writer.writeVarChar(new Text("foo")); + writer.setPosition(1); + writer.writeVarChar("foo2"); + writer.end(); + + // The "test" vector in the parent container should have been replaced with a UnionVector. 
+ UnionVector promotedVector = container.getChild("test", UnionVector.class); + VarCharVector vector = promotedVector.getVarCharVector(); + assertEquals("foo", vector.getObject(0).toString()); + assertEquals("foo2", vector.getObject(1).toString()); + } + } + + @Test + public void testPromoteLargeVarCharHelpersDirect() throws Exception { + try (final NonNullableStructVector container = NonNullableStructVector.empty(EMPTY_SCHEMA_PATH, allocator); + final StructVector v = container.addOrGetStruct("test"); + final PromotableWriter writer = new PromotableWriter(v, container)) { + container.allocateNew(); + + writer.start(); + writer.setPosition(0); + writer.writeLargeVarChar(new Text("foo")); + writer.setPosition(1); + writer.writeLargeVarChar("foo2"); + writer.end(); + + // The "test" vector in the parent container should have been replaced with a UnionVector. + UnionVector promotedVector = container.getChild("test", UnionVector.class); + LargeVarCharVector vector = promotedVector.getLargeVarCharVector(); + assertEquals("foo", vector.getObject(0).toString()); + assertEquals("foo2", vector.getObject(1).toString()); + } + } + + @Test + public void testPromoteVarBinaryHelpersOnStruct() throws Exception { + try (final NonNullableStructVector container = NonNullableStructVector.empty(EMPTY_SCHEMA_PATH, allocator); + final StructVector v = container.addOrGetStruct("test"); + final PromotableWriter writer = new PromotableWriter(v, container)) { + container.allocateNew(); + + writer.start(); + writer.setPosition(0); + writer.varBinary("c").writeVarBinary("row1".getBytes()); + writer.setPosition(1); + writer.varBinary("c").writeVarBinary("row2".getBytes(), 0, "row2".getBytes().length); + writer.setPosition(2); + writer.varBinary("c").writeVarBinary(ByteBuffer.wrap("row3".getBytes())); + writer.setPosition(3); + writer.varBinary("c").writeVarBinary(ByteBuffer.wrap("row4".getBytes()), 0, "row4".getBytes().length); + writer.end(); + + final VarBinaryVector uv = v.getChild("c", 
VarBinaryVector.class); + assertEquals("row1", new String(uv.get(0))); + assertEquals("row2", new String(uv.get(1))); + assertEquals("row3", new String(uv.get(2))); + assertEquals("row4", new String(uv.get(3))); + } + } + + @Test + public void testPromoteVarBinaryHelpersDirect() throws Exception { + try (final NonNullableStructVector container = NonNullableStructVector.empty(EMPTY_SCHEMA_PATH, allocator); + final StructVector v = container.addOrGetStruct("test"); + final PromotableWriter writer = new PromotableWriter(v, container)) { + container.allocateNew(); + + writer.start(); + writer.setPosition(0); + writer.writeVarBinary("row1".getBytes()); + writer.setPosition(1); + writer.writeVarBinary("row2".getBytes(), 0, "row2".getBytes().length); + writer.setPosition(2); + writer.writeVarBinary(ByteBuffer.wrap("row3".getBytes())); + writer.setPosition(3); + writer.writeVarBinary(ByteBuffer.wrap("row4".getBytes()), 0, "row4".getBytes().length); + writer.end(); + + // The "test" vector in the parent container should have been replaced with a UnionVector. 
+ UnionVector promotedVector = container.getChild("test", UnionVector.class); + VarBinaryVector uv = promotedVector.getVarBinaryVector(); + assertEquals("row1", new String(uv.get(0))); + assertEquals("row2", new String(uv.get(1))); + assertEquals("row3", new String(uv.get(2))); + assertEquals("row4", new String(uv.get(3))); + } + } + + @Test + public void testPromoteLargeVarBinaryHelpersOnStruct() throws Exception { + try (final NonNullableStructVector container = NonNullableStructVector.empty(EMPTY_SCHEMA_PATH, allocator); + final StructVector v = container.addOrGetStruct("test"); + final PromotableWriter writer = new PromotableWriter(v, container)) { + container.allocateNew(); + + writer.start(); + writer.setPosition(0); + writer.largeVarBinary("c").writeLargeVarBinary("row1".getBytes()); + writer.setPosition(1); + writer.largeVarBinary("c").writeLargeVarBinary("row2".getBytes(), 0, "row2".getBytes().length); + writer.setPosition(2); + writer.largeVarBinary("c").writeLargeVarBinary(ByteBuffer.wrap("row3".getBytes())); + writer.setPosition(3); + writer.largeVarBinary("c").writeLargeVarBinary(ByteBuffer.wrap("row4".getBytes()), 0, "row4".getBytes().length); + writer.end(); + + final LargeVarBinaryVector uv = v.getChild("c", LargeVarBinaryVector.class); + assertEquals("row1", new String(uv.get(0))); + assertEquals("row2", new String(uv.get(1))); + assertEquals("row3", new String(uv.get(2))); + assertEquals("row4", new String(uv.get(3))); + } + } + + @Test + public void testPromoteLargeVarBinaryHelpersDirect() throws Exception { + try (final NonNullableStructVector container = NonNullableStructVector.empty(EMPTY_SCHEMA_PATH, allocator); + final StructVector v = container.addOrGetStruct("test"); + final PromotableWriter writer = new PromotableWriter(v, container)) { + container.allocateNew(); + + writer.start(); + writer.setPosition(0); + writer.writeLargeVarBinary("row1".getBytes()); + writer.setPosition(1); + writer.writeLargeVarBinary("row2".getBytes(), 0, 
"row2".getBytes().length); + writer.setPosition(2); + writer.writeLargeVarBinary(ByteBuffer.wrap("row3".getBytes())); + writer.setPosition(3); + writer.writeLargeVarBinary(ByteBuffer.wrap("row4".getBytes()), 0, "row4".getBytes().length); + writer.end(); + + // The "test" vector in the parent container should have been replaced with a UnionVector. + UnionVector promotedVector = container.getChild("test", UnionVector.class); + LargeVarBinaryVector uv = promotedVector.getLargeVarBinaryVector(); + assertEquals("row1", new String(uv.get(0))); + assertEquals("row2", new String(uv.get(1))); + assertEquals("row3", new String(uv.get(2))); + assertEquals("row4", new String(uv.get(3))); + } + } } diff --git a/java/vector/src/test/java/org/apache/arrow/vector/complex/writer/TestComplexWriter.java b/java/vector/src/test/java/org/apache/arrow/vector/complex/writer/TestComplexWriter.java index 6f7f5abd30ac9..96d39e85f1f4a 100644 --- a/java/vector/src/test/java/org/apache/arrow/vector/complex/writer/TestComplexWriter.java +++ b/java/vector/src/test/java/org/apache/arrow/vector/complex/writer/TestComplexWriter.java @@ -20,6 +20,7 @@ import static org.junit.Assert.*; import java.math.BigDecimal; +import java.nio.ByteBuffer; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.HashSet; @@ -35,7 +36,11 @@ import org.apache.arrow.vector.Float4Vector; import org.apache.arrow.vector.Float8Vector; import org.apache.arrow.vector.IntVector; +import org.apache.arrow.vector.LargeVarBinaryVector; +import org.apache.arrow.vector.LargeVarCharVector; import org.apache.arrow.vector.SchemaChangeCallBack; +import org.apache.arrow.vector.VarBinaryVector; +import org.apache.arrow.vector.VarCharVector; import org.apache.arrow.vector.complex.ListVector; import org.apache.arrow.vector.complex.MapVector; import org.apache.arrow.vector.complex.NonNullableStructVector; @@ -1667,4 +1672,210 @@ public void testMapWithStructKey() { assertEquals(1, 
mapReader.value().readInteger().intValue()); } } + + @Test + public void structWriterVarCharHelpers() { + try (NonNullableStructVector parent = NonNullableStructVector.empty("parent", allocator)) { + ComplexWriter writer = new ComplexWriterImpl("root", parent, false, true); + StructWriter rootWriter = writer.rootAsStruct(); + rootWriter.start(); + rootWriter.setPosition(0); + rootWriter.varChar("c").writeVarChar(new Text("row1")); + rootWriter.setPosition(1); + rootWriter.varChar("c").writeVarChar("row2"); + rootWriter.end(); + + VarCharVector vector = parent.getChild("root", StructVector.class).getChild("c", VarCharVector.class); + + assertEquals("row1", vector.getObject(0).toString()); + assertEquals("row2", vector.getObject(1).toString()); + } + } + + @Test + public void structWriterLargeVarCharHelpers() { + try (NonNullableStructVector parent = NonNullableStructVector.empty("parent", allocator)) { + ComplexWriter writer = new ComplexWriterImpl("root", parent, false, true); + StructWriter rootWriter = writer.rootAsStruct(); + rootWriter.start(); + rootWriter.setPosition(0); + rootWriter.largeVarChar("c").writeLargeVarChar(new Text("row1")); + rootWriter.setPosition(1); + rootWriter.largeVarChar("c").writeLargeVarChar("row2"); + rootWriter.end(); + + LargeVarCharVector vector = parent.getChild("root", StructVector.class).getChild("c", + LargeVarCharVector.class); + + assertEquals("row1", vector.getObject(0).toString()); + assertEquals("row2", vector.getObject(1).toString()); + } + } + + @Test + public void structWriterVarBinaryHelpers() { + try (NonNullableStructVector parent = NonNullableStructVector.empty("parent", allocator)) { + ComplexWriter writer = new ComplexWriterImpl("root", parent, false, true); + StructWriter rootWriter = writer.rootAsStruct(); + rootWriter.start(); + rootWriter.setPosition(0); + rootWriter.varBinary("c").writeVarBinary("row1".getBytes()); + rootWriter.setPosition(1); + rootWriter.varBinary("c").writeVarBinary("row2".getBytes(), 0, 
"row2".getBytes().length); + rootWriter.setPosition(2); + rootWriter.varBinary("c").writeVarBinary(ByteBuffer.wrap("row3".getBytes())); + rootWriter.setPosition(3); + rootWriter.varBinary("c").writeVarBinary(ByteBuffer.wrap("row4".getBytes()), 0, "row4".getBytes().length); + rootWriter.end(); + + VarBinaryVector uv = parent.getChild("root", StructVector.class).getChild("c", VarBinaryVector.class); + + assertEquals("row1", new String(uv.get(0))); + assertEquals("row2", new String(uv.get(1))); + assertEquals("row3", new String(uv.get(2))); + assertEquals("row4", new String(uv.get(3))); + } + } + + @Test + public void structWriterLargeVarBinaryHelpers() { + try (NonNullableStructVector parent = NonNullableStructVector.empty("parent", allocator)) { + ComplexWriter writer = new ComplexWriterImpl("root", parent, false, true); + StructWriter rootWriter = writer.rootAsStruct(); + rootWriter.start(); + rootWriter.setPosition(0); + rootWriter.largeVarBinary("c").writeLargeVarBinary("row1".getBytes()); + rootWriter.setPosition(1); + rootWriter.largeVarBinary("c").writeLargeVarBinary("row2".getBytes(), 0, "row2".getBytes().length); + rootWriter.setPosition(2); + rootWriter.largeVarBinary("c").writeLargeVarBinary(ByteBuffer.wrap("row3".getBytes())); + rootWriter.setPosition(3); + rootWriter.largeVarBinary("c").writeLargeVarBinary(ByteBuffer.wrap("row4".getBytes()), 0, + "row4".getBytes().length); + rootWriter.end(); + + LargeVarBinaryVector uv = parent.getChild("root", StructVector.class).getChild("c", + LargeVarBinaryVector.class); + + assertEquals("row1", new String(uv.get(0))); + assertEquals("row2", new String(uv.get(1))); + assertEquals("row3", new String(uv.get(2))); + assertEquals("row4", new String(uv.get(3))); + } + } + + @Test + public void listVarCharHelpers() { + try (ListVector listVector = ListVector.empty("list", allocator)) { + listVector.allocateNew(); + UnionListWriter listWriter = new UnionListWriter(listVector); + listWriter.startList(); + 
listWriter.writeVarChar("row1"); + listWriter.writeVarChar(new Text("row2")); + listWriter.endList(); + listWriter.setValueCount(1); + assertEquals("row1", listVector.getObject(0).get(0).toString()); + assertEquals("row2", listVector.getObject(0).get(1).toString()); + } + } + + @Test + public void listLargeVarCharHelpers() { + try (ListVector listVector = ListVector.empty("list", allocator)) { + listVector.allocateNew(); + UnionListWriter listWriter = new UnionListWriter(listVector); + listWriter.startList(); + listWriter.writeLargeVarChar("row1"); + listWriter.writeLargeVarChar(new Text("row2")); + listWriter.endList(); + listWriter.setValueCount(1); + assertEquals("row1", listVector.getObject(0).get(0).toString()); + assertEquals("row2", listVector.getObject(0).get(1).toString()); + } + } + + @Test + public void listVarBinaryHelpers() { + try (ListVector listVector = ListVector.empty("list", allocator)) { + listVector.allocateNew(); + UnionListWriter listWriter = new UnionListWriter(listVector); + listWriter.startList(); + listWriter.writeVarBinary("row1".getBytes()); + listWriter.writeVarBinary("row2".getBytes(), 0, "row2".getBytes().length); + listWriter.writeVarBinary(ByteBuffer.wrap("row3".getBytes())); + listWriter.writeVarBinary(ByteBuffer.wrap("row4".getBytes()), 0, "row4".getBytes().length); + listWriter.endList(); + listWriter.setValueCount(1); + assertEquals("row1", new String((byte[]) listVector.getObject(0).get(0))); + assertEquals("row2", new String((byte[]) listVector.getObject(0).get(1))); + assertEquals("row3", new String((byte[]) listVector.getObject(0).get(2))); + assertEquals("row4", new String((byte[]) listVector.getObject(0).get(3))); + } + } + + @Test + public void listLargeVarBinaryHelpers() { + try (ListVector listVector = ListVector.empty("list", allocator)) { + listVector.allocateNew(); + UnionListWriter listWriter = new UnionListWriter(listVector); + listWriter.startList(); + listWriter.writeLargeVarBinary("row1".getBytes()); + 
listWriter.writeLargeVarBinary("row2".getBytes(), 0, "row2".getBytes().length); + listWriter.writeLargeVarBinary(ByteBuffer.wrap("row3".getBytes())); + listWriter.writeLargeVarBinary(ByteBuffer.wrap("row4".getBytes()), 0, "row4".getBytes().length); + listWriter.endList(); + listWriter.setValueCount(1); + assertEquals("row1", new String((byte[]) listVector.getObject(0).get(0))); + assertEquals("row2", new String((byte[]) listVector.getObject(0).get(1))); + assertEquals("row3", new String((byte[]) listVector.getObject(0).get(2))); + assertEquals("row4", new String((byte[]) listVector.getObject(0).get(3))); + } + } + + @Test + public void unionWithVarCharAndBinaryHelpers() throws Exception { + try (UnionVector vector = new UnionVector("union", allocator, /* field type */ null, /* call-back */ null)) { + UnionWriter unionWriter = new UnionWriter(vector); + unionWriter.allocate(); + unionWriter.start(); + unionWriter.setPosition(0); + unionWriter.writeVarChar("row1"); + unionWriter.setPosition(1); + unionWriter.writeVarChar(new Text("row2")); + unionWriter.setPosition(2); + unionWriter.writeLargeVarChar("row3"); + unionWriter.setPosition(3); + unionWriter.writeLargeVarChar(new Text("row4")); + unionWriter.setPosition(4); + unionWriter.writeVarBinary("row5".getBytes()); + unionWriter.setPosition(5); + unionWriter.writeVarBinary("row6".getBytes(), 0, "row6".getBytes().length); + unionWriter.setPosition(6); + unionWriter.writeVarBinary(ByteBuffer.wrap("row7".getBytes())); + unionWriter.setPosition(7); + unionWriter.writeVarBinary(ByteBuffer.wrap("row8".getBytes()), 0, "row8".getBytes().length); + unionWriter.setPosition(8); + unionWriter.writeLargeVarBinary("row9".getBytes()); + unionWriter.setPosition(9); + unionWriter.writeLargeVarBinary("row10".getBytes(), 0, "row10".getBytes().length); + unionWriter.setPosition(10); + unionWriter.writeLargeVarBinary(ByteBuffer.wrap("row11".getBytes())); + unionWriter.setPosition(11); + 
unionWriter.writeLargeVarBinary(ByteBuffer.wrap("row12".getBytes()), 0, "row12".getBytes().length); + unionWriter.end(); + + assertEquals("row1", new String(vector.getVarCharVector().get(0))); + assertEquals("row2", new String(vector.getVarCharVector().get(1))); + assertEquals("row3", new String(vector.getLargeVarCharVector().get(2))); + assertEquals("row4", new String(vector.getLargeVarCharVector().get(3))); + assertEquals("row5", new String(vector.getVarBinaryVector().get(4))); + assertEquals("row6", new String(vector.getVarBinaryVector().get(5))); + assertEquals("row7", new String(vector.getVarBinaryVector().get(6))); + assertEquals("row8", new String(vector.getVarBinaryVector().get(7))); + assertEquals("row9", new String(vector.getLargeVarBinaryVector().get(8))); + assertEquals("row10", new String(vector.getLargeVarBinaryVector().get(9))); + assertEquals("row11", new String(vector.getLargeVarBinaryVector().get(10))); + assertEquals("row12", new String(vector.getLargeVarBinaryVector().get(11))); + } + } } diff --git a/java/vector/src/test/java/org/apache/arrow/vector/complex/writer/TestSimpleWriter.java b/java/vector/src/test/java/org/apache/arrow/vector/complex/writer/TestSimpleWriter.java index ef918b13fb691..27b8f1796ee31 100644 --- a/java/vector/src/test/java/org/apache/arrow/vector/complex/writer/TestSimpleWriter.java +++ b/java/vector/src/test/java/org/apache/arrow/vector/complex/writer/TestSimpleWriter.java @@ -54,7 +54,7 @@ public void testWriteByteArrayToVarBinary() throws Exception { try (VarBinaryVector vector = new VarBinaryVector("test", allocator); VarBinaryWriter writer = new VarBinaryWriterImpl(vector)) { byte[] input = new byte[] { 0x01, 0x02 }; - writer.writeToVarBinary(input); + writer.writeVarBinary(input); byte[] result = vector.get(0); Assert.assertArrayEquals(input, result); } @@ -65,7 +65,7 @@ public void testWriteByteArrayWithOffsetToVarBinary() throws Exception { try (VarBinaryVector vector = new VarBinaryVector("test", allocator); 
VarBinaryWriter writer = new VarBinaryWriterImpl(vector)) { byte[] input = new byte[] { 0x01, 0x02 }; - writer.writeToVarBinary(input, 1, 1); + writer.writeVarBinary(input, 1, 1); byte[] result = vector.get(0); Assert.assertArrayEquals(new byte[] { 0x02 }, result); } @@ -77,7 +77,7 @@ public void testWriteByteBufferToVarBinary() throws Exception { VarBinaryWriter writer = new VarBinaryWriterImpl(vector)) { byte[] input = new byte[] { 0x01, 0x02 }; ByteBuffer buffer = ByteBuffer.wrap(input); - writer.writeToVarBinary(buffer); + writer.writeVarBinary(buffer); byte[] result = vector.get(0); Assert.assertArrayEquals(input, result); } @@ -89,7 +89,7 @@ public void testWriteByteBufferWithOffsetToVarBinary() throws Exception { VarBinaryWriter writer = new VarBinaryWriterImpl(vector)) { byte[] input = new byte[] { 0x01, 0x02 }; ByteBuffer buffer = ByteBuffer.wrap(input); - writer.writeToVarBinary(buffer, 1, 1); + writer.writeVarBinary(buffer, 1, 1); byte[] result = vector.get(0); Assert.assertArrayEquals(new byte[] { 0x02 }, result); } @@ -100,7 +100,7 @@ public void testWriteByteArrayToLargeVarBinary() throws Exception { try (LargeVarBinaryVector vector = new LargeVarBinaryVector("test", allocator); LargeVarBinaryWriter writer = new LargeVarBinaryWriterImpl(vector)) { byte[] input = new byte[] { 0x01, 0x02 }; - writer.writeToLargeVarBinary(input); + writer.writeLargeVarBinary(input); byte[] result = vector.get(0); Assert.assertArrayEquals(input, result); } @@ -111,7 +111,7 @@ public void testWriteByteArrayWithOffsetToLargeVarBinary() throws Exception { try (LargeVarBinaryVector vector = new LargeVarBinaryVector("test", allocator); LargeVarBinaryWriter writer = new LargeVarBinaryWriterImpl(vector)) { byte[] input = new byte[] { 0x01, 0x02 }; - writer.writeToLargeVarBinary(input, 1, 1); + writer.writeLargeVarBinary(input, 1, 1); byte[] result = vector.get(0); Assert.assertArrayEquals(new byte[] { 0x02 }, result); } @@ -123,7 +123,7 @@ public void 
testWriteByteBufferToLargeVarBinary() throws Exception { LargeVarBinaryWriter writer = new LargeVarBinaryWriterImpl(vector)) { byte[] input = new byte[] { 0x01, 0x02 }; ByteBuffer buffer = ByteBuffer.wrap(input); - writer.writeToLargeVarBinary(buffer); + writer.writeLargeVarBinary(buffer); byte[] result = vector.get(0); Assert.assertArrayEquals(input, result); } @@ -135,7 +135,7 @@ public void testWriteByteBufferWithOffsetToLargeVarBinary() throws Exception { LargeVarBinaryWriter writer = new LargeVarBinaryWriterImpl(vector)) { byte[] input = new byte[] { 0x01, 0x02 }; ByteBuffer buffer = ByteBuffer.wrap(input); - writer.writeToLargeVarBinary(buffer, 1, 1); + writer.writeLargeVarBinary(buffer, 1, 1); byte[] result = vector.get(0); Assert.assertArrayEquals(new byte[] { 0x02 }, result); } diff --git a/java/vector/src/test/java/org/apache/arrow/vector/util/TestMapWithOrdinal.java b/java/vector/src/test/java/org/apache/arrow/vector/util/TestMapWithOrdinal.java new file mode 100644 index 0000000000000..edd5221faf268 --- /dev/null +++ b/java/vector/src/test/java/org/apache/arrow/vector/util/TestMapWithOrdinal.java @@ -0,0 +1,97 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.arrow.vector.util; + +import static junit.framework.TestCase.assertNull; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.util.Collection; + +import org.junit.Before; +import org.junit.Test; + +public class TestMapWithOrdinal { + + private MapWithOrdinal map; + + @Before + public void setUp() { + map = new MapWithOrdinalImpl<>(); + } + + @Test + public void testGetByOrdinal() { + map.put("key0", "val0", true); + assertEquals("val0", map.getByOrdinal(0)); + + map.put("key1", "val1", true); + assertEquals("val1", map.getByOrdinal(1)); + assertEquals("val0", map.getByOrdinal(0)); + } + + @Test + public void testGetByKey() { + map.put("key0", "val0", true); + assertEquals("val0", map.get("key0")); + + map.put("key1", "val1", true); + assertEquals("val1", map.get("key1")); + assertEquals("val0", map.get("key0")); + } + + @Test + public void testInvalidOrdinal() { + map.put("key0", "val0", true); + assertNull(map.getByOrdinal(1)); + + map.removeAll("key0"); + assertNull(map.getByOrdinal(0)); + } + + @Test + public void testInvalidKey() { + MapWithOrdinalImpl map = new MapWithOrdinalImpl<>(); + map.put("key0", "val0", true); + assertNull(map.get("fake_key")); + + map.removeAll("key0"); + assertNull(map.get("key0")); + } + + @Test + public void testValues() { + map.put("key0", "val0", true); + map.put("key1", "val1", true); + + Collection values = map.values(); + assertTrue(values.contains("val0")); + assertTrue(values.contains("val1")); + + map.put("key1", "new_val1", true); + values = map.values(); + assertTrue(values.contains("val0")); + assertTrue(values.contains("new_val1")); + assertFalse(values.contains("val1")); + + map.removeAll("key0"); + assertTrue(values.contains("new_val1")); + assertFalse(values.contains("val0")); + } +} diff --git a/js/.vscode/launch.json b/js/.vscode/launch.json index 7d169ccb26274..21255b61c39a5 100644 --- 
a/js/.vscode/launch.json +++ b/js/.vscode/launch.json @@ -31,7 +31,7 @@ "args": { "cwd": "${workspaceFolder}", "description": "Select a file to debug", - "command": "./node_modules/.bin/jest --listTests | sed -r \"s@$PWD/test/@@g\"", + "command": "node_modules/.bin/jest --listTests | sed -r \"s@$PWD/test/@@g\"", } }, { @@ -98,25 +98,23 @@ "request": "launch", "name": "Debug Integration Tests", "cwd": "${workspaceFolder}", - "program": "${workspaceFolder}/bin/integration.js", + "program": "${workspaceFolder}/bin/integration.ts", "skipFiles": [ "/**/*.js", "${workspaceFolder}/node_modules/**/*.js" ], "env": { "NODE_NO_WARNINGS": "1", - "ARROW_JS_DEBUG": "src", "TS_NODE_CACHE": "false" }, "runtimeArgs": [ - "-r", - "ts-node/register" + "--loader", "ts-node/esm/transpile-only" ], "args": [ "--mode", "VALIDATE", "-j", "test/data/json/unions.json", - "-a", "./test/data/cpp/stream/struct_example.arrow" + "-a", "test/data/cpp/stream/unions.arrow" ] }, { @@ -140,8 +138,7 @@ "${workspaceFolder}/node_modules/**/*.js" ], "runtimeArgs": [ - "--loader", - "ts-node/esm/transpile-only" + "--loader", "ts-node/esm/transpile-only" ] }, { @@ -150,12 +147,10 @@ "name": "Debug bin/arrow2csv", "cwd": "${workspaceFolder}", "env": { - "ARROW_JS_DEBUG": "src", "TS_NODE_CACHE": "false" }, "runtimeArgs": [ - "-r", - "ts-node/register" + "--loader", "ts-node/esm/transpile-only" ], "console": "integratedTerminal", "skipFiles": [ @@ -165,7 +160,7 @@ "args": [ "${workspaceFolder}/src/bin/arrow2csv.ts", "-f", - "./test/data/cpp/stream/simple.arrow" + "test/data/cpp/stream/simple.arrow" ] }, { @@ -174,21 +169,19 @@ "name": "Debug bin/file-to-stream", "cwd": "${workspaceFolder}", "env": { - "ARROW_JS_DEBUG": "src", "TS_NODE_CACHE": "false" }, "runtimeArgs": [ - "-r", - "ts-node/register" + "--loader", "ts-node/esm/transpile-only" ], "skipFiles": [ "/**/*.js", "${workspaceFolder}/node_modules/**/*.js" ], "args": [ - "${workspaceFolder}/bin/file-to-stream.js", - 
"./test/data/cpp/file/struct_example.arrow", - "./struct_example-stream-out.arrow", + "${workspaceFolder}/bin/file-to-stream.ts", + "test/data/cpp/file/struct_example.arrow", + "struct_example-stream-out.arrow", ] }, { @@ -197,21 +190,19 @@ "name": "Debug bin/stream-to-file", "cwd": "${workspaceFolder}", "env": { - "ARROW_JS_DEBUG": "src", "TS_NODE_CACHE": "false" }, "runtimeArgs": [ - "-r", - "ts-node/register" + "--loader", "ts-node/esm/transpile-only" ], "skipFiles": [ "/**/*.js", "${workspaceFolder}/node_modules/**/*.js" ], "args": [ - "${workspaceFolder}/bin/stream-to-file.js", - "./test/data/cpp/stream/struct_example.arrow", - "./struct_example-file-out.arrow", + "${workspaceFolder}/bin/stream-to-file.ts", + "test/data/cpp/stream/struct_example.arrow", + "struct_example-file-out.arrow", ] }, { @@ -220,23 +211,21 @@ "name": "Debug bin/json-to-arrow", "cwd": "${workspaceFolder}", "env": { - "ARROW_JS_DEBUG": "src", "TS_NODE_CACHE": "false" }, "runtimeArgs": [ - "-r", - "ts-node/register" + "--loader", "ts-node/esm/transpile-only" ], "skipFiles": [ "/**/*.js", "${workspaceFolder}/node_modules/**/*.js" ], "args": [ - "${workspaceFolder}/bin/json-to-arrow.js", + "${workspaceFolder}/bin/json-to-arrow.ts", "-j", - "./test/data/json/struct_example.json", + "test/data/json/struct_example.json", "-a", - "./struct_example-stream-out.arrow", + "struct_example-stream-out.arrow", "-f", "stream" ] @@ -247,20 +236,18 @@ "name": "Debug bin/print-buffer-alignment", "cwd": "${workspaceFolder}", "env": { - "ARROW_JS_DEBUG": "src", "TS_NODE_CACHE": "false" }, "runtimeArgs": [ - "-r", - "ts-node/register" + "--loader", "ts-node/esm/transpile-only" ], "skipFiles": [ "/**/*.js", "${workspaceFolder}/node_modules/**/*.js" ], "args": [ - "${workspaceFolder}/bin/print-buffer-alignment.js", - "./test/data/cpp/stream/struct_example.arrow" + "${workspaceFolder}/bin/print-buffer-alignment.ts", + "test/data/cpp/stream/struct_example.arrow" ] }, { diff --git a/js/README.md b/js/README.md 
index f626ef7240fca..f3dd3fef890f2 100644 --- a/js/README.md +++ b/js/README.md @@ -151,7 +151,7 @@ Strings can be encoded as UTF-8 or dictionary encoded UTF-8. Dictionary encoding ```js import { makeVector, vectorFromArray, Dictionary, Uint8, Utf8 } from "apache-arrow"; -const uft8Vector = vectorFromArray(['foo', 'bar', 'baz'], new Utf8); +const utf8Vector = vectorFromArray(['foo', 'bar', 'baz'], new Utf8); const dictionaryVector1 = vectorFromArray( ['foo', 'bar', 'baz', 'foo', 'bar'] @@ -159,7 +159,7 @@ const dictionaryVector1 = vectorFromArray( const dictionaryVector2 = makeVector({ data: [0, 1, 2, 0, 1], // indexes into the dictionary - dictionary: uft8Vector, + dictionary: utf8Vector, type: new Dictionary(new Utf8, new Uint8) }); ``` diff --git a/js/bin/arrow2csv.js b/js/bin/arrow2csv.cjs similarity index 100% rename from js/bin/arrow2csv.js rename to js/bin/arrow2csv.cjs diff --git a/js/bin/file-to-stream.js b/js/bin/file-to-stream.ts similarity index 73% rename from js/bin/file-to-stream.js rename to js/bin/file-to-stream.ts index 7fab54e2cb09f..9dad4951f96e1 100755 --- a/js/bin/file-to-stream.js +++ b/js/bin/file-to-stream.ts @@ -1,4 +1,4 @@ -#! /usr/bin/env node +#! /usr/bin/env -S node --no-warnings --loader ts-node/esm/transpile-only // Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file @@ -17,18 +17,15 @@ // specific language governing permissions and limitations // under the License. -// @ts-check - -const fs = require('fs'); -const path = require('path'); -const eos = require('util').promisify(require('stream').finished); -const extension = process.env.ARROW_JS_DEBUG === 'src' ? 
'.ts' : '.cjs'; -const { RecordBatchReader, RecordBatchStreamWriter } = require(`../index${extension}`); +import * as fs from 'fs'; +import * as Path from 'path'; +import { finished as eos } from 'stream/promises'; +import { RecordBatchReader, RecordBatchStreamWriter } from '../index.ts'; (async () => { - const readable = process.argv.length < 3 ? process.stdin : fs.createReadStream(path.resolve(process.argv[2])); - const writable = process.argv.length < 4 ? process.stdout : fs.createWriteStream(path.resolve(process.argv[3])); + const readable = process.argv.length < 3 ? process.stdin : fs.createReadStream(Path.resolve(process.argv[2])); + const writable = process.argv.length < 4 ? process.stdout : fs.createWriteStream(Path.resolve(process.argv[3])); const fileToStream = readable .pipe(RecordBatchReader.throughNode()) diff --git a/js/bin/integration.js b/js/bin/integration.ts similarity index 76% rename from js/bin/integration.js rename to js/bin/integration.ts index 583b7a2161d7d..f9aad3422ae72 100755 --- a/js/bin/integration.js +++ b/js/bin/integration.ts @@ -1,4 +1,4 @@ -#! /usr/bin/env node +#! /usr/bin/env -S node --no-warnings --loader ts-node/esm/transpile-only // Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file @@ -17,23 +17,28 @@ // specific language governing permissions and limitations // under the License. 
-// @ts-nocheck +import * as fs from 'fs'; +import * as Path from 'path'; +import { glob } from 'glob'; +import { zip } from 'ix/iterable/zip.js'; +import commandLineArgs from 'command-line-args'; +// @ts-ignore +import { parse as bignumJSONParse } from 'json-bignum'; -const fs = require('fs'); -const Path = require('path'); -const { glob } = require('glob'); -const { zip } = require('ix/iterable/zip'); -const { parse: bignumJSONParse } = require('json-bignum'); -const argv = require(`command-line-args`)(cliOpts(), { partial: true }); -const extension = process.env.ARROW_JS_DEBUG === 'src' ? '.ts' : '.cjs'; -const { +import { Table, + Vector, + RecordBatch, + ArrowJSONLike, RecordBatchReader, RecordBatchStreamWriter, - util: { createElementComparator } -} = require(`../index${extension}`); + util, +} from '../index.ts'; -const exists = async (p) => { +const { createElementComparator } = util; +const argv = commandLineArgs(cliOpts(), { partial: true }); + +const exists = async (p: string) => { try { return !!(await fs.promises.stat(p)); } catch (e) { return false; } @@ -60,7 +65,7 @@ const exists = async (p) => { for (let [jsonPath, arrowPath] of zip(jsonPaths, arrowPaths)) { try { await validate(jsonPath, arrowPath); - } catch (e) { + } catch (e: any) { threw = true; e && process.stderr.write(`${e?.stack || e}\n`); } @@ -108,7 +113,7 @@ function print_usage() { { header: 'Synopsis', content: [ - '$ integration.js -j file.json -a file.arrow --mode validate' + '$ integration.ts -j file.json -a file.arrow --mode validate' ] }, { @@ -125,7 +130,7 @@ function print_usage() { return 1; } -async function validate(jsonPath, arrowPath) { +async function validate(jsonPath: string, arrowPath: string) { const files = await Promise.all([ fs.promises.readFile(arrowPath), @@ -147,7 +152,7 @@ async function validate(jsonPath, arrowPath) { validateTableToBuffersIntegration('binary', 'file')(jsonData, arrowData); } -function validateReaderIntegration(jsonData, arrowBuffer) { 
+function validateReaderIntegration(jsonData: ArrowJSONLike, arrowBuffer: Uint8Array) { const msg = `json and arrow record batches report the same values`; try { const jsonReader = RecordBatchReader.from(jsonData); @@ -155,57 +160,57 @@ function validateReaderIntegration(jsonData, arrowBuffer) { for (const [jsonRecordBatch, binaryRecordBatch] of zip(jsonReader, binaryReader)) { compareTableIsh(jsonRecordBatch, binaryRecordBatch); } - } catch (e) { throw new Error(`${msg}: fail \n ${e?.stack || e}`); } + } catch (e: any) { throw new Error(`${msg}: fail \n ${e?.stack || e}`); } process.stdout.write(`${msg}: pass\n`); } -function validateTableFromBuffersIntegration(jsonData, arrowBuffer) { +function validateTableFromBuffersIntegration(jsonData: ArrowJSONLike, arrowBuffer: Uint8Array) { const msg = `json and arrow tables report the same values`; try { const jsonTable = new Table(RecordBatchReader.from(jsonData)); const binaryTable = new Table(RecordBatchReader.from(arrowBuffer)); compareTableIsh(jsonTable, binaryTable); - } catch (e) { throw new Error(`${msg}: fail \n ${e?.stack || e}`); } + } catch (e: any) { throw new Error(`${msg}: fail \n ${e?.stack || e}`); } process.stdout.write(`${msg}: pass\n`); } -function validateTableToBuffersIntegration(srcFormat, arrowFormat) { +function validateTableToBuffersIntegration(srcFormat: 'json' | 'binary', arrowFormat: 'file' | 'stream') { const refFormat = srcFormat === `json` ? `binary` : `json`; - return function testTableToBuffersIntegration(jsonData, arrowBuffer) { + return function testTableToBuffersIntegration(jsonData: ArrowJSONLike, arrowBuffer: Uint8Array) { const msg = `serialized ${srcFormat} ${arrowFormat} reports the same values as the ${refFormat} ${arrowFormat}`; try { - const refTable = new Table(RecordBatchReader.from(refFormat === `json` ? jsonData : arrowBuffer)); - const srcTable = new Table(RecordBatchReader.from(srcFormat === `json` ? 
jsonData : arrowBuffer)); + const refTable = new Table(refFormat === `json` ? RecordBatchReader.from(jsonData) : RecordBatchReader.from(arrowBuffer)); + const srcTable = new Table(srcFormat === `json` ? RecordBatchReader.from(jsonData) : RecordBatchReader.from(arrowBuffer)); const dstTable = new Table(RecordBatchReader.from(RecordBatchStreamWriter.writeAll(srcTable).toUint8Array(true))); compareTableIsh(dstTable, refTable); - } catch (e) { throw new Error(`${msg}: fail \n ${e?.stack || e}`); } + } catch (e: any) { throw new Error(`${msg}: fail \n ${e?.stack || e}`); } process.stdout.write(`${msg}: pass\n`); }; } -function compareTableIsh(actual, expected) { - if (actual.length !== expected.length) { - throw new Error(`length: ${actual.length} !== ${expected.length}`); +function compareTableIsh(actual: Table | RecordBatch, expected: Table | RecordBatch) { + if (actual.numRows !== expected.numRows) { + throw new Error(`numRows: ${actual.numRows} !== ${expected.numRows}`); } if (actual.numCols !== expected.numCols) { throw new Error(`numCols: ${actual.numCols} !== ${expected.numCols}`); } (() => { for (let i = -1, n = actual.numCols; ++i < n;) { - const v1 = actual.getChildAt(i); - const v2 = expected.getChildAt(i); + const v1 = actual.getChildAt(i)!; + const v2 = expected.getChildAt(i)!; compareVectors(v1, v2); } })(); } -function compareVectors(actual, expected) { +function compareVectors(actual: Vector, expected: Vector) { if ((actual == null && expected != null) || (expected == null && actual != null)) { throw new Error(`${actual == null ? `actual` : `expected`} is null, was expecting ${actual ?? 
expected} to be that also`); } - const props = ['type', 'length', 'nullCount']; + const props = ['type', 'length', 'nullCount'] as (keyof Vector & string)[]; (() => { for (let i = -1, n = props.length; ++i < n;) { @@ -236,7 +241,7 @@ function compareVectors(actual, expected) { })(); } -async function loadLocalJSONAndArrowPathsForDebugging(jsonPaths, arrowPaths) { +async function loadLocalJSONAndArrowPathsForDebugging(jsonPaths: string[], arrowPaths: string[]) { const sourceJSONPaths = await glob(Path.resolve(__dirname, `../test/data/json/`, `*.json`)); @@ -254,7 +259,13 @@ async function loadLocalJSONAndArrowPathsForDebugging(jsonPaths, arrowPaths) { return [jsonPaths, arrowPaths]; - async function loadJSONAndArrowPaths(sourceJSONPaths, jsonPaths, arrowPaths, source, format) { + async function loadJSONAndArrowPaths( + sourceJSONPaths: string[], + jsonPaths: string[], + arrowPaths: string[], + source: 'cpp' | 'java', + format: 'file' | 'stream' + ) { for (const jsonPath of sourceJSONPaths) { const { name } = Path.parse(jsonPath); const arrowPath = Path.resolve(__dirname, `../test/data/${source}/${format}/${name}.arrow`); diff --git a/js/bin/json-to-arrow.js b/js/bin/json-to-arrow.ts similarity index 82% rename from js/bin/json-to-arrow.js rename to js/bin/json-to-arrow.ts index 20442ac062358..49726706a1be6 100755 --- a/js/bin/json-to-arrow.js +++ b/js/bin/json-to-arrow.ts @@ -1,4 +1,4 @@ -#! /usr/bin/env node +#! /usr/bin/env -S node --no-warnings --loader ts-node/esm/transpile-only // Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file @@ -17,16 +17,15 @@ // specific language governing permissions and limitations // under the License. -// @ts-check - -const fs = require('fs'); -const Path = require('path'); -const { parse } = require('json-bignum'); -const eos = require('util').promisify(require('stream').finished); -const extension = process.env.ARROW_JS_DEBUG === 'src' ? 
'.ts' : '.cjs'; -const argv = require(`command-line-args`)(cliOpts(), { partial: true }); -const { RecordBatchReader, RecordBatchFileWriter, RecordBatchStreamWriter } = require(`../index${extension}`); +import * as fs from 'fs'; +import * as Path from 'path'; +import commandLineArgs from 'command-line-args'; +import { finished as eos } from 'stream/promises'; +// @ts-ignore +import { parse as bignumJSONParse } from 'json-bignum'; +import { RecordBatchReader, RecordBatchFileWriter, RecordBatchStreamWriter } from '../index.ts'; +const argv = commandLineArgs(cliOpts(), { partial: true }); const jsonPaths = [...(argv.json || [])]; const arrowPaths = [...(argv.arrow || [])]; @@ -42,7 +41,7 @@ const arrowPaths = [...(argv.arrow || [])]; ? RecordBatchFileWriter : RecordBatchStreamWriter; - const reader = RecordBatchReader.from(parse( + const reader = RecordBatchReader.from(bignumJSONParse( await fs.promises.readFile(Path.resolve(path), 'utf8'))); const jsonToArrow = reader @@ -50,8 +49,9 @@ const arrowPaths = [...(argv.arrow || [])]; .pipe(fs.createWriteStream(arrowPaths[i])); await eos(jsonToArrow); - })); + + return undefined; })() .then((x) => x ?? 0, (e) => { e && process.stderr.write(`${e}`); @@ -90,7 +90,7 @@ function print_usage() { { header: 'Synopsis', content: [ - '$ json-to-arrow.js -j in.json -a out.arrow -f stream' + '$ json-to-arrow.ts -j in.json -a out.arrow -f stream' ] }, { diff --git a/js/bin/package.json b/js/bin/package.json deleted file mode 100644 index 6a0d2ef2aa8b2..0000000000000 --- a/js/bin/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "commonjs" -} \ No newline at end of file diff --git a/js/bin/print-buffer-alignment.js b/js/bin/print-buffer-alignment.ts similarity index 86% rename from js/bin/print-buffer-alignment.js rename to js/bin/print-buffer-alignment.ts index 0dd46c2da1c5d..07563af5a87e8 100755 --- a/js/bin/print-buffer-alignment.js +++ b/js/bin/print-buffer-alignment.ts @@ -1,4 +1,4 @@ -#! /usr/bin/env node +#! 
/usr/bin/env -S node --no-warnings --loader ts-node/esm/transpile-only // Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file @@ -17,19 +17,14 @@ // specific language governing permissions and limitations // under the License. -// @ts-check - -const fs = require('fs'); -const path = require('path'); -const extension = process.env.ARROW_JS_DEBUG === 'src' ? '.ts' : '.cjs'; -const { VectorLoader } = process.env.ARROW_JS_DEBUG - ? require(`../src/visitor/vectorloader${extension}`) - : require(`../targets/apache-arrow/visitor/vectorloader`); -const { RecordBatch, AsyncMessageReader, makeData, Struct, Schema, Field } = require(`../index${extension}`); +import * as fs from 'fs'; +import * as Path from 'path'; +import { VectorLoader } from '../src/visitor/vectorloader.ts'; +import { RecordBatch, AsyncMessageReader, makeData, Struct, Schema, Field } from '../index.ts'; (async () => { - const readable = process.argv.length < 3 ? process.stdin : fs.createReadStream(path.resolve(process.argv[2])); + const readable = process.argv.length < 3 ? 
process.stdin : fs.createReadStream(Path.resolve(process.argv[2])); const reader = new AsyncMessageReader(readable); let schema, metadataLength, message; @@ -38,10 +33,13 @@ const { RecordBatch, AsyncMessageReader, makeData, Struct, Schema, Field } = req let dictionaryBatchCount = 0; while (1) { + // @ts-ignore if ((metadataLength = (await reader.readMetadataLength())).done) { break; } if (metadataLength.value === -1) { + // @ts-ignore if ((metadataLength = (await reader.readMetadataLength())).done) { break; } } + // @ts-ignore if ((message = (await reader.readMetadata(metadataLength.value))).done) { break; } if (message.value.isSchema()) { @@ -74,7 +72,7 @@ const { RecordBatch, AsyncMessageReader, makeData, Struct, Schema, Field } = req } else if (message.value.isDictionaryBatch()) { const header = message.value.header(); const bufferRegions = header.data.buffers; - const type = schema.dictionaries.get(header.id); + const type = schema!.dictionaries.get(header.id); const body = await reader.readMessageBody(message.value.bodyLength); const recordBatch = loadDictionaryBatch(header.data, body, type); console.log( @@ -98,7 +96,7 @@ const { RecordBatch, AsyncMessageReader, makeData, Struct, Schema, Field } = req })().catch((e) => { console.error(e); process.exit(1); }); -function loadRecordBatch(schema, header, body) { +function loadRecordBatch(schema: any, header: any, body: any) { const children = new VectorLoader(body, header.nodes, header.buffers, new Map()).visitMany(schema.fields); return new RecordBatch( schema, @@ -110,7 +108,7 @@ function loadRecordBatch(schema, header, body) { ); } -function loadDictionaryBatch(header, body, dictionaryType) { +function loadDictionaryBatch(header: any, body: any, dictionaryType: any) { const schema = new Schema([new Field('', dictionaryType)]); const children = new VectorLoader(body, header.nodes, header.buffers, new Map()).visitMany([dictionaryType]); return new RecordBatch( diff --git a/js/bin/stream-to-file.js 
b/js/bin/stream-to-file.ts similarity index 80% rename from js/bin/stream-to-file.js rename to js/bin/stream-to-file.ts index 5cd9e76f8085f..6e09ead2fde19 100755 --- a/js/bin/stream-to-file.js +++ b/js/bin/stream-to-file.ts @@ -1,4 +1,4 @@ -#! /usr/bin/env node +#! /usr/bin/env -S node --no-warnings --loader ts-node/esm/transpile-only // Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file @@ -17,13 +17,10 @@ // specific language governing permissions and limitations // under the License. -// @ts-check - -const fs = require('fs'); -const path = require('path'); -const eos = require('util').promisify(require('stream').finished); -const extension = process.env.ARROW_JS_DEBUG === 'src' ? '.ts' : '.cjs'; -const { RecordBatchReader, RecordBatchFileWriter } = require(`../index${extension}`); +import * as fs from 'fs'; +import * as path from 'path'; +import { finished as eos } from 'stream/promises'; +import { RecordBatchReader, RecordBatchFileWriter } from '../index.ts'; (async () => { diff --git a/js/gulp/closure-task.js b/js/gulp/closure-task.js index e7e13c3046b37..7c0ae1c1b6cd5 100644 --- a/js/gulp/closure-task.js +++ b/js/gulp/closure-task.js @@ -56,7 +56,8 @@ export const closureTask = ((cache) => memoizeTask(cache, async function closure return await Promise.all([ runClosureCompileAsObservable().toPromise(), - compileBinFiles(target, format).toPromise() + compileBinFiles(target, format).toPromise(), + observableFromStreams(gulp.src(`${src}/**/*.d.ts`), gulp.dest(out)), // copy .d.ts files ]); function runClosureCompileAsObservable() { diff --git a/js/index.mjs b/js/index.mjs index 163f250e61641..f834ef19a0397 100644 --- a/js/index.mjs +++ b/js/index.mjs @@ -15,4 +15,4 @@ // specific language governing permissions and limitations // under the License. 
-export * from './targets/apache-arrow/Arrow.mjs'; +export * from './targets/apache-arrow/Arrow.node.mjs'; diff --git a/js/index.ts b/js/index.ts index cfd64bbbe9730..dab2977678aac 100644 --- a/js/index.ts +++ b/js/index.ts @@ -15,4 +15,4 @@ // specific language governing permissions and limitations // under the License. -export * from './src/Arrow.node'; \ No newline at end of file +export * from './src/Arrow.node.ts'; diff --git a/js/package.json b/js/package.json index 14f26c74d29f3..33bc4849903ef 100644 --- a/js/package.json +++ b/js/package.json @@ -2,7 +2,7 @@ "name": "apache-arrow", "description": "Apache Arrow columnar in-memory format", "bin": { - "arrow2csv": "bin/arrow2csv.js" + "arrow2csv": "bin/arrow2csv.cjs" }, "type": "module", "scripts": { @@ -11,8 +11,8 @@ "build": "cross-env NODE_NO_WARNINGS=1 gulp build", "clean": "cross-env NODE_NO_WARNINGS=1 gulp clean", "debug": "cross-env NODE_NO_WARNINGS=1 gulp debug", - "perf": "node --loader ts-node/esm/transpile-only ./perf/index.ts", - "test:integration": "node ./bin/integration.js --mode validate", + "perf": "perf/index.ts", + "test:integration": "bin/integration.ts --mode validate", "release": "./npm-release.sh", "clean:all": "yarn clean && yarn clean:testdata", "clean:testdata": "gulp clean:testdata", @@ -52,23 +52,25 @@ "jest.config.js" ], "dependencies": { - "@types/command-line-args": "5.2.0", - "@types/command-line-usage": "5.0.2", - "@types/node": "20.3.0", - "@types/pad-left": "2.1.1", - "command-line-args": "5.2.1", - "command-line-usage": "7.0.1", - "flatbuffers": "23.5.26", + "@swc/helpers": "^0.5.2", + "@types/command-line-args": "^5.2.1", + "@types/command-line-usage": "^5.0.2", + "@types/node": "^20.6.0", + "@types/pad-left": "^2.1.1", + "command-line-args": "^5.2.1", + "command-line-usage": "^7.0.1", + "flatbuffers": "^23.5.26", "json-bignum": "^0.0.3", "pad-left": "^2.1.0", - "tslib": "^2.5.3" + "tslib": "^2.6.2" }, "devDependencies": { "@openpgp/web-stream-tools": "0.0.13", 
"@rollup/plugin-alias": "5.0.0", "@rollup/plugin-node-resolve": "15.1.0", - "@rollup/stream": "3.0.0", - "@types/benchmark": "2.1.2", + "@rollup/stream": "3.0.1", + "@swc/core": "1.3.82", + "@types/benchmark": "2.1.4", "@types/glob": "8.1.0", "@types/jest": "29.5.3", "@types/randomatic": "3.1.3", @@ -81,7 +83,7 @@ "del-cli": "5.1.0", "esbuild": "0.19.2", "esbuild-plugin-alias": "0.2.1", - "eslint": "8.42.0", + "eslint": "8.52.0", "eslint-plugin-jest": "27.4.2", "eslint-plugin-unicorn": "47.0.0", "esm": "https://github.com/jsg2021/esm/releases/download/v3.x.x-pr883/esm-3.x.x-pr883.tgz", @@ -103,7 +105,8 @@ "mkdirp": "3.0.1", "multistream": "4.1.0", "randomatic": "3.1.1", - "rollup": "3.25.0", + "regenerator-runtime": "0.14.0", + "rollup": "4.3.0", "rxjs": "7.8.1", "ts-jest": "29.1.1", "ts-node": "10.9.1", @@ -121,5 +124,5 @@ "engines": { "node": ">=12.0" }, - "version": "14.0.0-SNAPSHOT" + "version": "15.0.0-SNAPSHOT" } diff --git a/js/perf/index.ts b/js/perf/index.ts old mode 100644 new mode 100755 index 40225a6d8ae8c..2869470b4697f --- a/js/perf/index.ts +++ b/js/perf/index.ts @@ -1,3 +1,5 @@ +#! /usr/bin/env -S node --no-warnings --loader ts-node/esm/transpile-only + // Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information @@ -15,24 +17,23 @@ // specific language governing permissions and limitations // under the License. 
-// Alternatively, use bundles for performance tests -// import * as Arrow from '../targets/es5/umd/Arrow.js'; +export { Arrow }; + +import * as Arrow from '../src/Arrow.ts'; +// import Arrow from '../targets/es5/umd/Arrow.js'; // import * as Arrow from '../targets/es5/esm/Arrow.js'; // import * as Arrow from '../targets/es5/cjs/Arrow.js'; -// import * as Arrow from '../targets/es2015/umd/Arrow.js'; +// import Arrow from '../targets/es2015/umd/Arrow.js'; // import * as Arrow from '../targets/es2015/esm/Arrow.js'; // import * as Arrow from '../targets/es2015/cjs/Arrow.js'; -// import * as Arrow from '../targets/esnext/umd/Arrow.js'; +// import Arrow from '../targets/esnext/umd/Arrow.js'; // import * as Arrow from '../targets/esnext/esm/Arrow.js'; // import * as Arrow from '../targets/esnext/cjs/Arrow.js'; -import * as Arrow from '../src/Arrow.js'; - import config, { arrays, typedArrays, vectors } from './config.js'; import b from 'benny'; import { CaseResult, Summary } from 'benny/lib/internal/common-types'; import kleur from 'kleur'; -export { Arrow }; const { RecordBatchReader, RecordBatchStreamWriter } = Arrow; diff --git a/js/src/io/adapters.ts b/js/src/io/adapters.ts index d0c8f7a5a99f0..05020314aa6b9 100644 --- a/js/src/io/adapters.ts +++ b/js/src/io/adapters.ts @@ -71,7 +71,7 @@ function* fromIterable(source: Iterable | T): } // Yield so the caller can inject the read command before creating the source Iterator - ({ cmd, size } = yield null); + ({ cmd, size } = (yield (() => null)()) || {cmd: 'read', size: 0}); // initialize the iterator const it = toUint8ArrayIterator(source)[Symbol.iterator](); @@ -117,7 +117,7 @@ async function* fromAsyncIterable(source: AsyncI } // Yield so the caller can inject the read command before creating the source AsyncIterator - ({ cmd, size } = (yield null)!); + ({ cmd, size } = (yield (() => null)()) || {cmd: 'read', size: 0}); // initialize the iterator const it = toUint8ArrayAsyncIterator(source)[Symbol.asyncIterator](); @@ 
-167,7 +167,7 @@ async function* fromDOMStream(source: ReadableSt } // Yield so the caller can inject the read command before we establish the ReadableStream lock - ({ cmd, size } = yield null); + ({ cmd, size } = (yield (() => null)()) || {cmd: 'read', size: 0}); // initialize the reader and lock the stream const it = new AdaptiveByteReader(source); @@ -273,7 +273,7 @@ async function* fromNodeStream(stream: NodeJS.ReadableStream): AsyncUint8ArrayGe // Yield so the caller can inject the read command before we // add the listener for the source stream's 'readable' event. - ({ cmd, size } = yield null); + ({ cmd, size } = (yield (() => null)()) || {cmd: 'read', size: 0}); // ignore stdin if it's a TTY if ((stream as any)['isTTY']) { diff --git a/js/src/recordbatch.ts b/js/src/recordbatch.ts index 59505a70a3c81..1ea7c52ccf310 100644 --- a/js/src/recordbatch.ts +++ b/js/src/recordbatch.ts @@ -203,7 +203,7 @@ export class RecordBatch { * Returns a child Vector by index, or null if this Vector has no child at the supplied index. * @param index The index of the child to retrieve. 
*/ - public getChildAt(index: number): Vector | null { + public getChildAt(index: number): Vector | null { if (index > -1 && index < this.schema.fields.length) { return new Vector([this.data.children[index]]) as Vector; } diff --git a/js/tsconfig.json b/js/tsconfig.json index fa352302eaadc..abdd1815a0d98 100644 --- a/js/tsconfig.json +++ b/js/tsconfig.json @@ -15,18 +15,11 @@ "paths": { "apache-arrow": ["src/Arrow.node"], "apache-arrow/*": ["src/*"] - } + }, + "allowImportingTsExtensions": true, }, - "include": ["src/**/*.ts", "test/**/*.ts", "perf/**/*.ts"], + "include": ["bin/**/*.ts", "src/**/*.ts", "test/**/*.ts", "perf/**/*.ts"], "ts-node": { - "transpileOnly": true, - "experimentalResolver": true, - "compilerOptions": { - "module": "CommonJS" - }, - "moduleTypes": { - "index.ts": "cjs", - "src/**/*": "cjs", - } + "swc": true } } diff --git a/js/tsconfig/tsconfig.es2015.cls.json b/js/tsconfig/tsconfig.es2015.cls.json index 7db6858c767d3..edac1038ee301 100644 --- a/js/tsconfig/tsconfig.es2015.cls.json +++ b/js/tsconfig/tsconfig.es2015.cls.json @@ -4,8 +4,6 @@ "compilerOptions": { "target": "ES2020", "module": "ES2015", - "declaration": false, - "declarationMap": false, "noEmitHelpers": true, "importHelpers": false } diff --git a/js/tsconfig/tsconfig.es5.cls.json b/js/tsconfig/tsconfig.es5.cls.json index 0751eb0c96d7f..a6a4e80b2745b 100644 --- a/js/tsconfig/tsconfig.es5.cls.json +++ b/js/tsconfig/tsconfig.es5.cls.json @@ -4,8 +4,6 @@ "compilerOptions": { "target": "ES2020", "module": "ES2015", - "declaration": false, - "declarationMap": false, "noEmitHelpers": true, "importHelpers": false } diff --git a/js/tsconfig/tsconfig.esnext.cls.json b/js/tsconfig/tsconfig.esnext.cls.json index f9a456693d9db..335e759904159 100644 --- a/js/tsconfig/tsconfig.esnext.cls.json +++ b/js/tsconfig/tsconfig.esnext.cls.json @@ -4,8 +4,6 @@ "compilerOptions": { "target": "ES2020", "module": "ES2015", - "declaration": false, - "declarationMap": false, "noEmitHelpers": true, 
"importHelpers": false } diff --git a/js/yarn.lock b/js/yarn.lock index 6677772086ad5..eddf380d1ffad 100644 --- a/js/yarn.lock +++ b/js/yarn.lock @@ -4,15 +4,20 @@ "@75lb/deep-merge@^1.1.1": version "1.1.1" - resolved "https://registry.yarnpkg.com/@75lb/deep-merge/-/deep-merge-1.1.1.tgz#3b06155b90d34f5f8cc2107d796f1853ba02fd6d" + resolved "https://registry.npmjs.org/@75lb/deep-merge/-/deep-merge-1.1.1.tgz#3b06155b90d34f5f8cc2107d796f1853ba02fd6d" integrity sha512-xvgv6pkMGBA6GwdyJbNAnDmfAIR/DfWhrj9jgWh3TY7gRm3KO46x/GPjRg6wJ0nOepwqrNxFfojebh0Df4h4Tw== dependencies: lodash.assignwith "^4.2.0" typical "^7.1.1" +"@aashutoshrathi/word-wrap@^1.2.3": + version "1.2.6" + resolved "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz#bd9154aec9983f77b3a034ecaa015c2e4201f6cf" + integrity sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA== + "@ampproject/remapping@^2.2.0": version "2.2.1" - resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.1.tgz#99e8e11851128b8702cd57c33684f1d0f260b630" + resolved "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.1.tgz#99e8e11851128b8702cd57c33684f1d0f260b630" integrity sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg== dependencies: "@jridgewell/gen-mapping" "^0.3.0" @@ -20,31 +25,31 @@ "@arrows/array@^1.4.1": version "1.4.1" - resolved "https://registry.yarnpkg.com/@arrows/array/-/array-1.4.1.tgz#a6580a08cee219755ca9a8eb14e956d3c29a5508" + resolved "https://registry.npmjs.org/@arrows/array/-/array-1.4.1.tgz#a6580a08cee219755ca9a8eb14e956d3c29a5508" integrity sha512-MGYS8xi3c4tTy1ivhrVntFvufoNzje0PchjEz6G/SsWRgUKxL4tKwS6iPdO8vsaJYldagAeWMd5KRD0aX3Q39g== dependencies: "@arrows/composition" "^1.2.2" "@arrows/composition@^1.0.0", "@arrows/composition@^1.2.2": version "1.2.2" - resolved 
"https://registry.yarnpkg.com/@arrows/composition/-/composition-1.2.2.tgz#d0a213cac8f8c36c1c75856a1e6ed940c27e9169" + resolved "https://registry.npmjs.org/@arrows/composition/-/composition-1.2.2.tgz#d0a213cac8f8c36c1c75856a1e6ed940c27e9169" integrity sha512-9fh1yHwrx32lundiB3SlZ/VwuStPB4QakPsSLrGJFH6rCXvdrd060ivAZ7/2vlqPnEjBkPRRXOcG1YOu19p2GQ== "@arrows/dispatch@^1.0.2": version "1.0.3" - resolved "https://registry.yarnpkg.com/@arrows/dispatch/-/dispatch-1.0.3.tgz#c4c06260f89e9dd4ce280df3712980aa2f3de976" + resolved "https://registry.npmjs.org/@arrows/dispatch/-/dispatch-1.0.3.tgz#c4c06260f89e9dd4ce280df3712980aa2f3de976" integrity sha512-v/HwvrFonitYZM2PmBlAlCqVqxrkIIoiEuy5bQgn0BdfvlL0ooSBzcPzTMrtzY8eYktPyYcHg8fLbSgyybXEqw== dependencies: "@arrows/composition" "^1.2.2" "@arrows/error@^1.0.2": version "1.0.2" - resolved "https://registry.yarnpkg.com/@arrows/error/-/error-1.0.2.tgz#4e68036f901118ba6f1de88656ef6be49e650414" + resolved "https://registry.npmjs.org/@arrows/error/-/error-1.0.2.tgz#4e68036f901118ba6f1de88656ef6be49e650414" integrity sha512-yvkiv1ay4Z3+Z6oQsUkedsQm5aFdyPpkBUQs8vejazU/RmANABx6bMMcBPPHI4aW43VPQmXFfBzr/4FExwWTEA== "@arrows/multimethod@^1.1.6": version "1.4.1" - resolved "https://registry.yarnpkg.com/@arrows/multimethod/-/multimethod-1.4.1.tgz#319d0b6f84d22522dd2f4b24f04137b6219f0300" + resolved "https://registry.npmjs.org/@arrows/multimethod/-/multimethod-1.4.1.tgz#319d0b6f84d22522dd2f4b24f04137b6219f0300" integrity sha512-AZnAay0dgPnCJxn3We5uKiB88VL+1ZIF2SjZohLj6vqY2UyvB/sKdDnFP+LZNVsTC5lcnGPmLlRRkAh4sXkXsQ== dependencies: "@arrows/array" "^1.4.1" @@ -52,53 +57,46 @@ "@arrows/error" "^1.0.2" fast-deep-equal "^3.1.3" -"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.5.tgz#234d98e1551960604f1246e6475891a570ad5658" - integrity 
sha512-Xmwn266vad+6DAqEB2A6V/CcZVp62BbwVmcOJc2RPuwih1kw02TjQvWVWlcKGbBPd+8/0V5DEkOcizRGYsspYQ== - dependencies: - "@babel/highlight" "^7.22.5" - -"@babel/code-frame@^7.22.13": +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.22.13": version "7.22.13" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.13.tgz#e3c1c099402598483b7a8c46a721d1038803755e" + resolved "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.13.tgz#e3c1c099402598483b7a8c46a721d1038803755e" integrity sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w== dependencies: "@babel/highlight" "^7.22.13" chalk "^2.4.2" -"@babel/compat-data@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.22.5.tgz#b1f6c86a02d85d2dd3368a2b67c09add8cd0c255" - integrity sha512-4Jc/YuIaYqKnDDz892kPIledykKg12Aw1PYX5i/TY28anJtacvM1Rrr8wbieB9GfEJwlzqT0hUEao0CxEebiDA== +"@babel/compat-data@^7.22.9": + version "7.23.2" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.23.2.tgz#6a12ced93455827037bfb5ed8492820d60fc32cc" + integrity sha512-0S9TQMmDHlqAZ2ITT95irXKfxN9bncq8ZCoJhun3nHL/lLUxd2NKBJYoNGWH7S0hz6fRQwWlAWn/ILM0C70KZQ== "@babel/core@^7.11.6", "@babel/core@^7.12.3": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.22.5.tgz#d67d9747ecf26ee7ecd3ebae1ee22225fe902a89" - integrity sha512-SBuTAjg91A3eKOvD+bPEz3LlhHZRNu1nFOVts9lzDJTXshHTjII0BAtDS3Y2DAkdZdDKWVZGVwkDfc4Clxn1dg== + version "7.22.19" + resolved "https://registry.npmjs.org/@babel/core/-/core-7.22.19.tgz#b38162460a6f3baf2a424bda720b24a8aafea241" + integrity sha512-Q8Yj5X4LHVYTbLCKVz0//2D2aDmHF4xzCdEttYvKOnWvErGsa6geHXD6w46x64n5tP69VfeH+IfSrdyH3MLhwA== dependencies: "@ampproject/remapping" "^2.2.0" - "@babel/code-frame" "^7.22.5" - "@babel/generator" "^7.22.5" - "@babel/helper-compilation-targets" "^7.22.5" - "@babel/helper-module-transforms" "^7.22.5" - 
"@babel/helpers" "^7.22.5" - "@babel/parser" "^7.22.5" - "@babel/template" "^7.22.5" - "@babel/traverse" "^7.22.5" - "@babel/types" "^7.22.5" + "@babel/code-frame" "^7.22.13" + "@babel/generator" "^7.22.15" + "@babel/helper-compilation-targets" "^7.22.15" + "@babel/helper-module-transforms" "^7.22.19" + "@babel/helpers" "^7.22.15" + "@babel/parser" "^7.22.16" + "@babel/template" "^7.22.15" + "@babel/traverse" "^7.22.19" + "@babel/types" "^7.22.19" convert-source-map "^1.7.0" debug "^4.1.0" gensync "^1.0.0-beta.2" - json5 "^2.2.2" - semver "^6.3.0" + json5 "^2.2.3" + semver "^6.3.1" -"@babel/generator@^7.22.5", "@babel/generator@^7.7.2": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.22.5.tgz#1e7bf768688acfb05cf30b2369ef855e82d984f7" - integrity sha512-+lcUbnTRhd0jOewtFSedLyiPsD5tswKkbgcezOqqWFUVNEwoUTlpPOBmvhG7OXWLR4jMdv0czPGH5XbflnD1EA== +"@babel/generator@^7.22.15", "@babel/generator@^7.7.2": + version "7.22.15" + resolved "https://registry.npmjs.org/@babel/generator/-/generator-7.22.15.tgz#1564189c7ec94cb8f77b5e8a90c4d200d21b2339" + integrity sha512-Zu9oWARBqeVOW0dZOjXc3JObrzuqothQ3y/n1kUtrjCoCPLkXUwMvOo/F/TCfoHMbWIFlWwpZtkZVb9ga4U2pA== dependencies: - "@babel/types" "^7.22.5" + "@babel/types" "^7.22.15" "@jridgewell/gen-mapping" "^0.3.2" "@jridgewell/trace-mapping" "^0.3.17" jsesc "^2.5.1" @@ -113,16 +111,16 @@ "@jridgewell/trace-mapping" "^0.3.17" jsesc "^2.5.1" -"@babel/helper-compilation-targets@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.5.tgz#fc7319fc54c5e2fa14b2909cf3c5fd3046813e02" - integrity sha512-Ji+ywpHeuqxB8WDxraCiqR0xfhYjiDE/e6k7FuIaANnoOFxAHskHChz4vA1mJC9Lbm01s1PVAGhQY4FUKSkGZw== +"@babel/helper-compilation-targets@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.15.tgz#0698fc44551a26cf29f18d4662d5bf545a6cfc52" + 
integrity sha512-y6EEzULok0Qvz8yyLkCvVX+02ic+By2UdOhylwUOvOn9dvYc9mKICJuuU1n1XBI02YWsNsnrY1kc6DVbjcXbtw== dependencies: - "@babel/compat-data" "^7.22.5" - "@babel/helper-validator-option" "^7.22.5" - browserslist "^4.21.3" + "@babel/compat-data" "^7.22.9" + "@babel/helper-validator-option" "^7.22.15" + browserslist "^4.21.9" lru-cache "^5.1.1" - semver "^6.3.0" + semver "^6.3.1" "@babel/helper-environment-visitor@^7.22.20": version "7.22.20" @@ -131,7 +129,7 @@ "@babel/helper-environment-visitor@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.5.tgz#f06dd41b7c1f44e1f8da6c4055b41ab3a09a7e98" + resolved "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.5.tgz#f06dd41b7c1f44e1f8da6c4055b41ab3a09a7e98" integrity sha512-XGmhECfVA/5sAt+H+xpSg0mfrHq6FzNr9Oxh7PSEBBRUb/mL7Kz3NICXb194rCqAEdxkhPT1a88teizAFyvk8Q== "@babel/helper-function-name@^7.23.0": @@ -144,51 +142,41 @@ "@babel/helper-hoist-variables@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb" + resolved "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb" integrity sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw== dependencies: "@babel/types" "^7.22.5" -"@babel/helper-module-imports@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.22.5.tgz#1a8f4c9f4027d23f520bd76b364d44434a72660c" - integrity sha512-8Dl6+HD/cKifutF5qGd/8ZJi84QeAKh+CEe1sBzz8UayBBGg1dAIJrdHOcOM5b2MpzWL2yuotJTtGjETq0qjXg== +"@babel/helper-module-imports@^7.22.15": + version "7.22.15" + resolved 
"https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.22.15.tgz#16146307acdc40cc00c3b2c647713076464bdbf0" + integrity sha512-0pYVBnDKZO2fnSPCrgM/6WMc7eS20Fbok+0r88fp+YtWVLZrp4CkafFGIp+W0VKw4a22sgebPT99y+FDNMdP4w== dependencies: - "@babel/types" "^7.22.5" + "@babel/types" "^7.22.15" -"@babel/helper-module-transforms@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.22.5.tgz#0f65daa0716961b6e96b164034e737f60a80d2ef" - integrity sha512-+hGKDt/Ze8GFExiVHno/2dvG5IdstpzCq0y4Qc9OJ25D4q3pKfiIP/4Vp3/JvhDkLKsDK2api3q3fpIgiIF5bw== +"@babel/helper-module-transforms@^7.22.19": + version "7.22.19" + resolved "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.22.19.tgz#94b1f281caa6518f02ec0f5ea2b5348e298ce266" + integrity sha512-m6h1cJvn+OJ+R3jOHp30faq5xKJ7VbjwDj5RGgHuRlU9hrMeKsGC+JpihkR5w1g7IfseCPPtZ0r7/hB4UKaYlA== dependencies: "@babel/helper-environment-visitor" "^7.22.5" - "@babel/helper-module-imports" "^7.22.5" + "@babel/helper-module-imports" "^7.22.15" "@babel/helper-simple-access" "^7.22.5" - "@babel/helper-split-export-declaration" "^7.22.5" - "@babel/helper-validator-identifier" "^7.22.5" - "@babel/template" "^7.22.5" - "@babel/traverse" "^7.22.5" - "@babel/types" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" + "@babel/helper-validator-identifier" "^7.22.19" "@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.22.5", "@babel/helper-plugin-utils@^7.8.0": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz#dd7ee3735e8a313b9f7b05a773d892e88e6d7295" + resolved "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz#dd7ee3735e8a313b9f7b05a773d892e88e6d7295" integrity 
sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg== "@babel/helper-simple-access@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.22.5.tgz#4938357dc7d782b80ed6dbb03a0fba3d22b1d5de" + resolved "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.22.5.tgz#4938357dc7d782b80ed6dbb03a0fba3d22b1d5de" integrity sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w== dependencies: "@babel/types" "^7.22.5" -"@babel/helper-split-export-declaration@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.5.tgz#88cf11050edb95ed08d596f7a044462189127a08" - integrity sha512-thqK5QFghPKWLhAV321lxF95yCg2K3Ob5yw+M3VHWfdia0IkPXUtoLH8x/6Fh486QUvzhb8YOWHChTVen2/PoQ== - dependencies: - "@babel/types" "^7.22.5" - "@babel/helper-split-export-declaration@^7.22.6": version "7.22.6" resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz#322c61b7310c0997fe4c323955667f18fcefb91c" @@ -198,32 +186,32 @@ "@babel/helper-string-parser@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz#533f36457a25814cf1df6488523ad547d784a99f" + resolved "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz#533f36457a25814cf1df6488523ad547d784a99f" integrity sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw== -"@babel/helper-validator-identifier@^7.19.1", "@babel/helper-validator-identifier@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz#9544ef6a33999343c8740fa51350f30eeaaaf193" - integrity 
sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ== +"@babel/helper-validator-identifier@^7.19.1", "@babel/helper-validator-identifier@^7.22.19": + version "7.22.19" + resolved "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.19.tgz#2f34ab1e445f5b95e2e6edfe50ea2449e610583a" + integrity sha512-Tinq7ybnEPFFXhlYOYFiSjespWQk0dq2dRNAiMdRTOYQzEGqnnNyrTxPYHP5r6wGjlF1rFgABdDV0g8EwD6Qbg== "@babel/helper-validator-identifier@^7.22.20": version "7.22.20" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0" integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== -"@babel/helper-validator-option@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.22.5.tgz#de52000a15a177413c8234fa3a8af4ee8102d0ac" - integrity sha512-R3oB6xlIVKUnxNUxbmgq7pKjxpru24zlimpE8WK47fACIlM0II/Hm1RS8IaOI7NgCr6LNS+jl5l75m20npAziw== +"@babel/helper-validator-option@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.22.15.tgz#694c30dfa1d09a6534cdfcafbe56789d36aba040" + integrity sha512-bMn7RmyFjY/mdECUbgn9eoSY4vqvacUnS9i9vGAGttgFWesO6B4CYWA7XlpbWgBt71iv/hfbPlynohStqnu5hA== -"@babel/helpers@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.22.5.tgz#74bb4373eb390d1ceed74a15ef97767e63120820" - integrity sha512-pSXRmfE1vzcUIDFQcSGA5Mr+GxBV9oiRKDuDxXvWQQBCh8HoIjs/2DlDB7H8smac1IVrB9/xdXj2N3Wol9Cr+Q== +"@babel/helpers@^7.22.15": + version "7.22.15" + resolved "https://registry.npmjs.org/@babel/helpers/-/helpers-7.22.15.tgz#f09c3df31e86e3ea0b7ff7556d85cdebd47ea6f1" + integrity sha512-7pAjK0aSdxOwR+CcYAqgWOGy5dcfvzsTIfFTb2odQqW47MDfv14UaJDY6eng8ylM2EaeKXdxaSWESbkmaQHTmw== 
dependencies: - "@babel/template" "^7.22.5" - "@babel/traverse" "^7.22.5" - "@babel/types" "^7.22.5" + "@babel/template" "^7.22.15" + "@babel/traverse" "^7.22.15" + "@babel/types" "^7.22.15" "@babel/highlight@^7.22.13": version "7.22.20" @@ -234,19 +222,10 @@ chalk "^2.4.2" js-tokens "^4.0.0" -"@babel/highlight@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.22.5.tgz#aa6c05c5407a67ebce408162b7ede789b4d22031" - integrity sha512-BSKlD1hgnedS5XRnGOljZawtag7H1yPfQp0tdNJCHoH6AZ+Pcm9VvkrK59/Yy593Ypg0zMxH2BxD1VPYUQ7UIw== - dependencies: - "@babel/helper-validator-identifier" "^7.22.5" - chalk "^2.0.0" - js-tokens "^4.0.0" - -"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.20.7", "@babel/parser@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.22.5.tgz#721fd042f3ce1896238cf1b341c77eb7dee7dbea" - integrity sha512-DFZMC9LJUG9PLOclRC32G63UXwzqS2koQC8dkx+PLdmt1xSePYpbT/NbsrJy8Q/muXz7o/h/d4A7Fuyixm559Q== +"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.20.7", "@babel/parser@^7.22.16": + version "7.22.16" + resolved "https://registry.npmjs.org/@babel/parser/-/parser-7.22.16.tgz#180aead7f247305cce6551bea2720934e2fa2c95" + integrity sha512-+gPfKv8UWeKKeJTUxe59+OobVcrYHETCsORl61EmSkmgymguYk/X5bp7GuUIXaFsc6y++v8ZxPsLSSuujqDphA== "@babel/parser@^7.22.15", "@babel/parser@^7.23.0": version "7.23.0" @@ -255,98 +234,98 @@ "@babel/plugin-syntax-async-generators@^7.8.4": version "7.8.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== dependencies: "@babel/helper-plugin-utils" 
"^7.8.0" "@babel/plugin-syntax-bigint@^7.8.3": version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-class-properties@^7.8.3": version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== dependencies: "@babel/helper-plugin-utils" "^7.12.13" "@babel/plugin-syntax-import-meta@^7.8.3": version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== dependencies: "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-json-strings@^7.8.3": version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" integrity 
sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-jsx@^7.7.2": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.22.5.tgz#a6b68e84fb76e759fc3b93e901876ffabbe1d918" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.22.5.tgz#a6b68e84fb76e759fc3b93e901876ffabbe1d918" integrity sha512-gvyP4hZrgrs/wWMaocvxZ44Hw0b3W8Pe+cMxc8V1ULQ07oh8VNbIRaoD1LRZVTvD+0nieDKjfgKg89sD7rrKrg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-logical-assignment-operators@^7.8.3": version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== dependencies: "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-numeric-separator@^7.8.3": version "7.10.4" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== dependencies: "@babel/helper-plugin-utils" "^7.10.4" "@babel/plugin-syntax-object-rest-spread@^7.8.3": version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-optional-catch-binding@^7.8.3": version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-optional-chaining@^7.8.3": version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" + resolved 
"https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-syntax-top-level-await@^7.8.3": version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== dependencies: "@babel/helper-plugin-utils" "^7.14.5" "@babel/plugin-syntax-typescript@^7.7.2": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.22.5.tgz#aac8d383b062c5072c647a31ef990c1d0af90272" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.22.5.tgz#aac8d383b062c5072c647a31ef990c1d0af90272" integrity sha512-1mS2o03i7t1c6VzH6fdQ3OA8tcEIxwG18zIPRp+UY1Ihv6W+XZzBCVxExF9upussPXJ0xE9XRHwMoNs1ep/nRQ== dependencies: "@babel/helper-plugin-utils" "^7.22.5" @@ -360,16 +339,16 @@ "@babel/parser" "^7.22.15" "@babel/types" "^7.22.15" -"@babel/template@^7.22.5", "@babel/template@^7.3.3": +"@babel/template@^7.3.3": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.5.tgz#0c8c4d944509875849bd0344ff0050756eefc6ec" integrity sha512-X7yV7eiwAxdj9k94NEylvbVHLiVG1nvzCV2EAowhxLTwODV1jl9UzZ48leOC0sH7OnuHrIkllaBgneUykIcZaw== dependencies: - "@babel/code-frame" "^7.22.5" - "@babel/parser" "^7.22.5" - "@babel/types" "^7.22.5" + "@babel/code-frame" "^7.22.13" + "@babel/parser" "^7.22.15" + "@babel/types" "^7.22.15" -"@babel/traverse@^7.22.5": 
+"@babel/traverse@^7.22.15", "@babel/traverse@^7.22.19": version "7.23.2" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.2.tgz#329c7a06735e144a506bdb2cad0268b7f46f4ad8" integrity sha512-azpe59SQ48qG6nu2CzcMLbxUudtN+dOM9kDbUqGq3HXUJRlo7i8fvPoxQUzYgLZ4cMVmuZgm8vvBpNeRhd6XSw== @@ -385,13 +364,13 @@ debug "^4.1.0" globals "^11.1.0" -"@babel/types@^7.0.0", "@babel/types@^7.20.7", "@babel/types@^7.22.5", "@babel/types@^7.3.3": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.22.5.tgz#cd93eeaab025880a3a47ec881f4b096a5b786fbe" - integrity sha512-zo3MIHGOkPOfoRXitsgHLjEXmlDaD/5KU1Uzuc9GNiZPhSqVxVRtxuPaSBZDsYZ9qV88AjtMtWW7ww98loJ9KA== +"@babel/types@^7.0.0", "@babel/types@^7.20.7", "@babel/types@^7.22.19", "@babel/types@^7.22.5", "@babel/types@^7.3.3": + version "7.22.19" + resolved "https://registry.npmjs.org/@babel/types/-/types-7.22.19.tgz#7425343253556916e440e662bb221a93ddb75684" + integrity sha512-P7LAw/LbojPzkgp5oznjE6tQEIWbp4PkkfrZDINTro9zgBRtI324/EYsiSI7lhPbpIQ+DCeR2NNmMWANGGfZsg== dependencies: "@babel/helper-string-parser" "^7.22.5" - "@babel/helper-validator-identifier" "^7.22.5" + "@babel/helper-validator-identifier" "^7.22.19" to-fast-properties "^2.0.0" "@babel/types@^7.22.15", "@babel/types@^7.23.0": @@ -405,261 +384,261 @@ "@bcoe/v8-coverage@^0.2.3": version "0.2.3" - resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" + resolved "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== "@cspotcode/source-map-support@^0.8.0": version "0.8.1" - resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1" + resolved 
"https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1" integrity sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw== dependencies: "@jridgewell/trace-mapping" "0.3.9" "@discoveryjs/json-ext@0.5.7": version "0.5.7" - resolved "https://registry.yarnpkg.com/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz#1d572bfbbe14b7704e0ba0f39b74815b84870d70" + resolved "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz#1d572bfbbe14b7704e0ba0f39b74815b84870d70" integrity sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw== "@esbuild/android-arm64@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.17.19.tgz#bafb75234a5d3d1b690e7c2956a599345e84a2fd" + resolved "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.17.19.tgz#bafb75234a5d3d1b690e7c2956a599345e84a2fd" integrity sha512-KBMWvEZooR7+kzY0BtbTQn0OAYY7CsiydT63pVEaPtVYF0hXbUaOyZog37DKxK7NF3XacBJOpYT4adIJh+avxA== "@esbuild/android-arm64@0.19.2": version "0.19.2" - resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.19.2.tgz#bc35990f412a749e948b792825eef7df0ce0e073" + resolved "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.19.2.tgz#bc35990f412a749e948b792825eef7df0ce0e073" integrity sha512-lsB65vAbe90I/Qe10OjkmrdxSX4UJDjosDgb8sZUKcg3oefEuW2OT2Vozz8ef7wrJbMcmhvCC+hciF8jY/uAkw== "@esbuild/android-arm@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.17.19.tgz#5898f7832c2298bc7d0ab53701c57beb74d78b4d" + resolved "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.17.19.tgz#5898f7832c2298bc7d0ab53701c57beb74d78b4d" integrity sha512-rIKddzqhmav7MSmoFCmDIb6e2W57geRsM94gV2l38fzhXMwq7hZoClug9USI2pFRGL06f4IOPHHpFNOkWieR8A== "@esbuild/android-arm@0.19.2": version "0.19.2" - 
resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.19.2.tgz#edd1c8f23ba353c197f5b0337123c58ff2a56999" + resolved "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.19.2.tgz#edd1c8f23ba353c197f5b0337123c58ff2a56999" integrity sha512-tM8yLeYVe7pRyAu9VMi/Q7aunpLwD139EY1S99xbQkT4/q2qa6eA4ige/WJQYdJ8GBL1K33pPFhPfPdJ/WzT8Q== "@esbuild/android-x64@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.17.19.tgz#658368ef92067866d95fb268719f98f363d13ae1" + resolved "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.17.19.tgz#658368ef92067866d95fb268719f98f363d13ae1" integrity sha512-uUTTc4xGNDT7YSArp/zbtmbhO0uEEK9/ETW29Wk1thYUJBz3IVnvgEiEwEa9IeLyvnpKrWK64Utw2bgUmDveww== "@esbuild/android-x64@0.19.2": version "0.19.2" - resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.19.2.tgz#2dcdd6e6f1f2d82ea1b746abd8da5b284960f35a" + resolved "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.19.2.tgz#2dcdd6e6f1f2d82ea1b746abd8da5b284960f35a" integrity sha512-qK/TpmHt2M/Hg82WXHRc/W/2SGo/l1thtDHZWqFq7oi24AjZ4O/CpPSu6ZuYKFkEgmZlFoa7CooAyYmuvnaG8w== "@esbuild/darwin-arm64@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.17.19.tgz#584c34c5991b95d4d48d333300b1a4e2ff7be276" + resolved "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.17.19.tgz#584c34c5991b95d4d48d333300b1a4e2ff7be276" integrity sha512-80wEoCfF/hFKM6WE1FyBHc9SfUblloAWx6FJkFWTWiCoht9Mc0ARGEM47e67W9rI09YoUxJL68WHfDRYEAvOhg== "@esbuild/darwin-arm64@0.19.2": version "0.19.2" - resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.19.2.tgz#55b36bc06d76f5c243987c1f93a11a80d8fc3b26" + resolved "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.19.2.tgz#55b36bc06d76f5c243987c1f93a11a80d8fc3b26" integrity 
sha512-Ora8JokrvrzEPEpZO18ZYXkH4asCdc1DLdcVy8TGf5eWtPO1Ie4WroEJzwI52ZGtpODy3+m0a2yEX9l+KUn0tA== "@esbuild/darwin-x64@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.17.19.tgz#7751d236dfe6ce136cce343dce69f52d76b7f6cb" + resolved "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.17.19.tgz#7751d236dfe6ce136cce343dce69f52d76b7f6cb" integrity sha512-IJM4JJsLhRYr9xdtLytPLSH9k/oxR3boaUIYiHkAawtwNOXKE8KoU8tMvryogdcT8AU+Bflmh81Xn6Q0vTZbQw== "@esbuild/darwin-x64@0.19.2": version "0.19.2" - resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.19.2.tgz#982524af33a6424a3b5cb44bbd52559623ad719c" + resolved "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.19.2.tgz#982524af33a6424a3b5cb44bbd52559623ad719c" integrity sha512-tP+B5UuIbbFMj2hQaUr6EALlHOIOmlLM2FK7jeFBobPy2ERdohI4Ka6ZFjZ1ZYsrHE/hZimGuU90jusRE0pwDw== "@esbuild/freebsd-arm64@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.17.19.tgz#cacd171665dd1d500f45c167d50c6b7e539d5fd2" + resolved "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.17.19.tgz#cacd171665dd1d500f45c167d50c6b7e539d5fd2" integrity sha512-pBwbc7DufluUeGdjSU5Si+P3SoMF5DQ/F/UmTSb8HXO80ZEAJmrykPyzo1IfNbAoaqw48YRpv8shwd1NoI0jcQ== "@esbuild/freebsd-arm64@0.19.2": version "0.19.2" - resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.19.2.tgz#8e478a0856645265fe79eac4b31b52193011ee06" + resolved "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.19.2.tgz#8e478a0856645265fe79eac4b31b52193011ee06" integrity sha512-YbPY2kc0acfzL1VPVK6EnAlig4f+l8xmq36OZkU0jzBVHcOTyQDhnKQaLzZudNJQyymd9OqQezeaBgkTGdTGeQ== "@esbuild/freebsd-x64@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.17.19.tgz#0769456eee2a08b8d925d7c00b79e861cb3162e4" + resolved 
"https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.17.19.tgz#0769456eee2a08b8d925d7c00b79e861cb3162e4" integrity sha512-4lu+n8Wk0XlajEhbEffdy2xy53dpR06SlzvhGByyg36qJw6Kpfk7cp45DR/62aPH9mtJRmIyrXAS5UWBrJT6TQ== "@esbuild/freebsd-x64@0.19.2": version "0.19.2" - resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.19.2.tgz#01b96604f2540db023c73809bb8ae6cd1692d6f3" + resolved "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.19.2.tgz#01b96604f2540db023c73809bb8ae6cd1692d6f3" integrity sha512-nSO5uZT2clM6hosjWHAsS15hLrwCvIWx+b2e3lZ3MwbYSaXwvfO528OF+dLjas1g3bZonciivI8qKR/Hm7IWGw== "@esbuild/linux-arm64@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.17.19.tgz#38e162ecb723862c6be1c27d6389f48960b68edb" + resolved "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.17.19.tgz#38e162ecb723862c6be1c27d6389f48960b68edb" integrity sha512-ct1Tg3WGwd3P+oZYqic+YZF4snNl2bsnMKRkb3ozHmnM0dGWuxcPTTntAF6bOP0Sp4x0PjSF+4uHQ1xvxfRKqg== "@esbuild/linux-arm64@0.19.2": version "0.19.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.19.2.tgz#7e5d2c7864c5c83ec789b59c77cd9c20d2594916" + resolved "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.19.2.tgz#7e5d2c7864c5c83ec789b59c77cd9c20d2594916" integrity sha512-ig2P7GeG//zWlU0AggA3pV1h5gdix0MA3wgB+NsnBXViwiGgY77fuN9Wr5uoCrs2YzaYfogXgsWZbm+HGr09xg== "@esbuild/linux-arm@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.17.19.tgz#1a2cd399c50040184a805174a6d89097d9d1559a" + resolved "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.17.19.tgz#1a2cd399c50040184a805174a6d89097d9d1559a" integrity sha512-cdmT3KxjlOQ/gZ2cjfrQOtmhG4HJs6hhvm3mWSRDPtZ/lP5oe8FWceS10JaSJC13GBd4eH/haHnqf7hhGNLerA== "@esbuild/linux-arm@0.19.2": version "0.19.2" - resolved 
"https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.19.2.tgz#c32ae97bc0246664a1cfbdb4a98e7b006d7db8ae" + resolved "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.19.2.tgz#c32ae97bc0246664a1cfbdb4a98e7b006d7db8ae" integrity sha512-Odalh8hICg7SOD7XCj0YLpYCEc+6mkoq63UnExDCiRA2wXEmGlK5JVrW50vZR9Qz4qkvqnHcpH+OFEggO3PgTg== "@esbuild/linux-ia32@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.17.19.tgz#e28c25266b036ce1cabca3c30155222841dc035a" + resolved "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.17.19.tgz#e28c25266b036ce1cabca3c30155222841dc035a" integrity sha512-w4IRhSy1VbsNxHRQpeGCHEmibqdTUx61Vc38APcsRbuVgK0OPEnQ0YD39Brymn96mOx48Y2laBQGqgZ0j9w6SQ== "@esbuild/linux-ia32@0.19.2": version "0.19.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.19.2.tgz#3fc4f0fa026057fe885e4a180b3956e704f1ceaa" + resolved "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.19.2.tgz#3fc4f0fa026057fe885e4a180b3956e704f1ceaa" integrity sha512-mLfp0ziRPOLSTek0Gd9T5B8AtzKAkoZE70fneiiyPlSnUKKI4lp+mGEnQXcQEHLJAcIYDPSyBvsUbKUG2ri/XQ== "@esbuild/linux-loong64@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.17.19.tgz#0f887b8bb3f90658d1a0117283e55dbd4c9dcf72" + resolved "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.17.19.tgz#0f887b8bb3f90658d1a0117283e55dbd4c9dcf72" integrity sha512-2iAngUbBPMq439a+z//gE+9WBldoMp1s5GWsUSgqHLzLJ9WoZLZhpwWuym0u0u/4XmZ3gpHmzV84PonE+9IIdQ== "@esbuild/linux-loong64@0.19.2": version "0.19.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.19.2.tgz#633bcaea443f3505fb0ed109ab840c99ad3451a4" + resolved "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.19.2.tgz#633bcaea443f3505fb0ed109ab840c99ad3451a4" integrity 
sha512-hn28+JNDTxxCpnYjdDYVMNTR3SKavyLlCHHkufHV91fkewpIyQchS1d8wSbmXhs1fiYDpNww8KTFlJ1dHsxeSw== "@esbuild/linux-mips64el@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.17.19.tgz#f5d2a0b8047ea9a5d9f592a178ea054053a70289" + resolved "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.17.19.tgz#f5d2a0b8047ea9a5d9f592a178ea054053a70289" integrity sha512-LKJltc4LVdMKHsrFe4MGNPp0hqDFA1Wpt3jE1gEyM3nKUvOiO//9PheZZHfYRfYl6AwdTH4aTcXSqBerX0ml4A== "@esbuild/linux-mips64el@0.19.2": version "0.19.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.19.2.tgz#e0bff2898c46f52be7d4dbbcca8b887890805823" + resolved "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.19.2.tgz#e0bff2898c46f52be7d4dbbcca8b887890805823" integrity sha512-KbXaC0Sejt7vD2fEgPoIKb6nxkfYW9OmFUK9XQE4//PvGIxNIfPk1NmlHmMg6f25x57rpmEFrn1OotASYIAaTg== "@esbuild/linux-ppc64@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.17.19.tgz#876590e3acbd9fa7f57a2c7d86f83717dbbac8c7" + resolved "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.17.19.tgz#876590e3acbd9fa7f57a2c7d86f83717dbbac8c7" integrity sha512-/c/DGybs95WXNS8y3Ti/ytqETiW7EU44MEKuCAcpPto3YjQbyK3IQVKfF6nbghD7EcLUGl0NbiL5Rt5DMhn5tg== "@esbuild/linux-ppc64@0.19.2": version "0.19.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.19.2.tgz#d75798da391f54a9674f8c143b9a52d1dbfbfdde" + resolved "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.19.2.tgz#d75798da391f54a9674f8c143b9a52d1dbfbfdde" integrity sha512-dJ0kE8KTqbiHtA3Fc/zn7lCd7pqVr4JcT0JqOnbj4LLzYnp+7h8Qi4yjfq42ZlHfhOCM42rBh0EwHYLL6LEzcw== "@esbuild/linux-riscv64@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.17.19.tgz#7f49373df463cd9f41dc34f9b2262d771688bf09" + resolved 
"https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.17.19.tgz#7f49373df463cd9f41dc34f9b2262d771688bf09" integrity sha512-FC3nUAWhvFoutlhAkgHf8f5HwFWUL6bYdvLc/TTuxKlvLi3+pPzdZiFKSWz/PF30TB1K19SuCxDTI5KcqASJqA== "@esbuild/linux-riscv64@0.19.2": version "0.19.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.19.2.tgz#012409bd489ed1bb9b775541d4a46c5ded8e6dd8" + resolved "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.19.2.tgz#012409bd489ed1bb9b775541d4a46c5ded8e6dd8" integrity sha512-7Z/jKNFufZ/bbu4INqqCN6DDlrmOTmdw6D0gH+6Y7auok2r02Ur661qPuXidPOJ+FSgbEeQnnAGgsVynfLuOEw== "@esbuild/linux-s390x@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.17.19.tgz#e2afd1afcaf63afe2c7d9ceacd28ec57c77f8829" + resolved "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.17.19.tgz#e2afd1afcaf63afe2c7d9ceacd28ec57c77f8829" integrity sha512-IbFsFbxMWLuKEbH+7sTkKzL6NJmG2vRyy6K7JJo55w+8xDk7RElYn6xvXtDW8HCfoKBFK69f3pgBJSUSQPr+4Q== "@esbuild/linux-s390x@0.19.2": version "0.19.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.19.2.tgz#ece3ed75c5a150de8a5c110f02e97d315761626b" + resolved "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.19.2.tgz#ece3ed75c5a150de8a5c110f02e97d315761626b" integrity sha512-U+RinR6aXXABFCcAY4gSlv4CL1oOVvSSCdseQmGO66H+XyuQGZIUdhG56SZaDJQcLmrSfRmx5XZOWyCJPRqS7g== "@esbuild/linux-x64@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.17.19.tgz#8a0e9738b1635f0c53389e515ae83826dec22aa4" + resolved "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.17.19.tgz#8a0e9738b1635f0c53389e515ae83826dec22aa4" integrity sha512-68ngA9lg2H6zkZcyp22tsVt38mlhWde8l3eJLWkyLrp4HwMUr3c1s/M2t7+kHIhvMjglIBrFpncX1SzMckomGw== "@esbuild/linux-x64@0.19.2": version "0.19.2" - resolved 
"https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.19.2.tgz#dea187019741602d57aaf189a80abba261fbd2aa" + resolved "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.19.2.tgz#dea187019741602d57aaf189a80abba261fbd2aa" integrity sha512-oxzHTEv6VPm3XXNaHPyUTTte+3wGv7qVQtqaZCrgstI16gCuhNOtBXLEBkBREP57YTd68P0VgDgG73jSD8bwXQ== "@esbuild/netbsd-x64@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.17.19.tgz#c29fb2453c6b7ddef9a35e2c18b37bda1ae5c462" + resolved "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.17.19.tgz#c29fb2453c6b7ddef9a35e2c18b37bda1ae5c462" integrity sha512-CwFq42rXCR8TYIjIfpXCbRX0rp1jo6cPIUPSaWwzbVI4aOfX96OXY8M6KNmtPcg7QjYeDmN+DD0Wp3LaBOLf4Q== "@esbuild/netbsd-x64@0.19.2": version "0.19.2" - resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.19.2.tgz#bbfd7cf9ab236a23ee3a41b26f0628c57623d92a" + resolved "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.19.2.tgz#bbfd7cf9ab236a23ee3a41b26f0628c57623d92a" integrity sha512-WNa5zZk1XpTTwMDompZmvQLHszDDDN7lYjEHCUmAGB83Bgs20EMs7ICD+oKeT6xt4phV4NDdSi/8OfjPbSbZfQ== "@esbuild/openbsd-x64@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.17.19.tgz#95e75a391403cb10297280d524d66ce04c920691" + resolved "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.17.19.tgz#95e75a391403cb10297280d524d66ce04c920691" integrity sha512-cnq5brJYrSZ2CF6c35eCmviIN3k3RczmHz8eYaVlNasVqsNY+JKohZU5MKmaOI+KkllCdzOKKdPs762VCPC20g== "@esbuild/openbsd-x64@0.19.2": version "0.19.2" - resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.19.2.tgz#fa5c4c6ee52a360618f00053652e2902e1d7b4a7" + resolved "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.19.2.tgz#fa5c4c6ee52a360618f00053652e2902e1d7b4a7" integrity sha512-S6kI1aT3S++Dedb7vxIuUOb3oAxqxk2Rh5rOXOTYnzN8JzW1VzBd+IqPiSpgitu45042SYD3HCoEyhLKQcDFDw== 
"@esbuild/sunos-x64@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.17.19.tgz#722eaf057b83c2575937d3ffe5aeb16540da7273" + resolved "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.17.19.tgz#722eaf057b83c2575937d3ffe5aeb16540da7273" integrity sha512-vCRT7yP3zX+bKWFeP/zdS6SqdWB8OIpaRq/mbXQxTGHnIxspRtigpkUcDMlSCOejlHowLqII7K2JKevwyRP2rg== "@esbuild/sunos-x64@0.19.2": version "0.19.2" - resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.19.2.tgz#52a2ac8ac6284c02d25df22bb4cfde26fbddd68d" + resolved "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.19.2.tgz#52a2ac8ac6284c02d25df22bb4cfde26fbddd68d" integrity sha512-VXSSMsmb+Z8LbsQGcBMiM+fYObDNRm8p7tkUDMPG/g4fhFX5DEFmjxIEa3N8Zr96SjsJ1woAhF0DUnS3MF3ARw== "@esbuild/win32-arm64@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.17.19.tgz#9aa9dc074399288bdcdd283443e9aeb6b9552b6f" + resolved "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.17.19.tgz#9aa9dc074399288bdcdd283443e9aeb6b9552b6f" integrity sha512-yYx+8jwowUstVdorcMdNlzklLYhPxjniHWFKgRqH7IFlUEa0Umu3KuYplf1HUZZ422e3NU9F4LGb+4O0Kdcaag== "@esbuild/win32-arm64@0.19.2": version "0.19.2" - resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.19.2.tgz#719ed5870855de8537aef8149694a97d03486804" + resolved "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.19.2.tgz#719ed5870855de8537aef8149694a97d03486804" integrity sha512-5NayUlSAyb5PQYFAU9x3bHdsqB88RC3aM9lKDAz4X1mo/EchMIT1Q+pSeBXNgkfNmRecLXA0O8xP+x8V+g/LKg== "@esbuild/win32-ia32@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.17.19.tgz#95ad43c62ad62485e210f6299c7b2571e48d2b03" + resolved "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.17.19.tgz#95ad43c62ad62485e210f6299c7b2571e48d2b03" integrity 
sha512-eggDKanJszUtCdlVs0RB+h35wNlb5v4TWEkq4vZcmVt5u/HiDZrTXe2bWFQUez3RgNHwx/x4sk5++4NSSicKkw== "@esbuild/win32-ia32@0.19.2": version "0.19.2" - resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.19.2.tgz#24832223880b0f581962c8660f8fb8797a1e046a" + resolved "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.19.2.tgz#24832223880b0f581962c8660f8fb8797a1e046a" integrity sha512-47gL/ek1v36iN0wL9L4Q2MFdujR0poLZMJwhO2/N3gA89jgHp4MR8DKCmwYtGNksbfJb9JoTtbkoe6sDhg2QTA== "@esbuild/win32-x64@0.17.19": version "0.17.19" - resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.17.19.tgz#8cfaf2ff603e9aabb910e9c0558c26cf32744061" + resolved "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.17.19.tgz#8cfaf2ff603e9aabb910e9c0558c26cf32744061" integrity sha512-lAhycmKnVOuRYNtRtatQR1LPQf2oYCkRGkSFnseDAKPl8lu5SOsK/e1sXe5a0Pc5kHIHe6P2I/ilntNv2xf3cA== "@esbuild/win32-x64@0.19.2": version "0.19.2" - resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.19.2.tgz#1205014625790c7ff0e471644a878a65d1e34ab0" + resolved "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.19.2.tgz#1205014625790c7ff0e471644a878a65d1e34ab0" integrity sha512-tcuhV7ncXBqbt/Ybf0IyrMcwVOAPDckMK9rXNHtF17UTK18OKLpg08glminN06pt2WCoALhXdLfSPbVvK/6fxw== "@eslint-community/eslint-utils@^4.2.0", "@eslint-community/eslint-utils@^4.4.0": version "4.4.0" - resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz#a23514e8fb9af1269d5f7788aa556798d61c6b59" + resolved "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz#a23514e8fb9af1269d5f7788aa556798d61c6b59" integrity sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA== dependencies: eslint-visitor-keys "^3.3.0" -"@eslint-community/regexpp@^4.4.0": - version "4.5.1" - resolved 
"https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.5.1.tgz#cdd35dce4fa1a89a4fd42b1599eb35b3af408884" - integrity sha512-Z5ba73P98O1KUYCCJTUeVpja9RcGoMdncZ6T49FCUl2lN38JtCJ+3WgIDBv0AuY4WChU5PmtJmOCTlN6FZTFKQ== +"@eslint-community/regexpp@^4.4.0", "@eslint-community/regexpp@^4.6.1": + version "4.10.0" + resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.10.0.tgz#548f6de556857c8bb73bbee70c35dc82a2e74d63" + integrity sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA== -"@eslint/eslintrc@^2.0.3": - version "2.0.3" - resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-2.0.3.tgz#4910db5505f4d503f27774bf356e3704818a0331" - integrity sha512-+5gy6OQfk+xx3q0d6jGZZC3f3KzAkXc/IanVxd1is/VIIziRqqt3ongQz0FiTUXqTk0c7aDB3OaFuKnuSoJicQ== +"@eslint/eslintrc@^2.1.2": + version "2.1.2" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-2.1.2.tgz#c6936b4b328c64496692f76944e755738be62396" + integrity sha512-+wvgpDsrB1YqAMdEUCcnTlpfVBH7Vqn6A/NT3D8WVXFIaKMlErPIZT3oCIAVCOtarRpMtelZLqJeU3t7WY6X6g== dependencies: ajv "^6.12.4" debug "^4.3.2" - espree "^9.5.2" + espree "^9.6.0" globals "^13.19.0" ignore "^5.2.0" import-fresh "^3.2.1" @@ -667,14 +646,14 @@ minimatch "^3.1.2" strip-json-comments "^3.1.1" -"@eslint/js@8.42.0": - version "8.42.0" - resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.42.0.tgz#484a1d638de2911e6f5a30c12f49c7e4a3270fb6" - integrity sha512-6SWlXpWU5AvId8Ac7zjzmIOqMOba/JWY8XZ4A7q7Gn1Vlfg/SFFIlrtHXt9nPn4op9ZPAkl91Jao+QQv3r/ukw== +"@eslint/js@8.52.0": + version "8.52.0" + resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.52.0.tgz#78fe5f117840f69dc4a353adf9b9cd926353378c" + integrity sha512-mjZVbpaeMZludF2fsWLD0Z9gCref1Tk4i9+wddjRvpUNqqcndPkBD09N/Mapey0b3jaXbLm2kICwFv2E64QinA== "@gulp-sourcemaps/identity-map@^2.0.1": version "2.0.1" - resolved 
"https://registry.yarnpkg.com/@gulp-sourcemaps/identity-map/-/identity-map-2.0.1.tgz#a6e8b1abec8f790ec6be2b8c500e6e68037c0019" + resolved "https://registry.npmjs.org/@gulp-sourcemaps/identity-map/-/identity-map-2.0.1.tgz#a6e8b1abec8f790ec6be2b8c500e6e68037c0019" integrity sha512-Tb+nSISZku+eQ4X1lAkevcQa+jknn/OVUgZ3XCxEKIsLsqYuPoJwJOPQeaOk75X3WPftb29GWY1eqE7GLsXb1Q== dependencies: acorn "^6.4.1" @@ -685,34 +664,34 @@ "@gulp-sourcemaps/map-sources@^1.0.0": version "1.0.0" - resolved "https://registry.yarnpkg.com/@gulp-sourcemaps/map-sources/-/map-sources-1.0.0.tgz#890ae7c5d8c877f6d384860215ace9d7ec945bda" + resolved "https://registry.npmjs.org/@gulp-sourcemaps/map-sources/-/map-sources-1.0.0.tgz#890ae7c5d8c877f6d384860215ace9d7ec945bda" integrity sha512-o/EatdaGt8+x2qpb0vFLC/2Gug/xYPRXb6a+ET1wGYKozKN3krDWC/zZFZAtrzxJHuDL12mwdfEFKcKMNvc55A== dependencies: normalize-path "^2.0.1" through2 "^2.0.3" -"@humanwhocodes/config-array@^0.11.10": - version "0.11.10" - resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.11.10.tgz#5a3ffe32cc9306365fb3fd572596cd602d5e12d2" - integrity sha512-KVVjQmNUepDVGXNuoRRdmmEjruj0KfiGSbS8LVc12LMsWDQzRXJ0qdhN8L8uUigKpfEHRhlaQFY0ib1tnUbNeQ== +"@humanwhocodes/config-array@^0.11.13": + version "0.11.13" + resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.11.13.tgz#075dc9684f40a531d9b26b0822153c1e832ee297" + integrity sha512-JSBDMiDKSzQVngfRjOdFXgFfklaXI4K9nLF49Auh21lmBWRLIK3+xTErTWD4KU54pb6coM6ESE7Awz/FNU3zgQ== dependencies: - "@humanwhocodes/object-schema" "^1.2.1" + "@humanwhocodes/object-schema" "^2.0.1" debug "^4.1.1" minimatch "^3.0.5" "@humanwhocodes/module-importer@^1.0.1": version "1.0.1" - resolved "https://registry.yarnpkg.com/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c" + resolved "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c" 
integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== -"@humanwhocodes/object-schema@^1.2.1": - version "1.2.1" - resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" - integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== +"@humanwhocodes/object-schema@^2.0.1": + version "2.0.1" + resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-2.0.1.tgz#e5211452df060fa8522b55c7b3c0c4d1981cb044" + integrity sha512-dvuCeX5fC9dXgJn9t+X5atfmgQAzUOWqS1254Gh0m6i8wKd10ebXkfNKiRK+1GWi/yTvvLDHpoxLr0xxxeslWw== "@isaacs/cliui@^8.0.2": version "8.0.2" - resolved "https://registry.yarnpkg.com/@isaacs/cliui/-/cliui-8.0.2.tgz#b37667b7bc181c168782259bab42474fbf52b550" + resolved "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz#b37667b7bc181c168782259bab42474fbf52b550" integrity sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA== dependencies: string-width "^5.1.2" @@ -724,7 +703,7 @@ "@istanbuljs/load-nyc-config@^1.0.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" + resolved "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== dependencies: camelcase "^5.3.1" @@ -735,112 +714,112 @@ "@istanbuljs/schema@^0.1.2": version "0.1.3" - resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" + resolved "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" integrity 
sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== -"@jest/console@^29.6.2": - version "29.6.2" - resolved "https://registry.yarnpkg.com/@jest/console/-/console-29.6.2.tgz#bf1d4101347c23e07c029a1b1ae07d550f5cc541" - integrity sha512-0N0yZof5hi44HAR2pPS+ikJ3nzKNoZdVu8FffRf3wy47I7Dm7etk/3KetMdRUqzVd16V4O2m2ISpNTbnIuqy1w== +"@jest/console@^29.7.0": + version "29.7.0" + resolved "https://registry.npmjs.org/@jest/console/-/console-29.7.0.tgz#cd4822dbdb84529265c5a2bdb529a3c9cc950ffc" + integrity sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg== dependencies: - "@jest/types" "^29.6.1" + "@jest/types" "^29.6.3" "@types/node" "*" chalk "^4.0.0" - jest-message-util "^29.6.2" - jest-util "^29.6.2" + jest-message-util "^29.7.0" + jest-util "^29.7.0" slash "^3.0.0" -"@jest/core@^29.6.2": - version "29.6.2" - resolved "https://registry.yarnpkg.com/@jest/core/-/core-29.6.2.tgz#6f2d1dbe8aa0265fcd4fb8082ae1952f148209c8" - integrity sha512-Oj+5B+sDMiMWLhPFF+4/DvHOf+U10rgvCLGPHP8Xlsy/7QxS51aU/eBngudHlJXnaWD5EohAgJ4js+T6pa+zOg== +"@jest/core@^29.6.2", "@jest/core@^29.7.0": + version "29.7.0" + resolved "https://registry.npmjs.org/@jest/core/-/core-29.7.0.tgz#b6cccc239f30ff36609658c5a5e2291757ce448f" + integrity sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg== dependencies: - "@jest/console" "^29.6.2" - "@jest/reporters" "^29.6.2" - "@jest/test-result" "^29.6.2" - "@jest/transform" "^29.6.2" - "@jest/types" "^29.6.1" + "@jest/console" "^29.7.0" + "@jest/reporters" "^29.7.0" + "@jest/test-result" "^29.7.0" + "@jest/transform" "^29.7.0" + "@jest/types" "^29.6.3" "@types/node" "*" ansi-escapes "^4.2.1" chalk "^4.0.0" ci-info "^3.2.0" exit "^0.1.2" graceful-fs "^4.2.9" - jest-changed-files "^29.5.0" - jest-config "^29.6.2" - jest-haste-map "^29.6.2" - jest-message-util "^29.6.2" - jest-regex-util "^29.4.3" - jest-resolve "^29.6.2" - 
jest-resolve-dependencies "^29.6.2" - jest-runner "^29.6.2" - jest-runtime "^29.6.2" - jest-snapshot "^29.6.2" - jest-util "^29.6.2" - jest-validate "^29.6.2" - jest-watcher "^29.6.2" + jest-changed-files "^29.7.0" + jest-config "^29.7.0" + jest-haste-map "^29.7.0" + jest-message-util "^29.7.0" + jest-regex-util "^29.6.3" + jest-resolve "^29.7.0" + jest-resolve-dependencies "^29.7.0" + jest-runner "^29.7.0" + jest-runtime "^29.7.0" + jest-snapshot "^29.7.0" + jest-util "^29.7.0" + jest-validate "^29.7.0" + jest-watcher "^29.7.0" micromatch "^4.0.4" - pretty-format "^29.6.2" + pretty-format "^29.7.0" slash "^3.0.0" strip-ansi "^6.0.0" -"@jest/environment@^29.6.2": - version "29.6.2" - resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-29.6.2.tgz#794c0f769d85e7553439d107d3f43186dc6874a9" - integrity sha512-AEcW43C7huGd/vogTddNNTDRpO6vQ2zaQNrttvWV18ArBx9Z56h7BIsXkNFJVOO4/kblWEQz30ckw0+L3izc+Q== +"@jest/environment@^29.7.0": + version "29.7.0" + resolved "https://registry.npmjs.org/@jest/environment/-/environment-29.7.0.tgz#24d61f54ff1f786f3cd4073b4b94416383baf2a7" + integrity sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw== dependencies: - "@jest/fake-timers" "^29.6.2" - "@jest/types" "^29.6.1" + "@jest/fake-timers" "^29.7.0" + "@jest/types" "^29.6.3" "@types/node" "*" - jest-mock "^29.6.2" + jest-mock "^29.7.0" -"@jest/expect-utils@^29.6.2": - version "29.6.2" - resolved "https://registry.yarnpkg.com/@jest/expect-utils/-/expect-utils-29.6.2.tgz#1b97f290d0185d264dd9fdec7567a14a38a90534" - integrity sha512-6zIhM8go3RV2IG4aIZaZbxwpOzz3ZiM23oxAlkquOIole+G6TrbeXnykxWYlqF7kz2HlBjdKtca20x9atkEQYg== +"@jest/expect-utils@^29.7.0": + version "29.7.0" + resolved "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.7.0.tgz#023efe5d26a8a70f21677d0a1afc0f0a44e3a1c6" + integrity sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA== dependencies: - jest-get-type 
"^29.4.3" + jest-get-type "^29.6.3" -"@jest/expect@^29.6.2": - version "29.6.2" - resolved "https://registry.yarnpkg.com/@jest/expect/-/expect-29.6.2.tgz#5a2ad58bb345165d9ce0a1845bbf873c480a4b28" - integrity sha512-m6DrEJxVKjkELTVAztTLyS/7C92Y2b0VYqmDROYKLLALHn8T/04yPs70NADUYPrV3ruI+H3J0iUIuhkjp7vkfg== +"@jest/expect@^29.7.0": + version "29.7.0" + resolved "https://registry.npmjs.org/@jest/expect/-/expect-29.7.0.tgz#76a3edb0cb753b70dfbfe23283510d3d45432bf2" + integrity sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ== dependencies: - expect "^29.6.2" - jest-snapshot "^29.6.2" + expect "^29.7.0" + jest-snapshot "^29.7.0" -"@jest/fake-timers@^29.6.2": - version "29.6.2" - resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-29.6.2.tgz#fe9d43c5e4b1b901168fe6f46f861b3e652a2df4" - integrity sha512-euZDmIlWjm1Z0lJ1D0f7a0/y5Kh/koLFMUBE5SUYWrmy8oNhJpbTBDAP6CxKnadcMLDoDf4waRYCe35cH6G6PA== +"@jest/fake-timers@^29.7.0": + version "29.7.0" + resolved "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-29.7.0.tgz#fd91bf1fffb16d7d0d24a426ab1a47a49881a565" + integrity sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ== dependencies: - "@jest/types" "^29.6.1" + "@jest/types" "^29.6.3" "@sinonjs/fake-timers" "^10.0.2" "@types/node" "*" - jest-message-util "^29.6.2" - jest-mock "^29.6.2" - jest-util "^29.6.2" + jest-message-util "^29.7.0" + jest-mock "^29.7.0" + jest-util "^29.7.0" -"@jest/globals@^29.6.2": - version "29.6.2" - resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-29.6.2.tgz#74af81b9249122cc46f1eb25793617eec69bf21a" - integrity sha512-cjuJmNDjs6aMijCmSa1g2TNG4Lby/AeU7/02VtpW+SLcZXzOLK2GpN2nLqcFjmhy3B3AoPeQVx7BnyOf681bAw== +"@jest/globals@^29.7.0": + version "29.7.0" + resolved "https://registry.npmjs.org/@jest/globals/-/globals-29.7.0.tgz#8d9290f9ec47ff772607fa864ca1d5a2efae1d4d" + integrity 
sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ== dependencies: - "@jest/environment" "^29.6.2" - "@jest/expect" "^29.6.2" - "@jest/types" "^29.6.1" - jest-mock "^29.6.2" + "@jest/environment" "^29.7.0" + "@jest/expect" "^29.7.0" + "@jest/types" "^29.6.3" + jest-mock "^29.7.0" -"@jest/reporters@^29.6.2": - version "29.6.2" - resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-29.6.2.tgz#524afe1d76da33d31309c2c4a2c8062d0c48780a" - integrity sha512-sWtijrvIav8LgfJZlrGCdN0nP2EWbakglJY49J1Y5QihcQLfy7ovyxxjJBRXMNltgt4uPtEcFmIMbVshEDfFWw== +"@jest/reporters@^29.7.0": + version "29.7.0" + resolved "https://registry.npmjs.org/@jest/reporters/-/reporters-29.7.0.tgz#04b262ecb3b8faa83b0b3d321623972393e8f4c7" + integrity sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg== dependencies: "@bcoe/v8-coverage" "^0.2.3" - "@jest/console" "^29.6.2" - "@jest/test-result" "^29.6.2" - "@jest/transform" "^29.6.2" - "@jest/types" "^29.6.1" + "@jest/console" "^29.7.0" + "@jest/test-result" "^29.7.0" + "@jest/transform" "^29.7.0" + "@jest/types" "^29.6.3" "@jridgewell/trace-mapping" "^0.3.18" "@types/node" "*" chalk "^4.0.0" @@ -849,70 +828,70 @@ glob "^7.1.3" graceful-fs "^4.2.9" istanbul-lib-coverage "^3.0.0" - istanbul-lib-instrument "^5.1.0" + istanbul-lib-instrument "^6.0.0" istanbul-lib-report "^3.0.0" istanbul-lib-source-maps "^4.0.0" istanbul-reports "^3.1.3" - jest-message-util "^29.6.2" - jest-util "^29.6.2" - jest-worker "^29.6.2" + jest-message-util "^29.7.0" + jest-util "^29.7.0" + jest-worker "^29.7.0" slash "^3.0.0" string-length "^4.0.1" strip-ansi "^6.0.0" v8-to-istanbul "^9.0.1" -"@jest/schemas@^29.6.0": - version "29.6.0" - resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-29.6.0.tgz#0f4cb2c8e3dca80c135507ba5635a4fd755b0040" - integrity sha512-rxLjXyJBTL4LQeJW3aKo0M/+GkCOXsO+8i9Iu7eDb6KwtP65ayoDsitrdPBtujxQ88k4wI2FNYfa6TOGwSn6cQ== +"@jest/schemas@^29.6.3": 
+ version "29.6.3" + resolved "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz#430b5ce8a4e0044a7e3819663305a7b3091c8e03" + integrity sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA== dependencies: "@sinclair/typebox" "^0.27.8" -"@jest/source-map@^29.6.0": - version "29.6.0" - resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-29.6.0.tgz#bd34a05b5737cb1a99d43e1957020ac8e5b9ddb1" - integrity sha512-oA+I2SHHQGxDCZpbrsCQSoMLb3Bz547JnM+jUr9qEbuw0vQlWZfpPS7CO9J7XiwKicEz9OFn/IYoLkkiUD7bzA== +"@jest/source-map@^29.6.3": + version "29.6.3" + resolved "https://registry.npmjs.org/@jest/source-map/-/source-map-29.6.3.tgz#d90ba772095cf37a34a5eb9413f1b562a08554c4" + integrity sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw== dependencies: "@jridgewell/trace-mapping" "^0.3.18" callsites "^3.0.0" graceful-fs "^4.2.9" -"@jest/test-result@^29.6.2": - version "29.6.2" - resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-29.6.2.tgz#fdd11583cd1608e4db3114e8f0cce277bf7a32ed" - integrity sha512-3VKFXzcV42EYhMCsJQURptSqnyjqCGbtLuX5Xxb6Pm6gUf1wIRIl+mandIRGJyWKgNKYF9cnstti6Ls5ekduqw== +"@jest/test-result@^29.7.0": + version "29.7.0" + resolved "https://registry.npmjs.org/@jest/test-result/-/test-result-29.7.0.tgz#8db9a80aa1a097bb2262572686734baed9b1657c" + integrity sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA== dependencies: - "@jest/console" "^29.6.2" - "@jest/types" "^29.6.1" + "@jest/console" "^29.7.0" + "@jest/types" "^29.6.3" "@types/istanbul-lib-coverage" "^2.0.0" collect-v8-coverage "^1.0.0" -"@jest/test-sequencer@^29.6.2": - version "29.6.2" - resolved "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-29.6.2.tgz#585eff07a68dd75225a7eacf319780cb9f6b9bf4" - integrity sha512-GVYi6PfPwVejO7slw6IDO0qKVum5jtrJ3KoLGbgBWyr2qr4GaxFV6su+ZAjdTX75Sr1DkMFRk09r2ZVa+wtCGw== 
+"@jest/test-sequencer@^29.7.0": + version "29.7.0" + resolved "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz#6cef977ce1d39834a3aea887a1726628a6f072ce" + integrity sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw== dependencies: - "@jest/test-result" "^29.6.2" + "@jest/test-result" "^29.7.0" graceful-fs "^4.2.9" - jest-haste-map "^29.6.2" + jest-haste-map "^29.7.0" slash "^3.0.0" -"@jest/transform@^29.6.2": - version "29.6.2" - resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-29.6.2.tgz#522901ebbb211af08835bc3bcdf765ab778094e3" - integrity sha512-ZqCqEISr58Ce3U+buNFJYUktLJZOggfyvR+bZMaiV1e8B1SIvJbwZMrYz3gx/KAPn9EXmOmN+uB08yLCjWkQQg== +"@jest/transform@^29.7.0": + version "29.7.0" + resolved "https://registry.npmjs.org/@jest/transform/-/transform-29.7.0.tgz#df2dd9c346c7d7768b8a06639994640c642e284c" + integrity sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw== dependencies: "@babel/core" "^7.11.6" - "@jest/types" "^29.6.1" + "@jest/types" "^29.6.3" "@jridgewell/trace-mapping" "^0.3.18" babel-plugin-istanbul "^6.1.1" chalk "^4.0.0" convert-source-map "^2.0.0" fast-json-stable-stringify "^2.1.0" graceful-fs "^4.2.9" - jest-haste-map "^29.6.2" - jest-regex-util "^29.4.3" - jest-util "^29.6.2" + jest-haste-map "^29.7.0" + jest-regex-util "^29.6.3" + jest-util "^29.7.0" micromatch "^4.0.4" pirates "^4.0.4" slash "^3.0.0" @@ -920,7 +899,7 @@ "@jest/types@^26.6.2": version "26.6.2" - resolved "https://registry.yarnpkg.com/@jest/types/-/types-26.6.2.tgz#bef5a532030e1d88a2f5a6d933f84e97226ed48e" + resolved "https://registry.npmjs.org/@jest/types/-/types-26.6.2.tgz#bef5a532030e1d88a2f5a6d933f84e97226ed48e" integrity sha512-fC6QCp7Sc5sX6g8Tvbmj4XUTbyrik0akgRy03yjXbQaBWWNWGE7SGtJk98m0N8nzegD/7SggrUlivxo5ax4KWQ== dependencies: "@types/istanbul-lib-coverage" "^2.0.0" @@ -929,12 +908,12 @@ "@types/yargs" "^15.0.0" chalk "^4.0.0" 
-"@jest/types@^29.6.1": - version "29.6.1" - resolved "https://registry.yarnpkg.com/@jest/types/-/types-29.6.1.tgz#ae79080278acff0a6af5eb49d063385aaa897bf2" - integrity sha512-tPKQNMPuXgvdOn2/Lg9HNfUvjYVGolt04Hp03f5hAk878uwOLikN+JzeLY0HcVgKgFl9Hs3EIqpu3WX27XNhnw== +"@jest/types@^29.6.1", "@jest/types@^29.6.3": + version "29.6.3" + resolved "https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz#1131f8cf634e7e84c5e77bab12f052af585fba59" + integrity sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw== dependencies: - "@jest/schemas" "^29.6.0" + "@jest/schemas" "^29.6.3" "@types/istanbul-lib-coverage" "^2.0.0" "@types/istanbul-reports" "^3.0.0" "@types/node" "*" @@ -943,65 +922,47 @@ "@jridgewell/gen-mapping@^0.3.0", "@jridgewell/gen-mapping@^0.3.2": version "0.3.3" - resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz#7e02e6eb5df901aaedb08514203b096614024098" + resolved "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz#7e02e6eb5df901aaedb08514203b096614024098" integrity sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ== dependencies: "@jridgewell/set-array" "^1.0.1" "@jridgewell/sourcemap-codec" "^1.4.10" "@jridgewell/trace-mapping" "^0.3.9" -"@jridgewell/resolve-uri@3.1.0": - version "3.1.0" - resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" - integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== - "@jridgewell/resolve-uri@^3.0.3", "@jridgewell/resolve-uri@^3.1.0": version "3.1.1" - resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz#c08679063f279615a3326583ba3a90d1d82cc721" + resolved "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz#c08679063f279615a3326583ba3a90d1d82cc721" integrity 
sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA== "@jridgewell/set-array@^1.0.1": version "1.1.2" - resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + resolved "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== "@jridgewell/source-map@^0.3.3": - version "0.3.3" - resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.3.tgz#8108265659d4c33e72ffe14e33d6cc5eb59f2fda" - integrity sha512-b+fsZXeLYi9fEULmfBrhxn4IrPlINf8fiNarzTof004v3lFdntdwa9PF7vFJqm3mg7s+ScJMxXaE3Acp1irZcg== + version "0.3.5" + resolved "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.5.tgz#a3bb4d5c6825aab0d281268f47f6ad5853431e91" + integrity sha512-UTYAUj/wviwdsMfzoSJspJxbkH5o1snzwX0//0ENX1u/55kkZZkcTZP6u9bwKGkv+dkk9at4m1Cpt0uY80kcpQ== dependencies: "@jridgewell/gen-mapping" "^0.3.0" "@jridgewell/trace-mapping" "^0.3.9" -"@jridgewell/sourcemap-codec@1.4.14": - version "1.4.14" - resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" - integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== - "@jridgewell/sourcemap-codec@^1.4.10", "@jridgewell/sourcemap-codec@^1.4.14": version "1.4.15" - resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" + resolved "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== "@jridgewell/trace-mapping@0.3.9": version "0.3.9" - resolved 
"https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9" + resolved "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9" integrity sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ== dependencies: "@jridgewell/resolve-uri" "^3.0.3" "@jridgewell/sourcemap-codec" "^1.4.10" -"@jridgewell/trace-mapping@^0.3.12", "@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.9": - version "0.3.18" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz#25783b2086daf6ff1dcb53c9249ae480e4dd4cd6" - integrity sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA== - dependencies: - "@jridgewell/resolve-uri" "3.1.0" - "@jridgewell/sourcemap-codec" "1.4.14" - -"@jridgewell/trace-mapping@^0.3.18": +"@jridgewell/trace-mapping@^0.3.12", "@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.18", "@jridgewell/trace-mapping@^0.3.9": version "0.3.19" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.19.tgz#f8a3249862f91be48d3127c3cfe992f79b4b8811" + resolved "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.19.tgz#f8a3249862f91be48d3127c3cfe992f79b4b8811" integrity sha512-kf37QtfW+Hwx/buWGMPcR60iF9ziHa6r/CZJIHbmcm4+0qrXiVdxegAH0F6yddEVQ7zdkjcGCgCzUu+BcbhQxw== dependencies: "@jridgewell/resolve-uri" "^3.1.0" @@ -1009,7 +970,7 @@ "@nodelib/fs.scandir@2.1.5": version "2.1.5" - resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + resolved "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== dependencies: 
"@nodelib/fs.stat" "2.0.5" @@ -1017,12 +978,12 @@ "@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": version "2.0.5" - resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + resolved "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== "@nodelib/fs.walk@^1.2.3", "@nodelib/fs.walk@^1.2.8": version "1.2.8" - resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + resolved "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== dependencies: "@nodelib/fs.scandir" "2.1.5" @@ -1030,29 +991,29 @@ "@openpgp/web-stream-tools@0.0.13": version "0.0.13" - resolved "https://registry.yarnpkg.com/@openpgp/web-stream-tools/-/web-stream-tools-0.0.13.tgz#f0be50120c152efb11b65df29ab482dc192dbbd7" + resolved "https://registry.npmjs.org/@openpgp/web-stream-tools/-/web-stream-tools-0.0.13.tgz#f0be50120c152efb11b65df29ab482dc192dbbd7" integrity sha512-VQ0O0lUcD9ilLcMLQMJMgPhp8fDgMd4copd+UhSBGjud0vbI1ONQ3ffAhixEMml/AApLJtqCpd7PJcccPliFSA== "@pkgjs/parseargs@^0.11.0": version "0.11.0" - resolved "https://registry.yarnpkg.com/@pkgjs/parseargs/-/parseargs-0.11.0.tgz#a77ea742fab25775145434eb1d2328cf5013ac33" + resolved "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz#a77ea742fab25775145434eb1d2328cf5013ac33" integrity sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg== "@polka/url@^1.0.0-next.20": - version "1.0.0-next.21" - resolved "https://registry.yarnpkg.com/@polka/url/-/url-1.0.0-next.21.tgz#5de5a2385a35309427f6011992b544514d559aa1" - integrity 
sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g== + version "1.0.0-next.23" + resolved "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.23.tgz#498e41218ab3b6a1419c735e5c6ae2c5ed609b6c" + integrity sha512-C16M+IYz0rgRhWZdCmK+h58JMv8vijAA61gmz2rspCSwKwzBebpdcsiUmwrtJRdphuY30i6BSLEOP8ppbNLyLg== "@rollup/plugin-alias@5.0.0": version "5.0.0" - resolved "https://registry.yarnpkg.com/@rollup/plugin-alias/-/plugin-alias-5.0.0.tgz#70f3d504bd17d8922e35c6b61c08b40a6ec25af2" + resolved "https://registry.npmjs.org/@rollup/plugin-alias/-/plugin-alias-5.0.0.tgz#70f3d504bd17d8922e35c6b61c08b40a6ec25af2" integrity sha512-l9hY5chSCjuFRPsnRm16twWBiSApl2uYFLsepQYwtBuAxNMQ/1dJqADld40P0Jkqm65GRTLy/AC6hnpVebtLsA== dependencies: slash "^4.0.0" "@rollup/plugin-node-resolve@15.1.0": version "15.1.0" - resolved "https://registry.yarnpkg.com/@rollup/plugin-node-resolve/-/plugin-node-resolve-15.1.0.tgz#9ffcd8e8c457080dba89bb9fcb583a6778dc757e" + resolved "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-15.1.0.tgz#9ffcd8e8c457080dba89bb9fcb583a6778dc757e" integrity sha512-xeZHCgsiZ9pzYVgAo9580eCGqwh/XCEUM9q6iQfGNocjgkufHAqC3exA+45URvhiYV8sBF9RlBai650eNs7AsA== dependencies: "@rollup/pluginutils" "^5.0.1" @@ -1063,61 +1024,201 @@ resolve "^1.22.1" "@rollup/pluginutils@^5.0.1": - version "5.0.2" - resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-5.0.2.tgz#012b8f53c71e4f6f9cb317e311df1404f56e7a33" - integrity sha512-pTd9rIsP92h+B6wWwFbW8RkZv4hiR/xKsqre4SIuAOaOEQRxi0lqLke9k2/7WegC85GgUs9pjmOjCUi3In4vwA== + version "5.0.4" + resolved "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.0.4.tgz#74f808f9053d33bafec0cc98e7b835c9667d32ba" + integrity sha512-0KJnIoRI8A+a1dqOYLxH8vBf8bphDmty5QvIm2hqm7oFCFYKCAZWWd2hXgMibaPsNDhI0AtpYfQZJG47pt/k4g== dependencies: "@types/estree" "^1.0.0" estree-walker "^2.0.2" picomatch "^2.3.1" -"@rollup/stream@3.0.0": - version "3.0.0" - resolved 
"https://registry.yarnpkg.com/@rollup/stream/-/stream-3.0.0.tgz#8578177aa992e1e7d932d7b8b9ac302ce272663e" - integrity sha512-rCctaa32QWDmbO/NyZPE3fKbpi4RHmR3N/mPvCYaig7ieSsDg4uSudKkIsaDHuLuYpvNII8kPwgXs1k077lAPg== +"@rollup/rollup-android-arm-eabi@4.3.0": + version "4.3.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.3.0.tgz#8ad8a660b18f1a24ad4a272738a65ac4788a8811" + integrity sha512-/4pns6BYi8MXdwnXM44yoGAcFYVHL/BYlB2q1HXZ6AzH++LaiEVWFpBWQ/glXhbMbv3E3o09igrHFbP/snhAvA== + +"@rollup/rollup-android-arm64@4.3.0": + version "4.3.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.3.0.tgz#17b0f412034d14668c8acc8b7cbd8b1c76279599" + integrity sha512-nLO/JsL9idr416vzi3lHm3Xm+QZh4qHij8k3Er13kZr5YhL7/+kBAx84kDmPc7HMexLmwisjDCeDIKNFp8mDlQ== + +"@rollup/rollup-darwin-arm64@4.3.0": + version "4.3.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.3.0.tgz#80c4a4dd7b120906d4e655808fb9005784a8bf35" + integrity sha512-dGhVBlllt4iHwTGy21IEoMOTN5wZoid19zEIxsdY29xcEiOEHqzDa7Sqrkh5OE7LKCowL61eFJXxYe/+pYa7ZQ== + +"@rollup/rollup-darwin-x64@4.3.0": + version "4.3.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.3.0.tgz#52ad0db40d9b5ae047dfc08e54e4b3f42feaef82" + integrity sha512-h8wRfHeLEbU3NzaP1Oku7BYXCJQiTRr+8U0lklyOQXxXiEpHLL8tk1hFl+tezoRKLcPJD7joKaK74ASsqt3Ekg== + +"@rollup/rollup-linux-arm-gnueabihf@4.3.0": + version "4.3.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.3.0.tgz#2ad3d190af01d7fc8704e8e782c4a24006a9f21a" + integrity sha512-wP4VgR/gfV18sylTuym3sxRTkAgUR2vh6YLeX/GEznk5jCYcYSlx585XlcUcl0c8UffIZlRJ09raWSX3JDb4GA== + +"@rollup/rollup-linux-arm64-gnu@4.3.0": + version "4.3.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.3.0.tgz#4f7ba42f779f06e93876755b7393c61676e2958a" + 
integrity sha512-v/14JCYVkqRSJeQbxFx4oUkwVQQw6lFMN7bd4vuARBc3X2lmomkxBsc+BFiIDL/BK+CTx5AOh/k9XmqDnKWRVg== + +"@rollup/rollup-linux-arm64-musl@4.3.0": + version "4.3.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.3.0.tgz#64795a09dac02b4d779819509a793b93ba7e4c0d" + integrity sha512-tNhfYqFH5OxtRzfkTOKdgFYlPSZnlDLNW4+leNEvQZhwTJxoTwsZAAhR97l3qVry/kkLyJPBK+Q8EAJLPinDIg== + +"@rollup/rollup-linux-x64-gnu@4.3.0": + version "4.3.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.3.0.tgz#00c1ff131ba16881eb1a0ad46b0aa10dcacb010e" + integrity sha512-pw77m8QywdsoFdFOgmc8roF1inBI0rciqzO8ffRUgLoq7+ee9o5eFqtEcS6hHOOplgifAUUisP8cAnwl9nUYPw== + +"@rollup/rollup-linux-x64-musl@4.3.0": + version "4.3.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.3.0.tgz#89479dce5e5bf6850fbca92fa7f1637ddd70c9ef" + integrity sha512-tJs7v2MnV2F8w6X1UpPHl/43OfxjUy9SuJ2ZPoxn79v9vYteChVYO/ueLHCpRMmyTUIVML3N9z4azl9ENH8Xxg== + +"@rollup/rollup-win32-arm64-msvc@4.3.0": + version "4.3.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.3.0.tgz#1a36aba17c7efe6d61e98b8049e70b40e33b1f45" + integrity sha512-OKGxp6kATQdTyI2DF+e9s+hB3/QZB45b6e+dzcfW1SUqiF6CviWyevhmT4USsMEdP3mlpC9zxLz3Oh+WaTMOSw== + +"@rollup/rollup-win32-ia32-msvc@4.3.0": + version "4.3.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.3.0.tgz#a0b1f79afde51e390a7725b7c15ab4e0df780aea" + integrity sha512-DDZ5AH68JJ2ClQFEA1aNnfA7Ybqyeh0644rGbrLOdNehTmzfICHiWSn0OprzYi9HAshTPQvlwrM+bi2kuaIOjQ== + +"@rollup/rollup-win32-x64-msvc@4.3.0": + version "4.3.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.3.0.tgz#0b9bcc159b93c911efb5a2c39ec5d70dd0a589dc" + integrity 
sha512-dMvGV8p92GQ8jhNlGIKpyhVZPzJlT258pPrM5q2F8lKcc9Iv9BbfdnhX1OfinYWnb9ms5zLw6MlaMnqLfUkKnQ== + +"@rollup/stream@3.0.1": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@rollup/stream/-/stream-3.0.1.tgz#485452d6f1016ac1b0513060f90ff02aaca3e1c0" + integrity sha512-wdzoakLc9UiPOFa1k17ukfEtvQ0p7JuNFvOZT1DhO5Z5CrTf71An01U9+v+aebYcaLCwy3tLwpCSUF7K7xVN0A== "@sinclair/typebox@^0.27.8": version "0.27.8" - resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.27.8.tgz#6667fac16c436b5434a387a34dedb013198f6e6e" + resolved "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz#6667fac16c436b5434a387a34dedb013198f6e6e" integrity sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA== "@sinonjs/commons@^3.0.0": version "3.0.0" - resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-3.0.0.tgz#beb434fe875d965265e04722ccfc21df7f755d72" + resolved "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.0.tgz#beb434fe875d965265e04722ccfc21df7f755d72" integrity sha512-jXBtWAF4vmdNmZgD5FoKsVLv3rPgDnLgPbU84LIJ3otV44vJlDRokVng5v8NFJdCf/da9legHcKaRuZs4L7faA== dependencies: type-detect "4.0.8" "@sinonjs/fake-timers@^10.0.2": - version "10.2.0" - resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-10.2.0.tgz#b3e322a34c5f26e3184e7f6115695f299c1b1194" - integrity sha512-OPwQlEdg40HAj5KNF8WW6q2KG4Z+cBCZb3m4ninfTZKaBmbIJodviQsDBoYMPHkOyJJMHnOJo5j2+LKDOhOACg== + version "10.3.0" + resolved "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz#55fdff1ecab9f354019129daf4df0dd4d923ea66" + integrity sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA== dependencies: "@sinonjs/commons" "^3.0.0" +"@swc/core-darwin-arm64@1.3.82": + version "1.3.82" + resolved "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.82.tgz#bbf9874747b51053d8a59ea26c3e235c326f24a3" + integrity 
sha512-JfsyDW34gVKD3uE0OUpUqYvAD3yseEaicnFP6pB292THtLJb0IKBBnK50vV/RzEJtc1bR3g1kNfxo2PeurZTrA== + +"@swc/core-darwin-x64@1.3.82": + version "1.3.82" + resolved "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.3.82.tgz#145cdde16678e0d793620035783e5b413a16ac43" + integrity sha512-ogQWgNMq7qTpITjcP3dnzkFNj7bh6SwMr859GvtOTrE75H7L7jDWxESfH4f8foB/LGxBKiDNmxKhitCuAsZK4A== + +"@swc/core-linux-arm-gnueabihf@1.3.82": + version "1.3.82" + resolved "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.3.82.tgz#0c2f32c5793f2ac8e8ccf416aec84d016c30ef7b" + integrity sha512-7TMXG1lXlNhD0kUiEqs+YlGV4irAdBa2quuy+XI3oJf2fBK6dQfEq4xBy65B3khrorzQS3O0oDGQ+cmdpHExHA== + +"@swc/core-linux-arm64-gnu@1.3.82": + version "1.3.82" + resolved "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.3.82.tgz#2313d4901fa0ebdd2a0f189909073e1e8a07f1d6" + integrity sha512-26JkOujbzcItPAmIbD5vHJxQVy5ihcSu3YHTKwope1h28sApZdtE7S3e2G3gsZRTIdsCQkXUtAQeqHxGWWR3pw== + +"@swc/core-linux-arm64-musl@1.3.82": + version "1.3.82" + resolved "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.3.82.tgz#6e96cf6e52e647fecf27511d766bea90e96f8a2f" + integrity sha512-8Izj9tuuMpoc3cqiPBRtwqpO1BZ/+sfZVsEhLxrbOFlcSb8LnKyMle1g3JMMUwI4EU75RGVIzZMn8A6GOKdJbA== + +"@swc/core-linux-x64-gnu@1.3.82": + version "1.3.82" + resolved "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.3.82.tgz#6275c10d7c8c0768550bc7934c9dd8cde4881d92" + integrity sha512-0GSrIBScQwTaPv46T2qB7XnDYxndRCpwH4HMjh6FN+I+lfPUhTSJKW8AonqrqT1TbpFIgvzQs7EnTsD7AnSCow== + +"@swc/core-linux-x64-musl@1.3.82": + version "1.3.82" + resolved "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.3.82.tgz#edb98c30bd0de42bf1a63469937630d942c71988" + integrity sha512-KJUnaaepDKNzrEbwz4jv0iC3/t9x0NSoe06fnkAlhh2+NFKWKKJhVCOBTrpds8n7eylBDIXUlK34XQafjVMUdg== + +"@swc/core-win32-arm64-msvc@1.3.82": + version "1.3.82" + resolved 
"https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.3.82.tgz#0a8e9b361aac37d01f684c8a3d3e94e5f8c3b14f" + integrity sha512-TR3MHKhDYIyGyFcyl2d/p1ftceXcubAhX5wRSOdtOyr5+K/v3jbyCCqN7bbqO5o43wQVCwwR/drHleYyDZvg8Q== + +"@swc/core-win32-ia32-msvc@1.3.82": + version "1.3.82" + resolved "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.3.82.tgz#096854ff764282766271f1354ee1214358a8bf01" + integrity sha512-ZX4HzVVt6hs84YUg70UvyBJnBOIspmQQM0iXSzBvOikk3zRoN7BnDwQH4GScvevCEBuou60+i4I6d5kHLOfh8Q== + +"@swc/core-win32-x64-msvc@1.3.82": + version "1.3.82" + resolved "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.3.82.tgz#1181070bff4a13a7fcc7f1020eef1571f8c1257a" + integrity sha512-4mJMnex21kbQoaHeAmHnVwQN9/XAfPszJ6n9HI7SVH+aAHnbBIR0M59/b50/CJMjTj5niUGk7EwQ3nhVNOG32g== + +"@swc/core@1.3.82": + version "1.3.82" + resolved "https://registry.npmjs.org/@swc/core/-/core-1.3.82.tgz#8f6c53db3c23a1769b6c5085fbcb3b1df9548a40" + integrity sha512-jpC1a18HMH67018Ij2jh+hT7JBFu7ZKcQVfrZ8K6JuEY+kjXmbea07P9MbQUZbAe0FB+xi3CqEVCP73MebodJQ== + dependencies: + "@swc/types" "^0.1.4" + optionalDependencies: + "@swc/core-darwin-arm64" "1.3.82" + "@swc/core-darwin-x64" "1.3.82" + "@swc/core-linux-arm-gnueabihf" "1.3.82" + "@swc/core-linux-arm64-gnu" "1.3.82" + "@swc/core-linux-arm64-musl" "1.3.82" + "@swc/core-linux-x64-gnu" "1.3.82" + "@swc/core-linux-x64-musl" "1.3.82" + "@swc/core-win32-arm64-msvc" "1.3.82" + "@swc/core-win32-ia32-msvc" "1.3.82" + "@swc/core-win32-x64-msvc" "1.3.82" + +"@swc/helpers@^0.5.2": + version "0.5.2" + resolved "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.2.tgz#85ea0c76450b61ad7d10a37050289eded783c27d" + integrity sha512-E4KcWTpoLHqwPHLxidpOqQbcrZVgi0rsmmZXUle1jXmJfuIf/UWpczUJ7MZZ5tlxytgJXyp0w4PGkkeLiuIdZw== + dependencies: + tslib "^2.4.0" + +"@swc/types@^0.1.4": + version "0.1.4" + resolved 
"https://registry.npmjs.org/@swc/types/-/types-0.1.4.tgz#8d647e111dc97a8e2881bf71c2ee2d011698ff10" + integrity sha512-z/G02d+59gyyUb7KYhKi9jOhicek6QD2oMaotUyG+lUkybpXoV49dY9bj7Ah5Q+y7knK2jU67UTX9FyfGzaxQg== + "@tsconfig/node10@^1.0.7": version "1.0.9" - resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2" + resolved "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2" integrity sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA== "@tsconfig/node12@^1.0.7": version "1.0.11" - resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d" + resolved "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d" integrity sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag== "@tsconfig/node14@^1.0.0": version "1.0.3" - resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1" + resolved "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1" integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow== "@tsconfig/node16@^1.0.2": version "1.0.4" - resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.4.tgz#0b92dcc0cc1c81f6f306a381f28e31b1a56536e9" + resolved "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz#0b92dcc0cc1c81f6f306a381f28e31b1a56536e9" integrity sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA== "@types/babel__core@^7.1.14": version "7.20.1" - resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.20.1.tgz#916ecea274b0c776fec721e333e55762d3a9614b" + resolved 
"https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.1.tgz#916ecea274b0c776fec721e333e55762d3a9614b" integrity sha512-aACu/U/omhdk15O4Nfb+fHgH/z3QsfQzpnvRZhYhThms83ZnAOZz7zZAWO7mn2yyNQaA4xTO8GLK3uqFU4bYYw== dependencies: "@babel/parser" "^7.20.7" @@ -1128,14 +1229,14 @@ "@types/babel__generator@*": version "7.6.4" - resolved "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.4.tgz#1f20ce4c5b1990b37900b63f050182d28c2439b7" + resolved "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.4.tgz#1f20ce4c5b1990b37900b63f050182d28c2439b7" integrity sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg== dependencies: "@babel/types" "^7.0.0" "@types/babel__template@*": version "7.4.1" - resolved "https://registry.yarnpkg.com/@types/babel__template/-/babel__template-7.4.1.tgz#3d1a48fd9d6c0edfd56f2ff578daed48f36c8969" + resolved "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.1.tgz#3d1a48fd9d6c0edfd56f2ff578daed48f36c8969" integrity sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g== dependencies: "@babel/parser" "^7.1.0" @@ -1143,55 +1244,55 @@ "@types/babel__traverse@*", "@types/babel__traverse@^7.0.6": version "7.20.1" - resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.20.1.tgz#dd6f1d2411ae677dcb2db008c962598be31d6acf" + resolved "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.1.tgz#dd6f1d2411ae677dcb2db008c962598be31d6acf" integrity sha512-MitHFXnhtgwsGZWtT68URpOvLN4EREih1u3QtQiN4VdAxWKRVvGCSvw/Qth0M0Qq3pJpnGOu5JaM/ydK7OGbqg== dependencies: "@babel/types" "^7.20.7" -"@types/benchmark@2.1.2": - version "2.1.2" - resolved "https://registry.yarnpkg.com/@types/benchmark/-/benchmark-2.1.2.tgz#b7838408c93dc08ceb4e6e13147dbfbe6a151f82" - integrity sha512-EDKtLYNMKrig22jEvhXq8TBFyFgVNSPmDF2b9UzJ7+eylPqdZVo17PCUMkn1jP6/1A/0u78VqYC6VrX6b8pDWA== 
+"@types/benchmark@2.1.4": + version "2.1.4" + resolved "https://registry.yarnpkg.com/@types/benchmark/-/benchmark-2.1.4.tgz#74f331a07ca5a07a14409ccae7e8072775974361" + integrity sha512-rVCCileCU5NhP9Ix1e03sIn4gd0mpjh7VNULVQAxzF+9vddk6A5QAHzp2h5kXH8pkv1Ow45fUf3QP3wOEiISvA== -"@types/command-line-args@5.2.0": - version "5.2.0" - resolved "https://registry.yarnpkg.com/@types/command-line-args/-/command-line-args-5.2.0.tgz#adbb77980a1cc376bb208e3f4142e907410430f6" - integrity sha512-UuKzKpJJ/Ief6ufIaIzr3A/0XnluX7RvFgwkV89Yzvm77wCh1kFaFmqN8XEnGcN62EuHdedQjEMb8mYxFLGPyA== +"@types/command-line-args@^5.2.1": + version "5.2.2" + resolved "https://registry.yarnpkg.com/@types/command-line-args/-/command-line-args-5.2.2.tgz#ca93ae7045305a2b7c60936bfcb1ebc897b42270" + integrity sha512-9aZ7KzLDOBYyqH5J2bvB9edvsMXusX+H/aS8idAJOpWNmscZG5RqO1CVJPFa4Q0/1xKgvxcweXunFVx2l/dYFA== -"@types/command-line-usage@5.0.2": +"@types/command-line-usage@^5.0.2": version "5.0.2" - resolved "https://registry.yarnpkg.com/@types/command-line-usage/-/command-line-usage-5.0.2.tgz#ba5e3f6ae5a2009d466679cc431b50635bf1a064" + resolved "https://registry.npmjs.org/@types/command-line-usage/-/command-line-usage-5.0.2.tgz#ba5e3f6ae5a2009d466679cc431b50635bf1a064" integrity sha512-n7RlEEJ+4x4TS7ZQddTmNSxP+zziEG0TNsMfiRIxcIVXt71ENJ9ojeXmGO3wPoTdn7pJcU2xc3CJYMktNT6DPg== "@types/eslint-scope@^3.7.3": version "3.7.4" - resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.4.tgz#37fc1223f0786c39627068a12e94d6e6fc61de16" + resolved "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.4.tgz#37fc1223f0786c39627068a12e94d6e6fc61de16" integrity sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA== dependencies: "@types/eslint" "*" "@types/estree" "*" "@types/eslint@*": - version "8.40.1" - resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.40.1.tgz#92edc592c3575b52a8e790cd5ec04efe28f3d24c" - integrity 
sha512-vRb792M4mF1FBT+eoLecmkpLXwxsBHvWWRGJjzbYANBM6DtiJc6yETyv4rqDA6QNjF1pkj1U7LMA6dGb3VYlHw== + version "8.44.2" + resolved "https://registry.npmjs.org/@types/eslint/-/eslint-8.44.2.tgz#0d21c505f98a89b8dd4d37fa162b09da6089199a" + integrity sha512-sdPRb9K6iL5XZOmBubg8yiFp5yS/JdUDQsq5e6h95km91MCYMuvp7mh1fjPEYUhvHepKpZOjnEaMBR4PxjWDzg== dependencies: "@types/estree" "*" "@types/json-schema" "*" "@types/estree@*", "@types/estree@^1.0.0": version "1.0.1" - resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.1.tgz#aa22750962f3bf0e79d753d3cc067f010c95f194" + resolved "https://registry.npmjs.org/@types/estree/-/estree-1.0.1.tgz#aa22750962f3bf0e79d753d3cc067f010c95f194" integrity sha512-LG4opVs2ANWZ1TJoKc937iMmNstM/d0ae1vNbnBvBhqCSezgVUOzcLCqbI5elV8Vy6WKwKjaqR+zO9VKirBBCA== "@types/expect@^1.20.4": version "1.20.4" - resolved "https://registry.yarnpkg.com/@types/expect/-/expect-1.20.4.tgz#8288e51737bf7e3ab5d7c77bfa695883745264e5" + resolved "https://registry.npmjs.org/@types/expect/-/expect-1.20.4.tgz#8288e51737bf7e3ab5d7c77bfa695883745264e5" integrity sha512-Q5Vn3yjTDyCMV50TB6VRIbQNxSE4OmZR86VSbGaNpfUolm0iePBB4KdEEHmxoY5sT2+2DIvXW0rvMDP2nHZ4Mg== "@types/glob@8.1.0": version "8.1.0" - resolved "https://registry.yarnpkg.com/@types/glob/-/glob-8.1.0.tgz#b63e70155391b0584dce44e7ea25190bbc38f2fc" + resolved "https://registry.npmjs.org/@types/glob/-/glob-8.1.0.tgz#b63e70155391b0584dce44e7ea25190bbc38f2fc" integrity sha512-IO+MJPVhoqz+28h1qLAcBEH2+xHMK6MTyHJc7MTnnYb6wsoLR29POVGJ7LycmVXIqyy/4/2ShP5sUwTXuOwb/w== dependencies: "@types/minimatch" "^5.1.2" @@ -1199,33 +1300,33 @@ "@types/graceful-fs@^4.1.3": version "4.1.6" - resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.6.tgz#e14b2576a1c25026b7f02ede1de3b84c3a1efeae" + resolved "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.6.tgz#e14b2576a1c25026b7f02ede1de3b84c3a1efeae" integrity 
sha512-Sig0SNORX9fdW+bQuTEovKj3uHcUL6LQKbCrrqb1X7J6/ReAbhCXRAhc+SMejhLELFj2QcyuxmUooZ4bt5ReSw== dependencies: "@types/node" "*" "@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": version "2.0.4" - resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" + resolved "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== "@types/istanbul-lib-report@*": version "3.0.0" - resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" + resolved "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== dependencies: "@types/istanbul-lib-coverage" "*" "@types/istanbul-reports@^3.0.0": version "3.0.1" - resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff" + resolved "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff" integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw== dependencies: "@types/istanbul-lib-report" "*" "@types/jest@29.5.3": version "29.5.3" - resolved "https://registry.yarnpkg.com/@types/jest/-/jest-29.5.3.tgz#7a35dc0044ffb8b56325c6802a4781a626b05777" + resolved "https://registry.npmjs.org/@types/jest/-/jest-29.5.3.tgz#7a35dc0044ffb8b56325c6802a4781a626b05777" integrity sha512-1Nq7YrO/vJE/FYnqYyw0FS8LdrjExSgIiHyKg7xPpn+yi8Q4huZryKnkJatN1ZRH89Kw2v33/8ZMB7DuZeSLlA== dependencies: expect 
"^29.0.0" @@ -1233,62 +1334,64 @@ "@types/json-schema@*", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": version "7.0.12" - resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.12.tgz#d70faba7039d5fca54c83c7dbab41051d2b6f6cb" + resolved "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.12.tgz#d70faba7039d5fca54c83c7dbab41051d2b6f6cb" integrity sha512-Hr5Jfhc9eYOQNPYO5WLDq/n4jqijdHNlDXjuAQkkt+mWdQR+XJToOHrsD4cPaMXpn6KO7y2+wM8AZEs8VpBLVA== "@types/minimatch@^5.1.2": version "5.1.2" - resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-5.1.2.tgz#07508b45797cb81ec3f273011b054cd0755eddca" + resolved "https://registry.npmjs.org/@types/minimatch/-/minimatch-5.1.2.tgz#07508b45797cb81ec3f273011b054cd0755eddca" integrity sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA== "@types/minimist@^1.2.2": version "1.2.2" - resolved "https://registry.yarnpkg.com/@types/minimist/-/minimist-1.2.2.tgz#ee771e2ba4b3dc5b372935d549fd9617bf345b8c" + resolved "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.2.tgz#ee771e2ba4b3dc5b372935d549fd9617bf345b8c" integrity sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ== -"@types/node@*", "@types/node@20.3.0": - version "20.3.0" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.3.0.tgz#719498898d5defab83c3560f45d8498f58d11938" - integrity sha512-cumHmIAf6On83X7yP+LrsEyUOf/YlociZelmpRYaGFydoaPdxdt80MAbu6vWerQT2COCp2nPvHdsbD7tHn/YlQ== +"@types/node@*", "@types/node@^20.6.0": + version "20.8.10" + resolved "https://registry.yarnpkg.com/@types/node/-/node-20.8.10.tgz#a5448b895c753ae929c26ce85cab557c6d4a365e" + integrity sha512-TlgT8JntpcbmKUFzjhsyhGfP2fsiz1Mv56im6enJ905xG1DAYesxJaeSbGqQmAw8OWPdhyJGhGSQGKRNJ45u9w== + dependencies: + undici-types "~5.26.4" "@types/node@^13.7.4": version "13.13.52" - resolved 
"https://registry.yarnpkg.com/@types/node/-/node-13.13.52.tgz#03c13be70b9031baaed79481c0c0cfb0045e53f7" + resolved "https://registry.npmjs.org/@types/node/-/node-13.13.52.tgz#03c13be70b9031baaed79481c0c0cfb0045e53f7" integrity sha512-s3nugnZumCC//n4moGGe6tkNMyYEdaDBitVjwPxXmR5lnMG5dHePinH2EdxkG3Rh1ghFHHixAG4NJhpJW1rthQ== "@types/normalize-package-data@^2.4.0": version "2.4.1" - resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz#d3357479a0fdfdd5907fe67e17e0a85c906e1301" + resolved "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz#d3357479a0fdfdd5907fe67e17e0a85c906e1301" integrity sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw== -"@types/pad-left@2.1.1": +"@types/pad-left@^2.1.1": version "2.1.1" - resolved "https://registry.yarnpkg.com/@types/pad-left/-/pad-left-2.1.1.tgz#17d906fc75804e1cc722da73623f1d978f16a137" + resolved "https://registry.npmjs.org/@types/pad-left/-/pad-left-2.1.1.tgz#17d906fc75804e1cc722da73623f1d978f16a137" integrity sha512-Xd22WCRBydkGSApl5Bw0PhAOHKSVjNL3E3AwzKaps96IMraPqy5BvZIsBVK6JLwdybUzjHnuWVwpDd0JjTfHXA== "@types/randomatic@3.1.3": version "3.1.3" - resolved "https://registry.yarnpkg.com/@types/randomatic/-/randomatic-3.1.3.tgz#5475c29e82cb8dab6c94e55e77306c8eedab2d1f" + resolved "https://registry.npmjs.org/@types/randomatic/-/randomatic-3.1.3.tgz#5475c29e82cb8dab6c94e55e77306c8eedab2d1f" integrity sha512-UlYMg/XxN+YMh6vAiB879yh2bhaTOU0DB1g4NGIhzlaiSf22rAVKIGTvH8HjCXu+wfFvjAWHuPG5waN4btEubw== "@types/resolve@1.20.2": version "1.20.2" - resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-1.20.2.tgz#97d26e00cd4a0423b4af620abecf3e6f442b7975" + resolved "https://registry.npmjs.org/@types/resolve/-/resolve-1.20.2.tgz#97d26e00cd4a0423b4af620abecf3e6f442b7975" integrity sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q== "@types/semver@^7.3.12": - 
version "7.5.0" - resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.5.0.tgz#591c1ce3a702c45ee15f47a42ade72c2fd78978a" - integrity sha512-G8hZ6XJiHnuhQKR7ZmysCeJWE08o8T0AXtk5darsCaTVsYZhhgUrq53jizaR2FvsoeCwJhlmwTjkXBY5Pn/ZHw== + version "7.5.2" + resolved "https://registry.npmjs.org/@types/semver/-/semver-7.5.2.tgz#31f6eec1ed7ec23f4f05608d3a2d381df041f564" + integrity sha512-7aqorHYgdNO4DM36stTiGO3DvKoex9TQRwsJU6vMaFGyqpBA1MNZkz+PG3gaNUPpTAOYhT1WR7M1JyA3fbS9Cw== "@types/stack-utils@^2.0.0": version "2.0.1" - resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c" + resolved "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c" integrity sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw== "@types/vinyl@^2.0.4": version "2.0.7" - resolved "https://registry.yarnpkg.com/@types/vinyl/-/vinyl-2.0.7.tgz#9739a9a2afaf9af32761c54a0e82c735279f726c" + resolved "https://registry.npmjs.org/@types/vinyl/-/vinyl-2.0.7.tgz#9739a9a2afaf9af32761c54a0e82c735279f726c" integrity sha512-4UqPv+2567NhMQuMLdKAyK4yzrfCqwaTt6bLhHEs8PFcxbHILsrxaY63n4wgE/BRLDWDQeI+WcTmkXKExh9hQg== dependencies: "@types/expect" "^1.20.4" @@ -1296,26 +1399,26 @@ "@types/yargs-parser@*": version "21.0.0" - resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" + resolved "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA== "@types/yargs@^15.0.0": version "15.0.15" - resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-15.0.15.tgz#e609a2b1ef9e05d90489c2f5f45bbfb2be092158" + resolved "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.15.tgz#e609a2b1ef9e05d90489c2f5f45bbfb2be092158" 
integrity sha512-IziEYMU9XoVj8hWg7k+UJrXALkGFjWJhn5QFEv9q4p+v40oZhSuC135M38st8XPjICL7Ey4TV64ferBGUoJhBg== dependencies: "@types/yargs-parser" "*" "@types/yargs@^17.0.8": version "17.0.24" - resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.24.tgz#b3ef8d50ad4aa6aecf6ddc97c580a00f5aa11902" + resolved "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.24.tgz#b3ef8d50ad4aa6aecf6ddc97c580a00f5aa11902" integrity sha512-6i0aC7jV6QzQB8ne1joVZ0eSFIstHsCrobmOtghM11yGlH0j43FKL2UhWdELkyps0zuf7qVTUVCCR+tgSlyLLw== dependencies: "@types/yargs-parser" "*" "@typescript-eslint/eslint-plugin@5.59.9": version "5.59.9" - resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.59.9.tgz#2604cfaf2b306e120044f901e20c8ed926debf15" + resolved "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.59.9.tgz#2604cfaf2b306e120044f901e20c8ed926debf15" integrity sha512-4uQIBq1ffXd2YvF7MAvehWKW3zVv/w+mSfRAu+8cKbfj3nwzyqJLNcZJpQ/WZ1HLbJDiowwmQ6NO+63nCA+fqA== dependencies: "@eslint-community/regexpp" "^4.4.0" @@ -1331,7 +1434,7 @@ "@typescript-eslint/parser@5.59.9": version "5.59.9" - resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-5.59.9.tgz#a85c47ccdd7e285697463da15200f9a8561dd5fa" + resolved "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.59.9.tgz#a85c47ccdd7e285697463da15200f9a8561dd5fa" integrity sha512-FsPkRvBtcLQ/eVK1ivDiNYBjn3TGJdXy2fhXX+rc7czWl4ARwnpArwbihSOHI2Peg9WbtGHrbThfBUkZZGTtvQ== dependencies: "@typescript-eslint/scope-manager" "5.59.9" @@ -1341,15 +1444,23 @@ "@typescript-eslint/scope-manager@5.59.9": version "5.59.9" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.59.9.tgz#eadce1f2733389cdb58c49770192c0f95470d2f4" + resolved "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.59.9.tgz#eadce1f2733389cdb58c49770192c0f95470d2f4" integrity 
sha512-8RA+E+w78z1+2dzvK/tGZ2cpGigBZ58VMEHDZtpE1v+LLjzrYGc8mMaTONSxKyEkz3IuXFM0IqYiGHlCsmlZxQ== dependencies: "@typescript-eslint/types" "5.59.9" "@typescript-eslint/visitor-keys" "5.59.9" +"@typescript-eslint/scope-manager@5.62.0": + version "5.62.0" + resolved "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.62.0.tgz#d9457ccc6a0b8d6b37d0eb252a23022478c5460c" + integrity sha512-VXuvVvZeQCQb5Zgf4HAxc04q5j+WrNAtNh9OwCsCgpKqESMTu3tF/jhZ3xG6T4NZwWl65Bg8KuS2uEvhSfLl0w== + dependencies: + "@typescript-eslint/types" "5.62.0" + "@typescript-eslint/visitor-keys" "5.62.0" + "@typescript-eslint/type-utils@5.59.9": version "5.59.9" - resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-5.59.9.tgz#53bfaae2e901e6ac637ab0536d1754dfef4dafc2" + resolved "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.59.9.tgz#53bfaae2e901e6ac637ab0536d1754dfef4dafc2" integrity sha512-ksEsT0/mEHg9e3qZu98AlSrONAQtrSTljL3ow9CGej8eRo7pe+yaC/mvTjptp23Xo/xIf2mLZKC6KPv4Sji26Q== dependencies: "@typescript-eslint/typescript-estree" "5.59.9" @@ -1359,12 +1470,17 @@ "@typescript-eslint/types@5.59.9": version "5.59.9" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.59.9.tgz#3b4e7ae63718ce1b966e0ae620adc4099a6dcc52" + resolved "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.59.9.tgz#3b4e7ae63718ce1b966e0ae620adc4099a6dcc52" integrity sha512-uW8H5NRgTVneSVTfiCVffBb8AbwWSKg7qcA4Ot3JI3MPCJGsB4Db4BhvAODIIYE5mNj7Q+VJkK7JxmRhk2Lyjw== +"@typescript-eslint/types@5.62.0": + version "5.62.0" + resolved "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.62.0.tgz#258607e60effa309f067608931c3df6fed41fd2f" + integrity sha512-87NVngcbVXUahrRTqIK27gD2t5Cu1yuCXxbLcFtCzZGlfyVWWh8mLHkoxzjsB6DDNnvdL+fW8MiwPEJyGJQDgQ== + "@typescript-eslint/typescript-estree@5.59.9": version "5.59.9" - resolved 
"https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.59.9.tgz#6bfea844e468427b5e72034d33c9fffc9557392b" + resolved "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.59.9.tgz#6bfea844e468427b5e72034d33c9fffc9557392b" integrity sha512-pmM0/VQ7kUhd1QyIxgS+aRvMgw+ZljB3eDb+jYyp6d2bC0mQWLzUDF+DLwCTkQ3tlNyVsvZRXjFyV0LkU/aXjA== dependencies: "@typescript-eslint/types" "5.59.9" @@ -1375,9 +1491,22 @@ semver "^7.3.7" tsutils "^3.21.0" -"@typescript-eslint/utils@5.59.9", "@typescript-eslint/utils@^5.10.0": +"@typescript-eslint/typescript-estree@5.62.0": + version "5.62.0" + resolved "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.62.0.tgz#7d17794b77fabcac615d6a48fb143330d962eb9b" + integrity sha512-CmcQ6uY7b9y694lKdRB8FEel7JbU/40iSAPomu++SjLMntB+2Leay2LO6i8VnJk58MtE9/nQSFIH6jpyRWyYzA== + dependencies: + "@typescript-eslint/types" "5.62.0" + "@typescript-eslint/visitor-keys" "5.62.0" + debug "^4.3.4" + globby "^11.1.0" + is-glob "^4.0.3" + semver "^7.3.7" + tsutils "^3.21.0" + +"@typescript-eslint/utils@5.59.9": version "5.59.9" - resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-5.59.9.tgz#adee890107b5ffe02cd46fdaa6c2125fb3c6c7c4" + resolved "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.59.9.tgz#adee890107b5ffe02cd46fdaa6c2125fb3c6c7c4" integrity sha512-1PuMYsju/38I5Ggblaeb98TOoUvjhRvLpLa1DoTOFaLWqaXl/1iQ1eGurTXgBY58NUdtfTXKP5xBq7q9NDaLKg== dependencies: "@eslint-community/eslint-utils" "^4.2.0" @@ -1389,17 +1518,44 @@ eslint-scope "^5.1.1" semver "^7.3.7" +"@typescript-eslint/utils@^5.10.0": + version "5.62.0" + resolved "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.62.0.tgz#141e809c71636e4a75daa39faed2fb5f4b10df86" + integrity sha512-n8oxjeb5aIbPFEtmQxQYOLI0i9n5ySBEY/ZEHHZqKQSFnxio1rv6dthascc9dLuwrL0RC5mPCxB7vnAVGAYWAQ== + dependencies: + "@eslint-community/eslint-utils" "^4.2.0" + "@types/json-schema" 
"^7.0.9" + "@types/semver" "^7.3.12" + "@typescript-eslint/scope-manager" "5.62.0" + "@typescript-eslint/types" "5.62.0" + "@typescript-eslint/typescript-estree" "5.62.0" + eslint-scope "^5.1.1" + semver "^7.3.7" + "@typescript-eslint/visitor-keys@5.59.9": version "5.59.9" - resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.59.9.tgz#9f86ef8e95aca30fb5a705bb7430f95fc58b146d" + resolved "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.59.9.tgz#9f86ef8e95aca30fb5a705bb7430f95fc58b146d" integrity sha512-bT7s0td97KMaLwpEBckbzj/YohnvXtqbe2XgqNvTl6RJVakY5mvENOTPvw5u66nljfZxthESpDozs86U+oLY8Q== dependencies: "@typescript-eslint/types" "5.59.9" eslint-visitor-keys "^3.3.0" +"@typescript-eslint/visitor-keys@5.62.0": + version "5.62.0" + resolved "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.62.0.tgz#2174011917ce582875954ffe2f6912d5931e353e" + integrity sha512-07ny+LHRzQXepkGg6w0mFY41fVUNBrL2Roj/++7V1txKugfjm/Ci/qSND03r2RhlJhJYMcTn9AhhSSqQp0Ysyw== + dependencies: + "@typescript-eslint/types" "5.62.0" + eslint-visitor-keys "^3.3.0" + +"@ungap/structured-clone@^1.2.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@ungap/structured-clone/-/structured-clone-1.2.0.tgz#756641adb587851b5ccb3e095daf27ae581c8406" + integrity sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ== + "@webassemblyjs/ast@1.11.6", "@webassemblyjs/ast@^1.11.5": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.6.tgz#db046555d3c413f8966ca50a95176a0e2c642e24" + resolved "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.6.tgz#db046555d3c413f8966ca50a95176a0e2c642e24" integrity sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q== dependencies: "@webassemblyjs/helper-numbers" "1.11.6" @@ -1407,22 +1563,22 @@ "@webassemblyjs/floating-point-hex-parser@1.11.6": version "1.11.6" 
- resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz#dacbcb95aff135c8260f77fa3b4c5fea600a6431" + resolved "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz#dacbcb95aff135c8260f77fa3b4c5fea600a6431" integrity sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw== "@webassemblyjs/helper-api-error@1.11.6": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz#6132f68c4acd59dcd141c44b18cbebbd9f2fa768" + resolved "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz#6132f68c4acd59dcd141c44b18cbebbd9f2fa768" integrity sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q== "@webassemblyjs/helper-buffer@1.11.6": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.6.tgz#b66d73c43e296fd5e88006f18524feb0f2c7c093" + resolved "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.6.tgz#b66d73c43e296fd5e88006f18524feb0f2c7c093" integrity sha512-z3nFzdcp1mb8nEOFFk8DrYLpHvhKC3grJD2ardfKOzmbmJvEf/tPIqCY+sNcwZIY8ZD7IkB2l7/pqhUhqm7hLA== "@webassemblyjs/helper-numbers@1.11.6": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz#cbce5e7e0c1bd32cf4905ae444ef64cea919f1b5" + resolved "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz#cbce5e7e0c1bd32cf4905ae444ef64cea919f1b5" integrity sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g== dependencies: "@webassemblyjs/floating-point-hex-parser" "1.11.6" @@ -1431,12 +1587,12 @@ "@webassemblyjs/helper-wasm-bytecode@1.11.6": version "1.11.6" - resolved 
"https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz#bb2ebdb3b83aa26d9baad4c46d4315283acd51e9" + resolved "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz#bb2ebdb3b83aa26d9baad4c46d4315283acd51e9" integrity sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA== "@webassemblyjs/helper-wasm-section@1.11.6": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.6.tgz#ff97f3863c55ee7f580fd5c41a381e9def4aa577" + resolved "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.6.tgz#ff97f3863c55ee7f580fd5c41a381e9def4aa577" integrity sha512-LPpZbSOwTpEC2cgn4hTydySy1Ke+XEu+ETXuoyvuyezHO3Kjdu90KK95Sh9xTbmjrCsUwvWwCOQQNta37VrS9g== dependencies: "@webassemblyjs/ast" "1.11.6" @@ -1446,26 +1602,26 @@ "@webassemblyjs/ieee754@1.11.6": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz#bb665c91d0b14fffceb0e38298c329af043c6e3a" + resolved "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz#bb665c91d0b14fffceb0e38298c329af043c6e3a" integrity sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg== dependencies: "@xtuc/ieee754" "^1.2.0" "@webassemblyjs/leb128@1.11.6": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.6.tgz#70e60e5e82f9ac81118bc25381a0b283893240d7" + resolved "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.6.tgz#70e60e5e82f9ac81118bc25381a0b283893240d7" integrity sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ== dependencies: "@xtuc/long" "4.2.2" "@webassemblyjs/utf8@1.11.6": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.6.tgz#90f8bc34c561595fe156603be7253cdbcd0fab5a" + resolved 
"https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.6.tgz#90f8bc34c561595fe156603be7253cdbcd0fab5a" integrity sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA== "@webassemblyjs/wasm-edit@^1.11.5": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.6.tgz#c72fa8220524c9b416249f3d94c2958dfe70ceab" + resolved "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.6.tgz#c72fa8220524c9b416249f3d94c2958dfe70ceab" integrity sha512-Ybn2I6fnfIGuCR+Faaz7YcvtBKxvoLV3Lebn1tM4o/IAJzmi9AWYIPWpyBfU8cC+JxAO57bk4+zdsTjJR+VTOw== dependencies: "@webassemblyjs/ast" "1.11.6" @@ -1479,7 +1635,7 @@ "@webassemblyjs/wasm-gen@1.11.6": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.6.tgz#fb5283e0e8b4551cc4e9c3c0d7184a65faf7c268" + resolved "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.6.tgz#fb5283e0e8b4551cc4e9c3c0d7184a65faf7c268" integrity sha512-3XOqkZP/y6B4F0PBAXvI1/bky7GryoogUtfwExeP/v7Nzwo1QLcq5oQmpKlftZLbT+ERUOAZVQjuNVak6UXjPA== dependencies: "@webassemblyjs/ast" "1.11.6" @@ -1490,7 +1646,7 @@ "@webassemblyjs/wasm-opt@1.11.6": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.6.tgz#d9a22d651248422ca498b09aa3232a81041487c2" + resolved "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.6.tgz#d9a22d651248422ca498b09aa3232a81041487c2" integrity sha512-cOrKuLRE7PCe6AsOVl7WasYf3wbSo4CeOk6PkrjS7g57MFfVUF9u6ysQBBODX0LdgSvQqRiGz3CXvIDKcPNy4g== dependencies: "@webassemblyjs/ast" "1.11.6" @@ -1500,7 +1656,7 @@ "@webassemblyjs/wasm-parser@1.11.6", "@webassemblyjs/wasm-parser@^1.11.5": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz#bb85378c527df824004812bbdb784eea539174a1" + resolved 
"https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz#bb85378c527df824004812bbdb784eea539174a1" integrity sha512-6ZwPeGzMJM3Dqp3hCsLgESxBGtT/OeCvCZ4TA1JUPYgmhAx38tTPR9JaKy0S5H3evQpO/h2uWs2j6Yc/fjkpTQ== dependencies: "@webassemblyjs/ast" "1.11.6" @@ -1512,7 +1668,7 @@ "@webassemblyjs/wast-printer@1.11.6": version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.6.tgz#a7bf8dd7e362aeb1668ff43f35cb849f188eff20" + resolved "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.6.tgz#a7bf8dd7e362aeb1668ff43f35cb849f188eff20" integrity sha512-JM7AhRcE+yW2GWYaKeHL5vt4xqee5N2WcezptmgyhNS+ScggqcT1OtXykhAb13Sn5Yas0j2uv9tHgrjwvzAP4A== dependencies: "@webassemblyjs/ast" "1.11.6" @@ -1520,42 +1676,42 @@ "@xtuc/ieee754@^1.2.0": version "1.2.0" - resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" + resolved "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== "@xtuc/long@4.2.2": version "4.2.2" - resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" + resolved "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== acorn-import-assertions@^1.9.0: version "1.9.0" - resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz#507276249d684797c84e0734ef84860334cfb1ac" + resolved "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz#507276249d684797c84e0734ef84860334cfb1ac" integrity sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA== acorn-jsx@^5.3.2: version 
"5.3.2" - resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + resolved "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== acorn-walk@^8.0.0, acorn-walk@^8.1.1: version "8.2.0" - resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1" + resolved "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1" integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== acorn@^6.4.1: version "6.4.2" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.4.2.tgz#35866fd710528e92de10cf06016498e47e39e1e6" + resolved "https://registry.npmjs.org/acorn/-/acorn-6.4.2.tgz#35866fd710528e92de10cf06016498e47e39e1e6" integrity sha512-XtGIhXwF8YM8bJhGxG5kXgjkEuNGLTkoYqVE+KMR+aspr4KGYmKYg7yUe3KghyQ9yheNwLnjmzh/7+gfDBmHCQ== -acorn@^8.0.4, acorn@^8.4.1, acorn@^8.7.1, acorn@^8.8.0, acorn@^8.8.2: - version "8.8.2" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.2.tgz#1b2f25db02af965399b9776b0c2c391276d37c4a" - integrity sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw== +acorn@^8.0.4, acorn@^8.4.1, acorn@^8.7.1, acorn@^8.8.2, acorn@^8.9.0: + version "8.10.0" + resolved "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz#8be5b3907a67221a81ab23c7889c4c5526b62ec5" + integrity sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw== aggregate-error@^4.0.0: version "4.0.1" - resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-4.0.1.tgz#25091fe1573b9e0be892aeda15c7c66a545f758e" + resolved "https://registry.npmjs.org/aggregate-error/-/aggregate-error-4.0.1.tgz#25091fe1573b9e0be892aeda15c7c66a545f758e" integrity 
sha512-0poP0T7el6Vq3rstR8Mn4V/IQrpBLO6POkUSrN7RhyY+GF/InCFShQzsQ39T25gkHhLgSLByyAz+Kjb+c2L98w== dependencies: clean-stack "^4.0.0" @@ -1563,12 +1719,12 @@ aggregate-error@^4.0.0: ajv-keywords@^3.5.2: version "3.5.2" - resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" + resolved "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== -ajv@^6.10.0, ajv@^6.12.4, ajv@^6.12.5: +ajv@^6.12.4, ajv@^6.12.5: version "6.12.6" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + resolved "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== dependencies: fast-deep-equal "^3.1.1" @@ -1578,82 +1734,82 @@ ajv@^6.10.0, ajv@^6.12.4, ajv@^6.12.5: ansi-colors@^1.0.1: version "1.1.0" - resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-1.1.0.tgz#6374b4dd5d4718ff3ce27a671a3b1cad077132a9" + resolved "https://registry.npmjs.org/ansi-colors/-/ansi-colors-1.1.0.tgz#6374b4dd5d4718ff3ce27a671a3b1cad077132a9" integrity sha512-SFKX67auSNoVR38N3L+nvsPjOE0bybKTYbkf5tRvushrAPQ9V75huw0ZxBkKVeRU9kqH3d6HA4xTckbwZ4ixmA== dependencies: ansi-wrap "^0.1.0" ansi-colors@^3.0.5: version "3.2.4" - resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.4.tgz#e3a3da4bfbae6c86a9c285625de124a234026fbf" + resolved "https://registry.npmjs.org/ansi-colors/-/ansi-colors-3.2.4.tgz#e3a3da4bfbae6c86a9c285625de124a234026fbf" integrity sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA== ansi-escapes@^4.2.1, ansi-escapes@^4.3.0: version "4.3.2" - resolved 
"https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" + resolved "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== dependencies: type-fest "^0.21.3" ansi-gray@^0.1.1: version "0.1.1" - resolved "https://registry.yarnpkg.com/ansi-gray/-/ansi-gray-0.1.1.tgz#2962cf54ec9792c48510a3deb524436861ef7251" + resolved "https://registry.npmjs.org/ansi-gray/-/ansi-gray-0.1.1.tgz#2962cf54ec9792c48510a3deb524436861ef7251" integrity sha512-HrgGIZUl8h2EHuZaU9hTR/cU5nhKxpVE1V6kdGsQ8e4zirElJ5fvtfc8N7Q1oq1aatO275i8pUFUCpNWCAnVWw== dependencies: ansi-wrap "0.1.0" ansi-regex@^2.0.0: version "2.1.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" + resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" integrity sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA== ansi-regex@^5.0.1: version "5.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== ansi-regex@^6.0.1: version "6.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a" + resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a" integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA== ansi-sequence-parser@^1.1.0: - version "1.1.0" - resolved 
"https://registry.yarnpkg.com/ansi-sequence-parser/-/ansi-sequence-parser-1.1.0.tgz#4d790f31236ac20366b23b3916b789e1bde39aed" - integrity sha512-lEm8mt52to2fT8GhciPCGeCXACSz2UwIN4X2e2LJSnZ5uAbn2/dsYdOmUXq0AtWS5cpAupysIneExOgH0Vd2TQ== + version "1.1.1" + resolved "https://registry.npmjs.org/ansi-sequence-parser/-/ansi-sequence-parser-1.1.1.tgz#e0aa1cdcbc8f8bb0b5bca625aac41f5f056973cf" + integrity sha512-vJXt3yiaUL4UU546s3rPXlsry/RnM730G1+HkpKE012AN0sx1eOrxSu95oKDIonskeLTijMgqWZ3uDEe3NFvyg== ansi-styles@^3.2.1: version "3.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== dependencies: color-convert "^1.9.0" ansi-styles@^4.0.0, ansi-styles@^4.1.0: version "4.3.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== dependencies: color-convert "^2.0.1" ansi-styles@^5.0.0: version "5.2.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" + resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== ansi-styles@^6.1.0: version "6.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5" + resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5" 
integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== ansi-wrap@0.1.0, ansi-wrap@^0.1.0: version "0.1.0" - resolved "https://registry.yarnpkg.com/ansi-wrap/-/ansi-wrap-0.1.0.tgz#a82250ddb0015e9a27ca82e82ea603bbfa45efaf" + resolved "https://registry.npmjs.org/ansi-wrap/-/ansi-wrap-0.1.0.tgz#a82250ddb0015e9a27ca82e82ea603bbfa45efaf" integrity sha512-ZyznvL8k/FZeQHr2T6LzcJ/+vBApDnMNZvfVFy3At0knswWd6rJ3/0Hhmpu8oqa6C92npmozs890sX9Dl6q+Qw== anymatch@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" + resolved "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" integrity sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw== dependencies: micromatch "^3.1.4" @@ -1661,7 +1817,7 @@ anymatch@^2.0.0: anymatch@^3.0.3: version "3.1.3" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" + resolved "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== dependencies: normalize-path "^3.0.0" @@ -1669,85 +1825,85 @@ anymatch@^3.0.3: append-buffer@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/append-buffer/-/append-buffer-1.0.2.tgz#d8220cf466081525efea50614f3de6514dfa58f1" + resolved "https://registry.npmjs.org/append-buffer/-/append-buffer-1.0.2.tgz#d8220cf466081525efea50614f3de6514dfa58f1" integrity sha512-WLbYiXzD3y/ATLZFufV/rZvWdZOs+Z/+5v1rBZ463Jn398pa6kcde27cvozYnBoxXblGZTFfoPpsaEw0orU5BA== dependencies: buffer-equal "^1.0.0" archy@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/archy/-/archy-1.0.0.tgz#f9c8c13757cc1dd7bc379ac77b2c62a5c2868c40" + resolved 
"https://registry.npmjs.org/archy/-/archy-1.0.0.tgz#f9c8c13757cc1dd7bc379ac77b2c62a5c2868c40" integrity sha512-Xg+9RwCg/0p32teKdGMPTPnVXKD0w3DfHnFTficozsAgsvq2XenPJq/MYpzzQ/v8zrOyJn6Ds39VA4JIDwFfqw== arg@^4.1.0: version "4.1.3" - resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" + resolved "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== arg@^5.0.2: version "5.0.2" - resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c" + resolved "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c" integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg== argparse@^1.0.7: version "1.0.10" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + resolved "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== dependencies: sprintf-js "~1.0.2" argparse@^2.0.1: version "2.0.1" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + resolved "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== arr-diff@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" + resolved "https://registry.npmjs.org/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" integrity 
sha512-YVIQ82gZPGBebQV/a8dar4AitzCQs0jjXwMPZllpXMaGjXPYVUawSxQrRsjhjupyVxEvbHgUmIhKVlND+j02kA== arr-filter@^1.1.1: version "1.1.2" - resolved "https://registry.yarnpkg.com/arr-filter/-/arr-filter-1.1.2.tgz#43fdddd091e8ef11aa4c45d9cdc18e2dff1711ee" + resolved "https://registry.npmjs.org/arr-filter/-/arr-filter-1.1.2.tgz#43fdddd091e8ef11aa4c45d9cdc18e2dff1711ee" integrity sha512-A2BETWCqhsecSvCkWAeVBFLH6sXEUGASuzkpjL3GR1SlL/PWL6M3J8EAAld2Uubmh39tvkJTqC9LeLHCUKmFXA== dependencies: make-iterator "^1.0.0" arr-flatten@^1.0.1, arr-flatten@^1.1.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" + resolved "https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== arr-map@^2.0.0, arr-map@^2.0.2: version "2.0.2" - resolved "https://registry.yarnpkg.com/arr-map/-/arr-map-2.0.2.tgz#3a77345ffc1cf35e2a91825601f9e58f2e24cac4" + resolved "https://registry.npmjs.org/arr-map/-/arr-map-2.0.2.tgz#3a77345ffc1cf35e2a91825601f9e58f2e24cac4" integrity sha512-tVqVTHt+Q5Xb09qRkbu+DidW1yYzz5izWS2Xm2yFm7qJnmUfz4HPzNxbHkdRJbz2lrqI7S+z17xNYdFcBBO8Hw== dependencies: make-iterator "^1.0.0" arr-union@^3.1.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" + resolved "https://registry.npmjs.org/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" integrity sha512-sKpyeERZ02v1FeCZT8lrfJq5u6goHCtpTAzPwJYe7c8SPFOboNjNg1vz2L4VTn9T4PQxEx13TbXLmYUcS6Ug7Q== array-back@^3.0.1, array-back@^3.1.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/array-back/-/array-back-3.1.0.tgz#b8859d7a508871c9a7b2cf42f99428f65e96bfb0" + resolved "https://registry.npmjs.org/array-back/-/array-back-3.1.0.tgz#b8859d7a508871c9a7b2cf42f99428f65e96bfb0" integrity 
sha512-TkuxA4UCOvxuDK6NZYXCalszEzj+TLszyASooky+i742l9TqsOdYCMJJupxRic61hwquNtppB3hgcuq9SVSH1Q== array-back@^6.2.2: version "6.2.2" - resolved "https://registry.yarnpkg.com/array-back/-/array-back-6.2.2.tgz#f567d99e9af88a6d3d2f9dfcc21db6f9ba9fd157" + resolved "https://registry.npmjs.org/array-back/-/array-back-6.2.2.tgz#f567d99e9af88a6d3d2f9dfcc21db6f9ba9fd157" integrity sha512-gUAZ7HPyb4SJczXAMUXMGAvI976JoK3qEx9v1FTmeYuJj0IBiaKttG1ydtGKdkfqWkIkouke7nG8ufGy77+Cvw== array-each@^1.0.0, array-each@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/array-each/-/array-each-1.0.1.tgz#a794af0c05ab1752846ee753a1f211a05ba0c44f" + resolved "https://registry.npmjs.org/array-each/-/array-each-1.0.1.tgz#a794af0c05ab1752846ee753a1f211a05ba0c44f" integrity sha512-zHjL5SZa68hkKHBFBK6DJCTtr9sfTCPCaph/L7tMSLcTFgy+zX7E+6q5UArbtOtMBCtxdICpfTCspRse+ywyXA== array-initial@^1.0.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/array-initial/-/array-initial-1.1.0.tgz#2fa74b26739371c3947bd7a7adc73be334b3d795" + resolved "https://registry.npmjs.org/array-initial/-/array-initial-1.1.0.tgz#2fa74b26739371c3947bd7a7adc73be334b3d795" integrity sha512-BC4Yl89vneCYfpLrs5JU2aAu9/a+xWbeKhvISg9PT7eWFB9UlRvI+rKEtk6mgxWr3dSkk9gQ8hCrdqt06NXPdw== dependencies: array-slice "^1.0.0" @@ -1755,19 +1911,19 @@ array-initial@^1.0.0: array-last@^1.1.1: version "1.3.0" - resolved "https://registry.yarnpkg.com/array-last/-/array-last-1.3.0.tgz#7aa77073fec565ddab2493f5f88185f404a9d336" + resolved "https://registry.npmjs.org/array-last/-/array-last-1.3.0.tgz#7aa77073fec565ddab2493f5f88185f404a9d336" integrity sha512-eOCut5rXlI6aCOS7Z7kCplKRKyiFQ6dHFBem4PwlwKeNFk2/XxTrhRh5T9PyaEWGy/NHTZWbY+nsZlNFJu9rYg== dependencies: is-number "^4.0.0" array-slice@^1.0.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/array-slice/-/array-slice-1.1.0.tgz#e368ea15f89bc7069f7ffb89aec3a6c7d4ac22d4" + resolved 
"https://registry.npmjs.org/array-slice/-/array-slice-1.1.0.tgz#e368ea15f89bc7069f7ffb89aec3a6c7d4ac22d4" integrity sha512-B1qMD3RBP7O8o0H2KbrXDyB0IccejMF15+87Lvlor12ONPRHP6gTjXMNkt/d3ZuOGbAe66hFmaCfECI24Ufp6w== array-sort@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/array-sort/-/array-sort-1.0.0.tgz#e4c05356453f56f53512a7d1d6123f2c54c0a88a" + resolved "https://registry.npmjs.org/array-sort/-/array-sort-1.0.0.tgz#e4c05356453f56f53512a7d1d6123f2c54c0a88a" integrity sha512-ihLeJkonmdiAsD7vpgN3CRcx2J2S0TiYW+IS/5zHBI7mKUq3ySvBdzzBfD236ubDBQFiiyG3SWCPc+msQ9KoYg== dependencies: default-compare "^1.0.0" @@ -1776,37 +1932,37 @@ array-sort@^1.0.0: array-union@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" + resolved "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== array-unique@^0.3.2: version "0.3.2" - resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" + resolved "https://registry.npmjs.org/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" integrity sha512-SleRWjh9JUud2wH1hPs9rZBZ33H6T9HOiL0uwGnGx9FpE6wKGyfWugmbkEOIs6qWrZhg0LWeLziLrEwQJhs5mQ== arrify@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" + resolved "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" integrity sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA== asap@~2.0.6: version "2.0.6" - resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" + resolved 
"https://registry.npmjs.org/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" integrity sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA== assign-symbols@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" + resolved "https://registry.npmjs.org/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" integrity sha512-Q+JC7Whu8HhmTdBph/Tq59IoRtoy6KAm5zzPv00WdujX82lbAL8K7WVjne7vdCsAmbF4AYaDOPyO3k0kl8qIrw== astral-regex@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31" + resolved "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31" integrity sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ== async-done@2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/async-done/-/async-done-2.0.0.tgz#f1ec5df738c6383a52b0a30d0902fd897329c15a" + resolved "https://registry.npmjs.org/async-done/-/async-done-2.0.0.tgz#f1ec5df738c6383a52b0a30d0902fd897329c15a" integrity sha512-j0s3bzYq9yKIVLKGE/tWlCpa3PfFLcrDZLTSVdnnCTGagXuXBJO4SsY9Xdk/fQBirCkH4evW5xOeJXqlAQFdsw== dependencies: end-of-stream "^1.4.4" @@ -1815,7 +1971,7 @@ async-done@2.0.0: async-done@^1.2.0, async-done@^1.2.2: version "1.3.2" - resolved "https://registry.yarnpkg.com/async-done/-/async-done-1.3.2.tgz#5e15aa729962a4b07414f528a88cdf18e0b290a2" + resolved "https://registry.npmjs.org/async-done/-/async-done-1.3.2.tgz#5e15aa729962a4b07414f528a88cdf18e0b290a2" integrity sha512-uYkTP8dw2og1tu1nmza1n1CMW0qb8gWWlwqMmLb7MhBVs4BXrFziT6HXUd+/RlRA/i4H9AkofYloUbs1fwMqlw== dependencies: end-of-stream "^1.1.0" @@ -1825,37 +1981,37 @@ async-done@^1.2.0, async-done@^1.2.2: async-each@^1.0.1: version "1.0.6" - resolved 
"https://registry.yarnpkg.com/async-each/-/async-each-1.0.6.tgz#52f1d9403818c179b7561e11a5d1b77eb2160e77" + resolved "https://registry.npmjs.org/async-each/-/async-each-1.0.6.tgz#52f1d9403818c179b7561e11a5d1b77eb2160e77" integrity sha512-c646jH1avxr+aVpndVMeAfYw7wAa6idufrlN3LPA4PmKS0QEGp6PIC9nwz0WQkkvBGAMEki3pFdtxaF39J9vvg== async-settle@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/async-settle/-/async-settle-1.0.0.tgz#1d0a914bb02575bec8a8f3a74e5080f72b2c0c6b" + resolved "https://registry.npmjs.org/async-settle/-/async-settle-1.0.0.tgz#1d0a914bb02575bec8a8f3a74e5080f72b2c0c6b" integrity sha512-VPXfB4Vk49z1LHHodrEQ6Xf7W4gg1w0dAPROHngx7qgDjqmIQ+fXmwgGXTW/ITLai0YLSvWepJOP9EVpMnEAcw== dependencies: async-done "^1.2.2" atob@^2.1.2: version "2.1.2" - resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" + resolved "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== -babel-jest@^29.6.2: - version "29.6.2" - resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-29.6.2.tgz#cada0a59e07f5acaeb11cbae7e3ba92aec9c1126" - integrity sha512-BYCzImLos6J3BH/+HvUCHG1dTf2MzmAB4jaVxHV+29RZLjR29XuYTmsf2sdDwkrb+FczkGo3kOhE7ga6sI0P4A== +babel-jest@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz#f4369919225b684c56085998ac63dbd05be020d5" + integrity sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg== dependencies: - "@jest/transform" "^29.6.2" + "@jest/transform" "^29.7.0" "@types/babel__core" "^7.1.14" babel-plugin-istanbul "^6.1.1" - babel-preset-jest "^29.5.0" + babel-preset-jest "^29.6.3" chalk "^4.0.0" graceful-fs "^4.2.9" slash "^3.0.0" babel-plugin-istanbul@^6.1.1: version "6.1.1" - resolved 
"https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" + resolved "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" @@ -1864,10 +2020,10 @@ babel-plugin-istanbul@^6.1.1: istanbul-lib-instrument "^5.0.4" test-exclude "^6.0.0" -babel-plugin-jest-hoist@^29.5.0: - version "29.5.0" - resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.5.0.tgz#a97db437936f441ec196990c9738d4b88538618a" - integrity sha512-zSuuuAlTMT4mzLj2nPnUm6fsE6270vdOfnpbJ+RmruU75UhLFvL0N2NgI7xpeS7NaB6hGqmd5pVpGTDYvi4Q3w== +babel-plugin-jest-hoist@^29.6.3: + version "29.6.3" + resolved "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.6.3.tgz#aadbe943464182a8922c3c927c3067ff40d24626" + integrity sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg== dependencies: "@babel/template" "^7.3.3" "@babel/types" "^7.3.3" @@ -1876,7 +2032,7 @@ babel-plugin-jest-hoist@^29.5.0: babel-preset-current-node-syntax@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" + resolved "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" integrity sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ== dependencies: "@babel/plugin-syntax-async-generators" "^7.8.4" @@ -1892,17 +2048,17 @@ babel-preset-current-node-syntax@^1.0.0: "@babel/plugin-syntax-optional-chaining" "^7.8.3" "@babel/plugin-syntax-top-level-await" "^7.8.3" -babel-preset-jest@^29.5.0: - 
version "29.5.0" - resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-29.5.0.tgz#57bc8cc88097af7ff6a5ab59d1cd29d52a5916e2" - integrity sha512-JOMloxOqdiBSxMAzjRaH023/vvcaSaec49zvg+2LmNsktC7ei39LTJGw02J+9uUtTZUq6xbLyJ4dxe9sSmIuAg== +babel-preset-jest@^29.6.3: + version "29.6.3" + resolved "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz#fa05fa510e7d493896d7b0dd2033601c840f171c" + integrity sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA== dependencies: - babel-plugin-jest-hoist "^29.5.0" + babel-plugin-jest-hoist "^29.6.3" babel-preset-current-node-syntax "^1.0.0" bach@^1.0.0: version "1.2.0" - resolved "https://registry.yarnpkg.com/bach/-/bach-1.2.0.tgz#4b3ce96bf27134f79a1b414a51c14e34c3bd9880" + resolved "https://registry.npmjs.org/bach/-/bach-1.2.0.tgz#4b3ce96bf27134f79a1b414a51c14e34c3bd9880" integrity sha512-bZOOfCb3gXBXbTFXq3OZtGR88LwGeJvzu6szttaIzymOTS4ZttBNOWSv7aLZja2EMycKtRYV0Oa8SNKH/zkxvg== dependencies: arr-filter "^1.1.1" @@ -1917,12 +2073,12 @@ bach@^1.0.0: balanced-match@^1.0.0: version "1.0.2" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + resolved "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== base@^0.11.1: version "0.11.2" - resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" + resolved "https://registry.npmjs.org/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== dependencies: cache-base "^1.0.1" @@ -1935,7 +2091,7 @@ base@^0.11.1: benchmark@^2.1.4: version "2.1.4" - resolved 
"https://registry.yarnpkg.com/benchmark/-/benchmark-2.1.4.tgz#09f3de31c916425d498cc2ee565a0ebf3c2a5629" + resolved "https://registry.npmjs.org/benchmark/-/benchmark-2.1.4.tgz#09f3de31c916425d498cc2ee565a0ebf3c2a5629" integrity sha512-l9MlfN4M1K/H2fbhfMy3B7vJd6AGKJVQn2h6Sg/Yx+KckoUA7ewS5Vv6TjSq18ooE1kS9hhAlQRH3AkXIh/aOQ== dependencies: lodash "^4.17.4" @@ -1943,7 +2099,7 @@ benchmark@^2.1.4: benny@3.7.1: version "3.7.1" - resolved "https://registry.yarnpkg.com/benny/-/benny-3.7.1.tgz#964aaaf877e3ab658f79705422277b8471868e37" + resolved "https://registry.npmjs.org/benny/-/benny-3.7.1.tgz#964aaaf877e3ab658f79705422277b8471868e37" integrity sha512-USzYxODdVfOS7JuQq/L0naxB788dWCiUgUTxvN+WLPt/JfcDURNNj8kN/N+uK6PDvuR67/9/55cVKGPleFQINA== dependencies: "@arrows/composition" "^1.0.0" @@ -1958,24 +2114,24 @@ benny@3.7.1: binary-extensions@^1.0.0: version "1.13.1" - resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" + resolved "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== binaryextensions@^2.2.0: version "2.3.0" - resolved "https://registry.yarnpkg.com/binaryextensions/-/binaryextensions-2.3.0.tgz#1d269cbf7e6243ea886aa41453c3651ccbe13c22" + resolved "https://registry.npmjs.org/binaryextensions/-/binaryextensions-2.3.0.tgz#1d269cbf7e6243ea886aa41453c3651ccbe13c22" integrity sha512-nAihlQsYGyc5Bwq6+EsubvANYGExeJKHDO3RjnvwU042fawQTQfM3Kxn7IHUXQOz4bzfwsGYYHGSvXyW4zOGLg== bindings@^1.5.0: version "1.5.0" - resolved "https://registry.yarnpkg.com/bindings/-/bindings-1.5.0.tgz#10353c9e945334bc0511a6d90b38fbc7c9c504df" + resolved "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz#10353c9e945334bc0511a6d90b38fbc7c9c504df" integrity sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ== 
dependencies: file-uri-to-path "1.0.0" bl@^1.2.1: version "1.2.3" - resolved "https://registry.yarnpkg.com/bl/-/bl-1.2.3.tgz#1e8dd80142eac80d7158c9dccc047fb620e035e7" + resolved "https://registry.npmjs.org/bl/-/bl-1.2.3.tgz#1e8dd80142eac80d7158c9dccc047fb620e035e7" integrity sha512-pvcNpa0UU69UT341rO6AYy4FVAIkUHuZXRIWbq+zHnsVcRzDDjIAhGuuYoi0d//cwIwtt4pkpKycWEfjdV+vww== dependencies: readable-stream "^2.3.5" @@ -1983,7 +2139,7 @@ bl@^1.2.1: brace-expansion@^1.1.7: version "1.1.11" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== dependencies: balanced-match "^1.0.0" @@ -1991,14 +2147,14 @@ brace-expansion@^1.1.7: brace-expansion@^2.0.1: version "2.0.1" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== dependencies: balanced-match "^1.0.0" braces@^2.3.1, braces@^2.3.2: version "2.3.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" + resolved "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== dependencies: arr-flatten "^1.1.0" @@ -2014,53 +2170,53 @@ braces@^2.3.1, braces@^2.3.2: braces@^3.0.2: version "3.0.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + resolved 
"https://registry.npmjs.org/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== dependencies: fill-range "^7.0.1" -browserslist@^4.14.5, browserslist@^4.21.3: - version "4.21.7" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.7.tgz#e2b420947e5fb0a58e8f4668ae6e23488127e551" - integrity sha512-BauCXrQ7I2ftSqd2mvKHGo85XR0u7Ru3C/Hxsy/0TkfCtjrmAbPdzLGasmoiBxplpDXlPvdjX9u7srIMfgasNA== +browserslist@^4.14.5, browserslist@^4.21.9: + version "4.21.10" + resolved "https://registry.npmjs.org/browserslist/-/browserslist-4.21.10.tgz#dbbac576628c13d3b2231332cb2ec5a46e015bb0" + integrity sha512-bipEBdZfVH5/pwrvqc+Ub0kUPVfGUhlKxbvfD+z1BDnPEO/X98ruXGA1WP5ASpAFKan7Qr6j736IacbZQuAlKQ== dependencies: - caniuse-lite "^1.0.30001489" - electron-to-chromium "^1.4.411" - node-releases "^2.0.12" + caniuse-lite "^1.0.30001517" + electron-to-chromium "^1.4.477" + node-releases "^2.0.13" update-browserslist-db "^1.0.11" bs-logger@0.x: version "0.2.6" - resolved "https://registry.yarnpkg.com/bs-logger/-/bs-logger-0.2.6.tgz#eb7d365307a72cf974cc6cda76b68354ad336bd8" + resolved "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz#eb7d365307a72cf974cc6cda76b68354ad336bd8" integrity sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog== dependencies: fast-json-stable-stringify "2.x" bser@2.1.1: version "2.1.1" - resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" + resolved "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== dependencies: node-int64 "^0.4.0" buffer-equal@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/buffer-equal/-/buffer-equal-1.0.1.tgz#2f7651be5b1b3f057fcd6e7ee16cf34767077d90" 
+ resolved "https://registry.npmjs.org/buffer-equal/-/buffer-equal-1.0.1.tgz#2f7651be5b1b3f057fcd6e7ee16cf34767077d90" integrity sha512-QoV3ptgEaQpvVwbXdSO39iqPQTCxSF7A5U99AxbHYqUdCizL/lH2Z0A2y6nbZucxMEOtNyZfG2s6gsVugGpKkg== buffer-from@^1.0.0: version "1.1.2" - resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" + resolved "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== builtin-modules@^3.3.0: version "3.3.0" - resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.3.0.tgz#cae62812b89801e9656336e46223e030386be7b6" + resolved "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.3.0.tgz#cae62812b89801e9656336e46223e030386be7b6" integrity sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw== cache-base@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" + resolved "https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== dependencies: collection-visit "^1.0.0" @@ -2075,7 +2231,7 @@ cache-base@^1.0.1: call-bind@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" + resolved "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== dependencies: function-bind "^1.1.1" @@ -2083,12 +2239,12 @@ call-bind@^1.0.2: callsites@^3.0.0: version "3.1.0" - resolved 
"https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + resolved "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== camelcase-keys@^7.0.0: version "7.0.2" - resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-7.0.2.tgz#d048d8c69448745bb0de6fc4c1c52a30dfbe7252" + resolved "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-7.0.2.tgz#d048d8c69448745bb0de6fc4c1c52a30dfbe7252" integrity sha512-Rjs1H+A9R+Ig+4E/9oyB66UC5Mj9Xq3N//vcLf2WzgdTi/3gUu3Z9KoqmlrEG4VuuLK8wJHofxzdQXz/knhiYg== dependencies: camelcase "^6.3.0" @@ -2098,42 +2254,42 @@ camelcase-keys@^7.0.0: camelcase@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-3.0.0.tgz#32fc4b9fcdaf845fcdf7e73bb97cac2261f0ab0a" + resolved "https://registry.npmjs.org/camelcase/-/camelcase-3.0.0.tgz#32fc4b9fcdaf845fcdf7e73bb97cac2261f0ab0a" integrity sha512-4nhGqUkc4BqbBBB4Q6zLuD7lzzrHYrjKGeYaEji/3tFR5VdJu9v+LilhGIVe8wxEJPPOeWo7eg8dwY13TZ1BNg== camelcase@^5.3.1: version "5.3.1" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + resolved "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== camelcase@^6.2.0, camelcase@^6.3.0: version "6.3.0" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" + resolved "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== -caniuse-lite@^1.0.30001489: - version "1.0.30001499" - resolved 
"https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001499.tgz#0235c127d9795c82aaf0a7f43e24018549dac659" - integrity sha512-IhoQqRrW6WiecFcfZgoJS1YLEN1/HR1vHP5WNgjCARRW7KUNToHHTX3FrwCM+y4zkRa48D9rE90WFYc2IWhDWQ== +caniuse-lite@^1.0.30001517: + version "1.0.30001534" + resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001534.tgz#f24a9b2a6d39630bac5c132b5dff89b39a12e7dd" + integrity sha512-vlPVrhsCS7XaSh2VvWluIQEzVhefrUQcEsQWSS5A5V+dM07uv1qHeQzAOTGIMy9i3e9bH15+muvI/UHojVgS/Q== chalk-template@^0.4.0: version "0.4.0" - resolved "https://registry.yarnpkg.com/chalk-template/-/chalk-template-0.4.0.tgz#692c034d0ed62436b9062c1707fadcd0f753204b" + resolved "https://registry.npmjs.org/chalk-template/-/chalk-template-0.4.0.tgz#692c034d0ed62436b9062c1707fadcd0f753204b" integrity sha512-/ghrgmhfY8RaSdeo43hNXxpoHAtxdbskUHjPpfqUWGttFgycUhYPGx3YZBCnUCvOa7Doivn1IZec3DEGFoMgLg== dependencies: chalk "^4.1.2" chalk@4.x, chalk@^4.0.0, chalk@^4.1.2: version "4.1.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + resolved "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== dependencies: ansi-styles "^4.1.0" supports-color "^7.1.0" -chalk@^2.0.0, chalk@^2.4.2: +chalk@^2.4.2: version "2.4.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + resolved "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== dependencies: ansi-styles "^3.2.1" @@ -2142,12 +2298,12 @@ chalk@^2.0.0, chalk@^2.4.2: char-regex@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" + resolved 
"https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== chokidar@^2.0.0: version "2.1.8" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.8.tgz#804b3a7b6a99358c3c5c61e71d8728f041cff917" + resolved "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz#804b3a7b6a99358c3c5c61e71d8728f041cff917" integrity sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg== dependencies: anymatch "^2.0.0" @@ -2166,27 +2322,27 @@ chokidar@^2.0.0: chrome-trace-event@^1.0.2: version "1.0.3" - resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz#1015eced4741e15d06664a957dbbf50d041e26ac" + resolved "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz#1015eced4741e15d06664a957dbbf50d041e26ac" integrity sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg== ci-info@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46" + resolved "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46" integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ== ci-info@^3.2.0, ci-info@^3.8.0: version "3.8.0" - resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.8.0.tgz#81408265a5380c929f0bc665d62256628ce9ef91" + resolved "https://registry.npmjs.org/ci-info/-/ci-info-3.8.0.tgz#81408265a5380c929f0bc665d62256628ce9ef91" integrity sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw== cjs-module-lexer@^1.0.0: version "1.2.3" - resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.3.tgz#6c370ab19f8a3394e318fe682686ec0ac684d107" + resolved 
"https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.2.3.tgz#6c370ab19f8a3394e318fe682686ec0ac684d107" integrity sha512-0TNiGstbQmCFwt4akjjBg5pLRTSyj/PkWQ1ZoO2zntmg9yLqSRxwEa4iCfQLGjqhiqBfOJa7W/E8wfGrTDmlZQ== class-utils@^0.3.5: version "0.3.6" - resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" + resolved "https://registry.npmjs.org/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== dependencies: arr-union "^3.1.0" @@ -2196,28 +2352,28 @@ class-utils@^0.3.5: clean-regexp@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/clean-regexp/-/clean-regexp-1.0.0.tgz#8df7c7aae51fd36874e8f8d05b9180bc11a3fed7" + resolved "https://registry.npmjs.org/clean-regexp/-/clean-regexp-1.0.0.tgz#8df7c7aae51fd36874e8f8d05b9180bc11a3fed7" integrity sha512-GfisEZEJvzKrmGWkvfhgzcz/BllN1USeqD2V6tg14OAOgaCD2Z/PUEuxnAZ/nPvmaHRG7a8y77p1T/IRQ4D1Hw== dependencies: escape-string-regexp "^1.0.5" clean-stack@^4.0.0: version "4.2.0" - resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-4.2.0.tgz#c464e4cde4ac789f4e0735c5d75beb49d7b30b31" + resolved "https://registry.npmjs.org/clean-stack/-/clean-stack-4.2.0.tgz#c464e4cde4ac789f4e0735c5d75beb49d7b30b31" integrity sha512-LYv6XPxoyODi36Dp976riBtSY27VmFo+MKqEU9QCCWyTrdEPDog+RWA7xQWHi6Vbp61j5c4cdzzX1NidnwtUWg== dependencies: escape-string-regexp "5.0.0" cli-cursor@^3.1.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307" + resolved "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307" integrity sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw== dependencies: restore-cursor "^3.1.0" cliui@^3.2.0: version "3.2.0" - resolved 
"https://registry.yarnpkg.com/cliui/-/cliui-3.2.0.tgz#120601537a916d29940f934da3b48d585a39213d" + resolved "https://registry.npmjs.org/cliui/-/cliui-3.2.0.tgz#120601537a916d29940f934da3b48d585a39213d" integrity sha512-0yayqDxWQbqk3ojkYqUKqaAQ6AfNKeKWRNA8kR0WXzAsdHpP4BIaOmMAG87JGuO6qcobyW4GjxHd9PmhEd+T9w== dependencies: string-width "^1.0.1" @@ -2226,7 +2382,7 @@ cliui@^3.2.0: cliui@^8.0.1: version "8.0.1" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-8.0.1.tgz#0c04b075db02cbfe60dc8e6cf2f5486b1a3608aa" + resolved "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz#0c04b075db02cbfe60dc8e6cf2f5486b1a3608aa" integrity sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ== dependencies: string-width "^4.2.0" @@ -2235,22 +2391,22 @@ cliui@^8.0.1: clone-buffer@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/clone-buffer/-/clone-buffer-1.0.0.tgz#e3e25b207ac4e701af721e2cb5a16792cac3dc58" + resolved "https://registry.npmjs.org/clone-buffer/-/clone-buffer-1.0.0.tgz#e3e25b207ac4e701af721e2cb5a16792cac3dc58" integrity sha512-KLLTJWrvwIP+OPfMn0x2PheDEP20RPUcGXj/ERegTgdmPEZylALQldygiqrPPu8P45uNuPs7ckmReLY6v/iA5g== clone-stats@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/clone-stats/-/clone-stats-1.0.0.tgz#b3782dff8bb5474e18b9b6bf0fdfe782f8777680" + resolved "https://registry.npmjs.org/clone-stats/-/clone-stats-1.0.0.tgz#b3782dff8bb5474e18b9b6bf0fdfe782f8777680" integrity sha512-au6ydSpg6nsrigcZ4m8Bc9hxjeW+GJ8xh5G3BJCMt4WXe1H10UNaVOamqQTmrx1kjVuxAHIQSNU6hY4Nsn9/ag== clone@^2.1.1, clone@^2.1.2: version "2.1.2" - resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" + resolved "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" integrity sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w== cloneable-readable@^1.0.0: version "1.1.3" - resolved 
"https://registry.yarnpkg.com/cloneable-readable/-/cloneable-readable-1.1.3.tgz#120a00cb053bfb63a222e709f9683ea2e11d8cec" + resolved "https://registry.npmjs.org/cloneable-readable/-/cloneable-readable-1.1.3.tgz#120a00cb053bfb63a222e709f9683ea2e11d8cec" integrity sha512-2EF8zTQOxYq70Y4XKtorQupqF0m49MBz2/yf5Bj+MHjvpG3Hy7sImifnqD6UA+TKYxeSV+u6qqQPawN5UvnpKQ== dependencies: inherits "^2.0.1" @@ -2259,22 +2415,22 @@ cloneable-readable@^1.0.0: co@^4.6.0: version "4.6.0" - resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + resolved "https://registry.npmjs.org/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ== code-point-at@^1.0.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" + resolved "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" integrity sha512-RpAVKQA5T63xEj6/giIbUEtZwJ4UFIc3ZtvEkiaUERylqe8xb5IvqcgOurZLahv93CLKfxcw5YI+DZcUBRyLXA== collect-v8-coverage@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" - integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== + version "1.0.2" + resolved "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz#c0b29bcd33bcd0779a1344c2136051e6afd3d9e9" + integrity sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q== collection-map@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/collection-map/-/collection-map-1.0.0.tgz#aea0f06f8d26c780c2b75494385544b2255af18c" + resolved "https://registry.npmjs.org/collection-map/-/collection-map-1.0.0.tgz#aea0f06f8d26c780c2b75494385544b2255af18c" 
integrity sha512-5D2XXSpkOnleOI21TG7p3T0bGAsZ/XknZpKBmGYyluO8pw4zA3K8ZlrBIbC4FXg3m6z/RNFiUFfT2sQK01+UHA== dependencies: arr-map "^2.0.2" @@ -2283,7 +2439,7 @@ collection-map@^1.0.0: collection-visit@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" + resolved "https://registry.npmjs.org/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" integrity sha512-lNkKvzEeMBBjUGHZ+q6z9pSJla0KWAQPvtzhEV9+iGyQYG+pBpl7xKDhxoNSOZH2hhv0v5k0y2yAM4o4SjoSkw== dependencies: map-visit "^1.0.0" @@ -2291,36 +2447,36 @@ collection-visit@^1.0.0: color-convert@^1.9.0: version "1.9.3" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + resolved "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== dependencies: color-name "1.1.3" color-convert@^2.0.1: version "2.0.1" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + resolved "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== dependencies: color-name "~1.1.4" color-name@1.1.3: version "1.1.3" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== color-name@~1.1.4: version "1.1.4" - resolved 
"https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== color-support@^1.1.3: version "1.1.3" - resolved "https://registry.yarnpkg.com/color-support/-/color-support-1.1.3.tgz#93834379a1cc9a0c61f82f52f0d04322251bd5a2" + resolved "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz#93834379a1cc9a0c61f82f52f0d04322251bd5a2" integrity sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg== -command-line-args@5.2.1, command-line-args@^5.2.1: +command-line-args@^5.2.1: version "5.2.1" - resolved "https://registry.yarnpkg.com/command-line-args/-/command-line-args-5.2.1.tgz#c44c32e437a57d7c51157696893c5909e9cec42e" + resolved "https://registry.npmjs.org/command-line-args/-/command-line-args-5.2.1.tgz#c44c32e437a57d7c51157696893c5909e9cec42e" integrity sha512-H4UfQhZyakIjC74I9d34fGYDwk3XpSr17QhEd0Q3I9Xq1CETHo4Hcuo87WyWHpAF1aSLjLRf5lD9ZGX2qStUvg== dependencies: array-back "^3.1.0" @@ -2328,9 +2484,9 @@ command-line-args@5.2.1, command-line-args@^5.2.1: lodash.camelcase "^4.3.0" typical "^4.0.0" -command-line-usage@7.0.1, command-line-usage@^7.0.0: +command-line-usage@^7.0.0, command-line-usage@^7.0.1: version "7.0.1" - resolved "https://registry.yarnpkg.com/command-line-usage/-/command-line-usage-7.0.1.tgz#e540afef4a4f3bc501b124ffde33956309100655" + resolved "https://registry.npmjs.org/command-line-usage/-/command-line-usage-7.0.1.tgz#e540afef4a4f3bc501b124ffde33956309100655" integrity sha512-NCyznE//MuTjwi3y84QVUGEOT+P5oto1e1Pk/jFPVdPPfsG03qpTIl3yw6etR+v73d0lXsoojRpvbru2sqePxQ== dependencies: array-back "^6.2.2" @@ -2340,37 +2496,37 @@ command-line-usage@7.0.1, command-line-usage@^7.0.0: commander@^2.20.0: version "2.20.3" - resolved 
"https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" + resolved "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== commander@^6.1.0: version "6.2.1" - resolved "https://registry.yarnpkg.com/commander/-/commander-6.2.1.tgz#0792eb682dfbc325999bb2b84fddddba110ac73c" + resolved "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz#0792eb682dfbc325999bb2b84fddddba110ac73c" integrity sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA== commander@^7.2.0: version "7.2.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" + resolved "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== common-tags@^1.8.0: version "1.8.2" - resolved "https://registry.yarnpkg.com/common-tags/-/common-tags-1.8.2.tgz#94ebb3c076d26032745fd54face7f688ef5ac9c6" + resolved "https://registry.npmjs.org/common-tags/-/common-tags-1.8.2.tgz#94ebb3c076d26032745fd54face7f688ef5ac9c6" integrity sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA== component-emitter@^1.2.1: version "1.3.0" - resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" + resolved "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== concat-map@0.0.1: version "0.0.1" - resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + 
resolved "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== concat-stream@^1.6.0: version "1.6.2" - resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" + resolved "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== dependencies: buffer-from "^1.0.0" @@ -2380,22 +2536,22 @@ concat-stream@^1.6.0: convert-source-map@^1.0.0, convert-source-map@^1.5.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: version "1.9.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" + resolved "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== convert-source-map@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a" + resolved "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a" integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== copy-descriptor@^0.1.0: version "0.1.1" - resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" + resolved "https://registry.npmjs.org/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" integrity sha512-XgZ0pFcakEUlbwQEVNg3+QAis1FyTL3Qel9FYy8pSkQqoG3PNoT0bOCQtOXcOkur21r2Eq2kI+IE+gsmAEVlYw== 
copy-props@^2.0.1: version "2.0.5" - resolved "https://registry.yarnpkg.com/copy-props/-/copy-props-2.0.5.tgz#03cf9ae328d4ebb36f8f1d804448a6af9ee3f2d2" + resolved "https://registry.npmjs.org/copy-props/-/copy-props-2.0.5.tgz#03cf9ae328d4ebb36f8f1d804448a6af9ee3f2d2" integrity sha512-XBlx8HSqrT0ObQwmSzM7WE5k8FxTV75h1DX1Z3n6NhQ/UYYAvInWYmG06vFt7hQZArE2fuO62aihiWIVQwh1sw== dependencies: each-props "^1.3.2" @@ -2403,24 +2559,37 @@ copy-props@^2.0.1: core-util-is@~1.0.0: version "1.0.3" - resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" + resolved "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== +create-jest@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/create-jest/-/create-jest-29.7.0.tgz#a355c5b3cb1e1af02ba177fe7afd7feee49a5320" + integrity sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q== + dependencies: + "@jest/types" "^29.6.3" + chalk "^4.0.0" + exit "^0.1.2" + graceful-fs "^4.2.9" + jest-config "^29.7.0" + jest-util "^29.7.0" + prompts "^2.0.1" + create-require@^1.1.0: version "1.1.1" - resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" + resolved "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== cross-env@7.0.3: version "7.0.3" - resolved "https://registry.yarnpkg.com/cross-env/-/cross-env-7.0.3.tgz#865264b29677dc015ba8418918965dd232fc54cf" + resolved "https://registry.npmjs.org/cross-env/-/cross-env-7.0.3.tgz#865264b29677dc015ba8418918965dd232fc54cf" integrity 
sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw== dependencies: cross-spawn "^7.0.1" cross-spawn@^7.0.0, cross-spawn@^7.0.1, cross-spawn@^7.0.2, cross-spawn@^7.0.3: version "7.0.3" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + resolved "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== dependencies: path-key "^3.1.0" @@ -2429,7 +2598,7 @@ cross-spawn@^7.0.0, cross-spawn@^7.0.1, cross-spawn@^7.0.2, cross-spawn@^7.0.3: css@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/css/-/css-3.0.0.tgz#4447a4d58fdd03367c516ca9f64ae365cee4aa5d" + resolved "https://registry.npmjs.org/css/-/css-3.0.0.tgz#4447a4d58fdd03367c516ca9f64ae365cee4aa5d" integrity sha512-DG9pFfwOrzc+hawpmqX/dHYHJG+Bsdb0klhyi1sDneOgGOXy9wQIC8hzyVp1e4NRYDBdxcylvywPkkXCHAzTyQ== dependencies: inherits "^2.0.4" @@ -2438,7 +2607,7 @@ css@^3.0.0: d@1, d@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" + resolved "https://registry.npmjs.org/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== dependencies: es5-ext "^0.10.50" @@ -2446,7 +2615,7 @@ d@1, d@^1.0.1: debug-fabulous@^1.0.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/debug-fabulous/-/debug-fabulous-1.1.0.tgz#af8a08632465224ef4174a9f06308c3c2a1ebc8e" + resolved "https://registry.npmjs.org/debug-fabulous/-/debug-fabulous-1.1.0.tgz#af8a08632465224ef4174a9f06308c3c2a1ebc8e" integrity sha512-GZqvGIgKNlUnHUPQhepnUZFIMoi3dgZKQBzKDeL2g7oJF9SNAji/AAu36dusFUas0O+pae74lNeoIPHqXWDkLg== dependencies: debug "3.X" @@ -2455,28 +2624,28 @@ debug-fabulous@^1.0.0: debug@3.X: version 
"3.2.7" - resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + resolved "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== dependencies: ms "^2.1.1" debug@^2.2.0, debug@^2.3.3: version "2.6.9" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + resolved "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.4: version "4.3.4" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + resolved "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== dependencies: ms "2.1.2" decamelize-keys@^1.1.0: version "1.1.1" - resolved "https://registry.yarnpkg.com/decamelize-keys/-/decamelize-keys-1.1.1.tgz#04a2d523b2f18d80d0158a43b895d56dff8d19d8" + resolved "https://registry.npmjs.org/decamelize-keys/-/decamelize-keys-1.1.1.tgz#04a2d523b2f18d80d0158a43b895d56dff8d19d8" integrity sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg== dependencies: decamelize "^1.1.0" @@ -2484,71 +2653,81 @@ decamelize-keys@^1.1.0: decamelize@^1.1.0, decamelize@^1.1.1: version "1.2.0" - resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" + resolved "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" integrity 
sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA== decamelize@^5.0.0: version "5.0.1" - resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-5.0.1.tgz#db11a92e58c741ef339fb0a2868d8a06a9a7b1e9" + resolved "https://registry.npmjs.org/decamelize/-/decamelize-5.0.1.tgz#db11a92e58c741ef339fb0a2868d8a06a9a7b1e9" integrity sha512-VfxadyCECXgQlkoEAjeghAr5gY3Hf+IKjKb+X8tGVDtveCjN+USwprd2q3QXBR9T1+x2DG0XZF5/w+7HAtSaXA== decode-uri-component@^0.2.0: version "0.2.2" - resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.2.tgz#e69dbe25d37941171dd540e024c444cd5188e1e9" + resolved "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.2.tgz#e69dbe25d37941171dd540e024c444cd5188e1e9" integrity sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ== dedent@^1.0.0: version "1.5.1" - resolved "https://registry.yarnpkg.com/dedent/-/dedent-1.5.1.tgz#4f3fc94c8b711e9bb2800d185cd6ad20f2a90aff" + resolved "https://registry.npmjs.org/dedent/-/dedent-1.5.1.tgz#4f3fc94c8b711e9bb2800d185cd6ad20f2a90aff" integrity sha512-+LxW+KLWxu3HW3M2w2ympwtqPrqYRzU8fqi6Fhd18fBALe15blJPI/I4+UHveMVG6lJqB4JNd4UG0S5cnVHwIg== deep-is@^0.1.3: version "0.1.4" - resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + resolved "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== deepmerge@^4.2.2: version "4.3.1" - resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a" + resolved "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a" integrity sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A== default-compare@^1.0.0: 
version "1.0.0" - resolved "https://registry.yarnpkg.com/default-compare/-/default-compare-1.0.0.tgz#cb61131844ad84d84788fb68fd01681ca7781a2f" + resolved "https://registry.npmjs.org/default-compare/-/default-compare-1.0.0.tgz#cb61131844ad84d84788fb68fd01681ca7781a2f" integrity sha512-QWfXlM0EkAbqOCbD/6HjdwT19j7WCkMyiRhWilc4H9/5h/RzTF9gv5LYh1+CmDV5d1rki6KAWLtQale0xt20eQ== dependencies: kind-of "^5.0.2" default-resolution@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/default-resolution/-/default-resolution-2.0.0.tgz#bcb82baa72ad79b426a76732f1a81ad6df26d684" + resolved "https://registry.npmjs.org/default-resolution/-/default-resolution-2.0.0.tgz#bcb82baa72ad79b426a76732f1a81ad6df26d684" integrity sha512-2xaP6GiwVwOEbXCGoJ4ufgC76m8cj805jrghScewJC2ZDsb9U0b4BIrba+xt/Uytyd0HvQ6+WymSRTfnYj59GQ== +define-data-property@^1.0.1: + version "1.1.0" + resolved "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.0.tgz#0db13540704e1d8d479a0656cf781267531b9451" + integrity sha512-UzGwzcjyv3OtAvolTj1GoyNYzfFR+iqbGjcnBEENZVCpM4/Ng1yhGNvS3lR/xDS74Tb2wGG9WzNSNIOS9UVb2g== + dependencies: + get-intrinsic "^1.2.1" + gopd "^1.0.1" + has-property-descriptors "^1.0.0" + define-properties@^1.1.4: - version "1.2.0" - resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.2.0.tgz#52988570670c9eacedd8064f4a990f2405849bd5" - integrity sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA== + version "1.2.1" + resolved "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz#10781cc616eb951a80a034bafcaa7377f6af2b6c" + integrity sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg== dependencies: + define-data-property "^1.0.1" has-property-descriptors "^1.0.0" object-keys "^1.1.1" define-property@^0.2.5: version "0.2.5" - resolved 
"https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" + resolved "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" integrity sha512-Rr7ADjQZenceVOAKop6ALkkRAmH1A4Gx9hV/7ZujPUN2rkATqFO0JZLZInbAjpZYoJ1gUx8MRMQVkYemcbMSTA== dependencies: is-descriptor "^0.1.0" define-property@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" + resolved "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" integrity sha512-cZTYKFWspt9jZsMscWo8sc/5lbPC9Q0N5nBLgb+Yd915iL3udB1uFgS3B8YCx66UVHq018DAVFoee7x+gxggeA== dependencies: is-descriptor "^1.0.0" define-property@^2.0.2: version "2.0.2" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" + resolved "https://registry.npmjs.org/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== dependencies: is-descriptor "^1.0.2" @@ -2556,7 +2735,7 @@ define-property@^2.0.2: del-cli@5.1.0: version "5.1.0" - resolved "https://registry.yarnpkg.com/del-cli/-/del-cli-5.1.0.tgz#740eca1c7a9eb13043e68d8a361cf0ff9a18d5c8" + resolved "https://registry.npmjs.org/del-cli/-/del-cli-5.1.0.tgz#740eca1c7a9eb13043e68d8a361cf0ff9a18d5c8" integrity sha512-xwMeh2acluWeccsfzE7VLsG3yTr7nWikbfw+xhMnpRrF15pGSkw+3/vJZWlGoE4I86UiLRNHicmKt4tkIX9Jtg== dependencies: del "^7.1.0" @@ -2564,7 +2743,7 @@ del-cli@5.1.0: del@7.1.0, del@^7.1.0: version "7.1.0" - resolved "https://registry.yarnpkg.com/del/-/del-7.1.0.tgz#0de0044d556b649ff05387f1fa7c885e155fd1b6" + resolved "https://registry.npmjs.org/del/-/del-7.1.0.tgz#0de0044d556b649ff05387f1fa7c885e155fd1b6" integrity 
sha512-v2KyNk7efxhlyHpjEvfyxaAihKKK0nWCuf6ZtqZcFFpQRG0bJ12Qsr0RpvsICMjAAZ8DOVCxrlqpxISlMHC4Kg== dependencies: globby "^13.1.2" @@ -2578,51 +2757,51 @@ del@7.1.0, del@^7.1.0: detect-file@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7" + resolved "https://registry.npmjs.org/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7" integrity sha512-DtCOLG98P007x7wiiOmfI0fi3eIKyWiLTGJ2MDnVi/E04lWGbf+JzrRHMm0rgIIZJGtHpKpbVgLWHrv8xXpc3Q== detect-newline@^2.0.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-2.1.0.tgz#f41f1c10be4b00e87b5f13da680759f2c5bfd3e2" + resolved "https://registry.npmjs.org/detect-newline/-/detect-newline-2.1.0.tgz#f41f1c10be4b00e87b5f13da680759f2c5bfd3e2" integrity sha512-CwffZFvlJffUg9zZA0uqrjQayUTC8ob94pnr5sFwaVv3IOmkfUHcWH+jXaQK3askE51Cqe8/9Ql/0uXNwqZ8Zg== detect-newline@^3.0.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" + resolved "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== -diff-sequences@^29.4.3: - version "29.4.3" - resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-29.4.3.tgz#9314bc1fabe09267ffeca9cbafc457d8499a13f2" - integrity sha512-ofrBgwpPhCD85kMKtE9RYFFq6OC1A89oW2vvgWZNCwxrUpRUILopY7lsYyMDSjc8g6U6aiO0Qubg6r4Wgt5ZnA== +diff-sequences@^29.6.3: + version "29.6.3" + resolved "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz#4deaf894d11407c51efc8418012f9e70b84ea921" + integrity sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q== diff@^4.0.1: version "4.0.2" - resolved 
"https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" + resolved "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== dir-glob@^3.0.1: version "3.0.1" - resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + resolved "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== dependencies: path-type "^4.0.0" doctrine@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + resolved "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== dependencies: esutils "^2.0.2" duplexer@^0.1.2: version "0.1.2" - resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" + resolved "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== duplexify@^3.6.0: version "3.7.1" - resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-3.7.1.tgz#2a4df5317f6ccfd91f86d6fd25d8d8a103b88309" + resolved "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz#2a4df5317f6ccfd91f86d6fd25d8d8a103b88309" integrity sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g== dependencies: end-of-stream "^1.0.0" @@ -2632,7 +2811,7 @@ duplexify@^3.6.0: each-props@^1.3.2: version "1.3.2" - resolved 
"https://registry.yarnpkg.com/each-props/-/each-props-1.3.2.tgz#ea45a414d16dd5cfa419b1a81720d5ca06892333" + resolved "https://registry.npmjs.org/each-props/-/each-props-1.3.2.tgz#ea45a414d16dd5cfa419b1a81720d5ca06892333" integrity sha512-vV0Hem3zAGkJAyU7JSjixeU66rwdynTAa1vofCrSA5fEln+m67Az9CcnkVD776/fsN/UjIWmBDoNRS6t6G9RfA== dependencies: is-plain-object "^2.0.1" @@ -2640,66 +2819,66 @@ each-props@^1.3.2: eastasianwidth@^0.2.0: version "0.2.0" - resolved "https://registry.yarnpkg.com/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb" + resolved "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb" integrity sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA== -electron-to-chromium@^1.4.411: - version "1.4.427" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.427.tgz#67e8069f7a864fc092fe2e09f196e68af5cb88a1" - integrity sha512-HK3r9l+Jm8dYAm1ctXEWIC+hV60zfcjS9UA5BDlYvnI5S7PU/yytjpvSrTNrSSRRkuu3tDyZhdkwIczh+0DWaw== +electron-to-chromium@^1.4.477: + version "1.4.520" + resolved "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.520.tgz#c19c25a10d87bd88a9aae2b76cae9235a50c2994" + integrity sha512-Frfus2VpYADsrh1lB3v/ft/WVFlVzOIm+Q0p7U7VqHI6qr7NWHYKe+Wif3W50n7JAFoBsWVsoU0+qDks6WQ60g== emittery@^0.13.1: version "0.13.1" - resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.13.1.tgz#c04b8c3457490e0847ae51fced3af52d338e3dad" + resolved "https://registry.npmjs.org/emittery/-/emittery-0.13.1.tgz#c04b8c3457490e0847ae51fced3af52d338e3dad" integrity sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ== emoji-regex@^8.0.0: version "8.0.0" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + resolved 
"https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== emoji-regex@^9.2.2: version "9.2.2" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" + resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== end-of-stream@^1.0.0, end-of-stream@^1.1.0, end-of-stream@^1.4.4: version "1.4.4" - resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" + resolved "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== dependencies: once "^1.4.0" enhanced-resolve@^5.14.1: - version "5.14.1" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.14.1.tgz#de684b6803724477a4af5d74ccae5de52c25f6b3" - integrity sha512-Vklwq2vDKtl0y/vtwjSesgJ5MYS7Etuk5txS8VdKL4AOS1aUlD96zqIfsOSLQsdv3xgMRbtkWM8eG9XDfKUPow== + version "5.15.0" + resolved "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz#1af946c7d93603eb88e9896cee4904dc012e9c35" + integrity sha512-LXYT42KJ7lpIKECr2mAXIaMldcNCh/7E0KBKOu4KSfkHmP+mZmSs+8V5gBAqisWBy0OO4W5Oyys0GO1Y8KtdKg== dependencies: graceful-fs "^4.2.4" tapable "^2.2.0" errno@^0.1.3: version "0.1.8" - resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.8.tgz#8bb3e9c7d463be4976ff888f76b4809ebc2e811f" + resolved "https://registry.npmjs.org/errno/-/errno-0.1.8.tgz#8bb3e9c7d463be4976ff888f76b4809ebc2e811f" integrity sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A== 
dependencies: prr "~1.0.1" error-ex@^1.2.0, error-ex@^1.3.1: version "1.3.2" - resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" + resolved "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== dependencies: is-arrayish "^0.2.1" es-module-lexer@^1.2.1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-1.3.0.tgz#6be9c9e0b4543a60cd166ff6f8b4e9dae0b0c16f" - integrity sha512-vZK7T0N2CBmBOixhmjdqx2gWVbFZ4DXZ/NyRMZVlJXPa7CyFS+/a4QQsDGDQy9ZfEzxFuNEsMLeQJnKP2p5/JA== + version "1.3.1" + resolved "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.3.1.tgz#c1b0dd5ada807a3b3155315911f364dc4e909db1" + integrity sha512-JUFAyicQV9mXc3YRxPnDlrfBKpqt6hUYzz9/boprUJHs4e4KVr3XwOF70doO6gwXUor6EWZJAyWAfKki84t20Q== es5-ext@^0.10.35, es5-ext@^0.10.46, es5-ext@^0.10.50, es5-ext@^0.10.53, es5-ext@~0.10.14, es5-ext@~0.10.2, es5-ext@~0.10.46: version "0.10.62" - resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.62.tgz#5e6adc19a6da524bf3d1e02bbc8960e5eb49a9a5" + resolved "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.62.tgz#5e6adc19a6da524bf3d1e02bbc8960e5eb49a9a5" integrity sha512-BHLqn0klhEpnOKSrzn/Xsz2UIW8j+cGmo9JLzr8BiUapV8hPL9+FliFqjwr9ngW7jWdnxv6eO+/LqyhJVqgrjA== dependencies: es6-iterator "^2.0.3" @@ -2708,7 +2887,7 @@ es5-ext@^0.10.35, es5-ext@^0.10.46, es5-ext@^0.10.50, es5-ext@^0.10.53, es5-ext@ es6-iterator@^2.0.1, es6-iterator@^2.0.3: version "2.0.3" - resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" + resolved "https://registry.npmjs.org/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" integrity sha512-zw4SRzoUkd+cl+ZoE15A9o1oQd920Bb0iOJMQkQhl3jNc03YqVjAhG7scf9C5KWRU/R13Orf588uCC6525o02g== dependencies: 
d "1" @@ -2717,7 +2896,7 @@ es6-iterator@^2.0.1, es6-iterator@^2.0.3: es6-symbol@^3.1.1, es6-symbol@^3.1.3: version "3.1.3" - resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" + resolved "https://registry.npmjs.org/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA== dependencies: d "^1.0.1" @@ -2725,7 +2904,7 @@ es6-symbol@^3.1.1, es6-symbol@^3.1.3: es6-weak-map@^2.0.1, es6-weak-map@^2.0.3: version "2.0.3" - resolved "https://registry.yarnpkg.com/es6-weak-map/-/es6-weak-map-2.0.3.tgz#b6da1f16cc2cc0d9be43e6bdbfc5e7dfcdf31d53" + resolved "https://registry.npmjs.org/es6-weak-map/-/es6-weak-map-2.0.3.tgz#b6da1f16cc2cc0d9be43e6bdbfc5e7dfcdf31d53" integrity sha512-p5um32HOTO1kP+w7PRnB+5lQ43Z6muuMuIMffvDN8ZB4GcnjLBV6zGStpbASIMk4DCAvEaamhe2zhyCb/QXXsA== dependencies: d "1" @@ -2735,12 +2914,12 @@ es6-weak-map@^2.0.1, es6-weak-map@^2.0.3: esbuild-plugin-alias@0.2.1: version "0.2.1" - resolved "https://registry.yarnpkg.com/esbuild-plugin-alias/-/esbuild-plugin-alias-0.2.1.tgz#45a86cb941e20e7c2bc68a2bea53562172494fcb" + resolved "https://registry.npmjs.org/esbuild-plugin-alias/-/esbuild-plugin-alias-0.2.1.tgz#45a86cb941e20e7c2bc68a2bea53562172494fcb" integrity sha512-jyfL/pwPqaFXyKnj8lP8iLk6Z0m099uXR45aSN8Av1XD4vhvQutxxPzgA2bTcAwQpa1zCXDcWOlhFgyP3GKqhQ== esbuild@0.19.2: version "0.19.2" - resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.19.2.tgz#b1541828a89dfb6f840d38538767c6130dca2aac" + resolved "https://registry.npmjs.org/esbuild/-/esbuild-0.19.2.tgz#b1541828a89dfb6f840d38538767c6130dca2aac" integrity sha512-G6hPax8UbFakEj3hWO0Vs52LQ8k3lnBhxZWomUJDxfz3rZTLqF5k/FCzuNdLx2RbpBiQQF9H9onlDDH1lZsnjg== optionalDependencies: "@esbuild/android-arm" "0.19.2" @@ -2768,7 +2947,7 @@ esbuild@0.19.2: esbuild@^0.17.11: version "0.17.19" - resolved 
"https://registry.yarnpkg.com/esbuild/-/esbuild-0.17.19.tgz#087a727e98299f0462a3d0bcdd9cd7ff100bd955" + resolved "https://registry.npmjs.org/esbuild/-/esbuild-0.17.19.tgz#087a727e98299f0462a3d0bcdd9cd7ff100bd955" integrity sha512-XQ0jAPFkK/u3LcVRcvVHQcTIqD6E2H1fvZMA5dQPSOWb3suUbWbfbRf94pjc0bNzRYLfIrDRQXr7X+LHIm5oHw== optionalDependencies: "@esbuild/android-arm" "0.17.19" @@ -2796,27 +2975,27 @@ esbuild@^0.17.11: escalade@^3.1.1: version "3.1.1" - resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + resolved "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== escape-string-regexp@5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz#4683126b500b61762f2dbebace1806e8be31b1c8" + resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz#4683126b500b61762f2dbebace1806e8be31b1c8" integrity sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw== escape-string-regexp@^1.0.3, escape-string-regexp@^1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== escape-string-regexp@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" integrity 
sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== escape-string-regexp@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== eslint-plugin-jest@27.4.2: @@ -2828,7 +3007,7 @@ eslint-plugin-jest@27.4.2: eslint-plugin-unicorn@47.0.0: version "47.0.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-unicorn/-/eslint-plugin-unicorn-47.0.0.tgz#960e9d3789f656ba3e21982420793b069a911011" + resolved "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-47.0.0.tgz#960e9d3789f656ba3e21982420793b069a911011" integrity sha512-ivB3bKk7fDIeWOUmmMm9o3Ax9zbMz1Bsza/R2qm46ufw4T6VBFBaJIR1uN3pCKSmSXm8/9Nri8V+iUut1NhQGA== dependencies: "@babel/helper-validator-identifier" "^7.19.1" @@ -2850,46 +3029,47 @@ eslint-plugin-unicorn@47.0.0: eslint-scope@5.1.1, eslint-scope@^5.1.1: version "5.1.1" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" + resolved "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== dependencies: esrecurse "^4.3.0" estraverse "^4.1.1" -eslint-scope@^7.2.0: - version "7.2.0" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.2.0.tgz#f21ebdafda02352f103634b96dd47d9f81ca117b" - integrity sha512-DYj5deGlHBfMt15J7rdtyKNq/Nqlv5KfU4iodrQ019XESsRnwXH9KAE0y3cwtUHDo2ob7CypAnCqefh6vioWRw== +eslint-scope@^7.2.2: + version "7.2.2" + resolved 
"https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.2.2.tgz#deb4f92563390f32006894af62a22dba1c46423f" + integrity sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg== dependencies: esrecurse "^4.3.0" estraverse "^5.2.0" -eslint-visitor-keys@^3.3.0, eslint-visitor-keys@^3.4.1: - version "3.4.1" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.1.tgz#c22c48f48942d08ca824cc526211ae400478a994" - integrity sha512-pZnmmLwYzf+kWaM/Qgrvpen51upAktaaiI01nsJD/Yr3lMOdNtq0cxkrrg16w64VtisN6okbs7Q8AfGqj4c9fA== +eslint-visitor-keys@^3.3.0, eslint-visitor-keys@^3.4.1, eslint-visitor-keys@^3.4.3: + version "3.4.3" + resolved "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz#0cd72fe8550e3c2eae156a96a4dddcd1c8ac5800" + integrity sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag== -eslint@8.42.0: - version "8.42.0" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.42.0.tgz#7bebdc3a55f9ed7167251fe7259f75219cade291" - integrity sha512-ulg9Ms6E1WPf67PHaEY4/6E2tEn5/f7FXGzr3t9cBMugOmf1INYvuUwwh1aXQN4MfJ6a5K2iNwP3w4AColvI9A== +eslint@8.52.0: + version "8.52.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.52.0.tgz#d0cd4a1fac06427a61ef9242b9353f36ea7062fc" + integrity sha512-zh/JHnaixqHZsolRB/w9/02akBk9EPrOs9JwcTP2ek7yL5bVvXuRariiaAjjoJ5DvuwQ1WAE/HsMz+w17YgBCg== dependencies: "@eslint-community/eslint-utils" "^4.2.0" - "@eslint-community/regexpp" "^4.4.0" - "@eslint/eslintrc" "^2.0.3" - "@eslint/js" "8.42.0" - "@humanwhocodes/config-array" "^0.11.10" + "@eslint-community/regexpp" "^4.6.1" + "@eslint/eslintrc" "^2.1.2" + "@eslint/js" "8.52.0" + "@humanwhocodes/config-array" "^0.11.13" "@humanwhocodes/module-importer" "^1.0.1" "@nodelib/fs.walk" "^1.2.8" - ajv "^6.10.0" + "@ungap/structured-clone" "^1.2.0" + ajv "^6.12.4" chalk "^4.0.0" cross-spawn "^7.0.2" debug "^4.3.2" doctrine "^3.0.0" escape-string-regexp 
"^4.0.0" - eslint-scope "^7.2.0" - eslint-visitor-keys "^3.4.1" - espree "^9.5.2" + eslint-scope "^7.2.2" + eslint-visitor-keys "^3.4.3" + espree "^9.6.1" esquery "^1.4.2" esutils "^2.0.2" fast-deep-equal "^3.1.3" @@ -2899,7 +3079,6 @@ eslint@8.42.0: globals "^13.19.0" graphemer "^1.4.0" ignore "^5.2.0" - import-fresh "^3.0.0" imurmurhash "^0.1.4" is-glob "^4.0.0" is-path-inside "^3.0.3" @@ -2909,66 +3088,65 @@ eslint@8.42.0: lodash.merge "^4.6.2" minimatch "^3.1.2" natural-compare "^1.4.0" - optionator "^0.9.1" + optionator "^0.9.3" strip-ansi "^6.0.1" - strip-json-comments "^3.1.0" text-table "^0.2.0" "esm@https://github.com/jsg2021/esm/releases/download/v3.x.x-pr883/esm-3.x.x-pr883.tgz": version "3.2.25" resolved "https://github.com/jsg2021/esm/releases/download/v3.x.x-pr883/esm-3.x.x-pr883.tgz#c463cfa4e14aceea6b7cd7e669ef90de072ea60a" -espree@^9.5.2: - version "9.5.2" - resolved "https://registry.yarnpkg.com/espree/-/espree-9.5.2.tgz#e994e7dc33a082a7a82dceaf12883a829353215b" - integrity sha512-7OASN1Wma5fum5SrNhFMAMJxOUAbhyfQ8dQ//PJaJbNw0URTPWqIghHWt1MmAANKhHZIYOHruW4Kw4ruUWOdGw== +espree@^9.6.0, espree@^9.6.1: + version "9.6.1" + resolved "https://registry.yarnpkg.com/espree/-/espree-9.6.1.tgz#a2a17b8e434690a5432f2f8018ce71d331a48c6f" + integrity sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ== dependencies: - acorn "^8.8.0" + acorn "^8.9.0" acorn-jsx "^5.3.2" eslint-visitor-keys "^3.4.1" esprima@^4.0.0: version "4.0.1" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + resolved "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== esquery@^1.4.2, esquery@^1.5.0: version "1.5.0" - resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.5.0.tgz#6ce17738de8577694edd7361c57182ac8cb0db0b" + resolved 
"https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz#6ce17738de8577694edd7361c57182ac8cb0db0b" integrity sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg== dependencies: estraverse "^5.1.0" esrecurse@^4.3.0: version "4.3.0" - resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + resolved "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== dependencies: estraverse "^5.2.0" estraverse@^4.1.1: version "4.3.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + resolved "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== estraverse@^5.1.0, estraverse@^5.2.0: version "5.3.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + resolved "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== estree-walker@^2.0.2: version "2.0.2" - resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-2.0.2.tgz#52f010178c2a4c117a7757cfe942adb7d2da4cac" + resolved "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz#52f010178c2a4c117a7757cfe942adb7d2da4cac" integrity sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w== esutils@^2.0.2: version "2.0.3" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + resolved 
"https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== event-emitter@^0.3.5: version "0.3.5" - resolved "https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.3.5.tgz#df8c69eef1647923c7157b9ce83840610b02cc39" + resolved "https://registry.npmjs.org/event-emitter/-/event-emitter-0.3.5.tgz#df8c69eef1647923c7157b9ce83840610b02cc39" integrity sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA== dependencies: d "1" @@ -2976,12 +3154,12 @@ event-emitter@^0.3.5: events@^3.2.0: version "3.3.0" - resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" + resolved "https://registry.npmjs.org/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== execa@^5.0.0: version "5.1.1" - resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" + resolved "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== dependencies: cross-spawn "^7.0.3" @@ -2996,12 +3174,12 @@ execa@^5.0.0: exit@^0.1.2: version "0.1.2" - resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" + resolved "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== expand-brackets@^2.1.4: version "2.1.4" - resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" + resolved 
"https://registry.npmjs.org/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" integrity sha512-w/ozOKR9Obk3qoWeY/WDi6MFta9AoMR+zud60mdnbniMcBxRuFJyDt2LdX/14A1UABeqk+Uk+LDfUpvoGKppZA== dependencies: debug "^2.3.3" @@ -3014,40 +3192,39 @@ expand-brackets@^2.1.4: expand-tilde@^2.0.0, expand-tilde@^2.0.2: version "2.0.2" - resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502" + resolved "https://registry.npmjs.org/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502" integrity sha512-A5EmesHW6rfnZ9ysHQjPdJRni0SRar0tjtG5MNtm9n5TUvsYU8oozprtRD4AqHxcZWWlVuAmQo2nWKfN9oyjTw== dependencies: homedir-polyfill "^1.0.1" -expect@^29.0.0, expect@^29.6.2: - version "29.6.2" - resolved "https://registry.yarnpkg.com/expect/-/expect-29.6.2.tgz#7b08e83eba18ddc4a2cf62b5f2d1918f5cd84521" - integrity sha512-iAErsLxJ8C+S02QbLAwgSGSezLQK+XXRDt8IuFXFpwCNw2ECmzZSmjKcCaFVp5VRMk+WAvz6h6jokzEzBFZEuA== +expect@^29.0.0, expect@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/expect/-/expect-29.7.0.tgz#578874590dcb3214514084c08115d8aee61e11bc" + integrity sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw== dependencies: - "@jest/expect-utils" "^29.6.2" - "@types/node" "*" - jest-get-type "^29.4.3" - jest-matcher-utils "^29.6.2" - jest-message-util "^29.6.2" - jest-util "^29.6.2" + "@jest/expect-utils" "^29.7.0" + jest-get-type "^29.6.3" + jest-matcher-utils "^29.7.0" + jest-message-util "^29.7.0" + jest-util "^29.7.0" ext@^1.1.2: version "1.7.0" - resolved "https://registry.yarnpkg.com/ext/-/ext-1.7.0.tgz#0ea4383c0103d60e70be99e9a7f11027a33c4f5f" + resolved "https://registry.npmjs.org/ext/-/ext-1.7.0.tgz#0ea4383c0103d60e70be99e9a7f11027a33c4f5f" integrity sha512-6hxeJYaL110a9b5TEJSj0gojyHQAmA2ch5Os+ySCiA1QGdS697XWY1pzsrSjqA9LDEEgdB/KypIlR59RcLuHYw== dependencies: type "^2.7.2" extend-shallow@^2.0.1: version 
"2.0.1" - resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" + resolved "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" integrity sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug== dependencies: is-extendable "^0.1.0" extend-shallow@^3.0.0, extend-shallow@^3.0.2: version "3.0.2" - resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" + resolved "https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" integrity sha512-BwY5b5Ql4+qZoefgMj2NUmx+tehVTH/Kf4k1ZEtOHNFcm2wSxMRo992l6X3TIgni2eZVTZ85xMOjF31fwZAj6Q== dependencies: assign-symbols "^1.0.0" @@ -3055,12 +3232,12 @@ extend-shallow@^3.0.0, extend-shallow@^3.0.2: extend@^3.0.0: version "3.0.2" - resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" + resolved "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== extglob@^2.0.4: version "2.0.4" - resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" + resolved "https://registry.npmjs.org/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== dependencies: array-unique "^0.3.2" @@ -3074,7 +3251,7 @@ extglob@^2.0.4: fancy-log@^1.3.2, fancy-log@^1.3.3: version "1.3.3" - resolved "https://registry.yarnpkg.com/fancy-log/-/fancy-log-1.3.3.tgz#dbc19154f558690150a23953a0adbd035be45fc7" + resolved "https://registry.npmjs.org/fancy-log/-/fancy-log-1.3.3.tgz#dbc19154f558690150a23953a0adbd035be45fc7" integrity 
sha512-k9oEhlyc0FrVh25qYuSELjr8oxsCoc4/LEZfg2iJJrfEk/tZL9bCoJE47gqAvI2m/AUjluCS4+3I0eTx8n3AEw== dependencies: ansi-gray "^0.1.1" @@ -3084,25 +3261,25 @@ fancy-log@^1.3.2, fancy-log@^1.3.3: fancy-log@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/fancy-log/-/fancy-log-2.0.0.tgz#cad207b8396d69ae4796d74d17dff5f68b2f7343" + resolved "https://registry.npmjs.org/fancy-log/-/fancy-log-2.0.0.tgz#cad207b8396d69ae4796d74d17dff5f68b2f7343" integrity sha512-9CzxZbACXMUXW13tS0tI8XsGGmxWzO2DmYrGuBJOJ8k8q2K7hwfJA5qHjuPPe8wtsco33YR9wc+Rlr5wYFvhSA== dependencies: color-support "^1.1.3" fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: version "3.1.3" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + resolved "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== fast-fifo@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/fast-fifo/-/fast-fifo-1.2.0.tgz#2ee038da2468e8623066dee96958b0c1763aa55a" - integrity sha512-NcvQXt7Cky1cNau15FWy64IjuO8X0JijhTBBrJj1YlxlDfRkJXNaK9RFUjwpfDPzMdv7wB38jr53l9tkNLxnWg== + version "1.3.2" + resolved "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz#286e31de96eb96d38a97899815740ba2a4f3640c" + integrity sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ== -fast-glob@^3.2.11, fast-glob@^3.2.9: - version "3.2.12" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" - integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== +fast-glob@^3.2.9, fast-glob@^3.3.0: + version "3.3.1" + resolved "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.1.tgz#784b4e897340f3dbbef17413b3f11acf03c874c4" + integrity 
sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg== dependencies: "@nodelib/fs.stat" "^2.0.2" "@nodelib/fs.walk" "^1.2.3" @@ -3112,53 +3289,53 @@ fast-glob@^3.2.11, fast-glob@^3.2.9: fast-json-stable-stringify@2.x, fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + resolved "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== fast-levenshtein@^1.0.0: version "1.1.4" - resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-1.1.4.tgz#e6a754cc8f15e58987aa9cbd27af66fd6f4e5af9" + resolved "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-1.1.4.tgz#e6a754cc8f15e58987aa9cbd27af66fd6f4e5af9" integrity sha512-Ia0sQNrMPXXkqVFt6w6M1n1oKo3NfKs+mvaV811Jwir7vAk9a6PVV9VPYf6X3BU97QiLEmuW3uXH9u87zDFfdw== fast-levenshtein@^2.0.6: version "2.0.6" - resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + resolved "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== fastq@^1.6.0: version "1.15.0" - resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.15.0.tgz#d04d07c6a2a68fe4599fea8d2e103a937fae6b3a" + resolved "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz#d04d07c6a2a68fe4599fea8d2e103a937fae6b3a" integrity sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw== dependencies: reusify "^1.0.4" fb-watchman@^2.0.0: version "2.0.2" - resolved 
"https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.2.tgz#e9524ee6b5c77e9e5001af0f85f3adbb8623255c" + resolved "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz#e9524ee6b5c77e9e5001af0f85f3adbb8623255c" integrity sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA== dependencies: bser "2.1.1" file-entry-cache@^6.0.1: version "6.0.1" - resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + resolved "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== dependencies: flat-cache "^3.0.4" file-uri-to-path@1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz#553a7b8446ff6f684359c445f1e37a05dacc33dd" + resolved "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz#553a7b8446ff6f684359c445f1e37a05dacc33dd" integrity sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw== filesize@^9.0.11: version "9.0.11" - resolved "https://registry.yarnpkg.com/filesize/-/filesize-9.0.11.tgz#4ac3a42c084232dd9b2a1da0107f32d42fcfa5e4" + resolved "https://registry.npmjs.org/filesize/-/filesize-9.0.11.tgz#4ac3a42c084232dd9b2a1da0107f32d42fcfa5e4" integrity sha512-gTAiTtI0STpKa5xesyTA9hA3LX4ga8sm2nWRcffEa1L/5vQwb4mj2MdzMkoHoGv4QzfDshQZuYscQSf8c4TKOA== fill-range@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" + resolved "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" integrity sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ== dependencies: extend-shallow "^2.0.1" @@ -3168,21 +3345,21 @@ 
fill-range@^4.0.0: fill-range@^7.0.1: version "7.0.1" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + resolved "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== dependencies: to-regex-range "^5.0.1" find-replace@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/find-replace/-/find-replace-3.0.0.tgz#3e7e23d3b05167a76f770c9fbd5258b0def68c38" + resolved "https://registry.npmjs.org/find-replace/-/find-replace-3.0.0.tgz#3e7e23d3b05167a76f770c9fbd5258b0def68c38" integrity sha512-6Tb2myMioCAgv5kfvP5/PkZZ/ntTpVK39fHY7WkWBgvbeE+VHd/tZuZ4mrC+bxh4cfOZeYKVPaJIZtZXV7GNCQ== dependencies: array-back "^3.0.1" find-up@^1.0.0: version "1.1.2" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" + resolved "https://registry.npmjs.org/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" integrity sha512-jvElSjyuo4EMQGoTwo1uJU5pQMwTW5lS1x05zzfJuTIyLR3zwO27LYrxNg+dlvKpGOuGy/MzBdXh80g0ve5+HA== dependencies: path-exists "^2.0.0" @@ -3190,7 +3367,7 @@ find-up@^1.0.0: find-up@^4.0.0, find-up@^4.1.0: version "4.1.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" + resolved "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== dependencies: locate-path "^5.0.0" @@ -3198,7 +3375,7 @@ find-up@^4.0.0, find-up@^4.1.0: find-up@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" + resolved "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" 
integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== dependencies: locate-path "^6.0.0" @@ -3206,7 +3383,7 @@ find-up@^5.0.0: findup-sync@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-2.0.0.tgz#9326b1488c22d1a6088650a86901b2d9a90a2cbc" + resolved "https://registry.npmjs.org/findup-sync/-/findup-sync-2.0.0.tgz#9326b1488c22d1a6088650a86901b2d9a90a2cbc" integrity sha512-vs+3unmJT45eczmcAZ6zMJtxN3l/QXeccaXQx5cu/MeJMhewVfoWZqibRkOxPnmoR59+Zy5hjabfQc6JLSah4g== dependencies: detect-file "^1.0.0" @@ -3216,7 +3393,7 @@ findup-sync@^2.0.0: findup-sync@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/findup-sync/-/findup-sync-3.0.0.tgz#17b108f9ee512dfb7a5c7f3c8b27ea9e1a9c08d1" + resolved "https://registry.npmjs.org/findup-sync/-/findup-sync-3.0.0.tgz#17b108f9ee512dfb7a5c7f3c8b27ea9e1a9c08d1" integrity sha512-YbffarhcicEhOrm4CtrwdKBdCuz576RLdhJDsIfvNtxUuhdRet1qZcsMjqbePtAseKdAnDyM/IyXbu7PRPRLYg== dependencies: detect-file "^1.0.0" @@ -3226,7 +3403,7 @@ findup-sync@^3.0.0: fined@^1.0.1: version "1.2.0" - resolved "https://registry.yarnpkg.com/fined/-/fined-1.2.0.tgz#d00beccf1aa2b475d16d423b0238b713a2c4a37b" + resolved "https://registry.npmjs.org/fined/-/fined-1.2.0.tgz#d00beccf1aa2b475d16d423b0238b713a2c4a37b" integrity sha512-ZYDqPLGxDkDhDZBjZBb+oD1+j0rA4E0pXY50eplAAOPg2N/gUBSSk5IM1/QhPfyVo19lJ+CvXpqfvk+b2p/8Ng== dependencies: expand-tilde "^2.0.2" @@ -3237,30 +3414,31 @@ fined@^1.0.1: flagged-respawn@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/flagged-respawn/-/flagged-respawn-1.0.1.tgz#e7de6f1279ddd9ca9aac8a5971d618606b3aab41" + resolved "https://registry.npmjs.org/flagged-respawn/-/flagged-respawn-1.0.1.tgz#e7de6f1279ddd9ca9aac8a5971d618606b3aab41" integrity sha512-lNaHNVymajmk0OJMBn8fVUAU1BtDeKIqKoVhk4xAALB57aALg6b4W0MfJ/cUE0g9YBXy5XhSlPIpYIJ7HaY/3Q== flat-cache@^3.0.4: - version "3.0.4" - resolved 
"https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" - integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + version "3.1.0" + resolved "https://registry.npmjs.org/flat-cache/-/flat-cache-3.1.0.tgz#0e54ab4a1a60fe87e2946b6b00657f1c99e1af3f" + integrity sha512-OHx4Qwrrt0E4jEIcI5/Xb+f+QmJYNj2rrK8wiIdQOIrB9WrrJL8cjZvXdXuBTkkEwEqLycb5BeZDV1o2i9bTew== dependencies: - flatted "^3.1.0" + flatted "^3.2.7" + keyv "^4.5.3" rimraf "^3.0.2" -flatbuffers@23.5.26: +flatbuffers@^23.5.26: version "23.5.26" - resolved "https://registry.yarnpkg.com/flatbuffers/-/flatbuffers-23.5.26.tgz#01358e272a61239f0faf3bfbe4e014f3ace9d746" + resolved "https://registry.npmjs.org/flatbuffers/-/flatbuffers-23.5.26.tgz#01358e272a61239f0faf3bfbe4e014f3ace9d746" integrity sha512-vE+SI9vrJDwi1oETtTIFldC/o9GsVKRM+s6EL0nQgxXlYV1Vc4Tk30hj4xGICftInKQKj1F3up2n8UbIVobISQ== -flatted@^3.1.0: +flatted@^3.2.7: version "3.2.7" - resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787" + resolved "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787" integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ== flush-write-stream@^1.0.2: version "1.1.1" - resolved "https://registry.yarnpkg.com/flush-write-stream/-/flush-write-stream-1.1.1.tgz#8dd7d873a1babc207d94ead0c2e0e44276ebf2e8" + resolved "https://registry.npmjs.org/flush-write-stream/-/flush-write-stream-1.1.1.tgz#8dd7d873a1babc207d94ead0c2e0e44276ebf2e8" integrity sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w== dependencies: inherits "^2.0.3" @@ -3268,19 +3446,19 @@ flush-write-stream@^1.0.2: for-in@^1.0.1, for-in@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" + resolved 
"https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" integrity sha512-7EwmXrOjyL+ChxMhmG5lnW9MPt1aIeZEwKhQzoBUdTV0N3zuwWDZYVJatDvZ2OyzPUvdIAZDsCetk3coyMfcnQ== for-own@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/for-own/-/for-own-1.0.0.tgz#c63332f415cedc4b04dbfe70cf836494c53cb44b" + resolved "https://registry.npmjs.org/for-own/-/for-own-1.0.0.tgz#c63332f415cedc4b04dbfe70cf836494c53cb44b" integrity sha512-0OABksIGrxKK8K4kynWkQ7y1zounQxP+CWnyclVwj81KW3vlLlGUx57DKGcP/LH216GzqnstnPocF16Nxs0Ycg== dependencies: for-in "^1.0.1" foreground-child@^3.1.0: version "3.1.1" - resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-3.1.1.tgz#1d173e776d75d2772fed08efe4a0de1ea1b12d0d" + resolved "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz#1d173e776d75d2772fed08efe4a0de1ea1b12d0d" integrity sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg== dependencies: cross-spawn "^7.0.0" @@ -3288,14 +3466,14 @@ foreground-child@^3.1.0: fragment-cache@^0.2.1: version "0.2.1" - resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" + resolved "https://registry.npmjs.org/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" integrity sha512-GMBAbW9antB8iZRHLoGw0b3HANt57diZYFO/HL1JGIC1MjKrdmhxvrJbupnVvpys0zsz7yBApXdQyfepKly2kA== dependencies: map-cache "^0.2.2" fs-extra@^10.0.0: version "10.1.0" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf" + resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf" integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== dependencies: graceful-fs "^4.2.0" @@ -3304,7 +3482,7 @@ fs-extra@^10.0.0: fs-mkdirp-stream@^1.0.0: version "1.0.0" - resolved 
"https://registry.yarnpkg.com/fs-mkdirp-stream/-/fs-mkdirp-stream-1.0.0.tgz#0b7815fc3201c6a69e14db98ce098c16935259eb" + resolved "https://registry.npmjs.org/fs-mkdirp-stream/-/fs-mkdirp-stream-1.0.0.tgz#0b7815fc3201c6a69e14db98ce098c16935259eb" integrity sha512-+vSd9frUnapVC2RZYfL3FCB2p3g4TBhaUmrsWlSudsGdnxIuUvBB2QM1VZeBtc49QFwrp+wQLrDs3+xxDgI5gQ== dependencies: graceful-fs "^4.1.11" @@ -3312,45 +3490,45 @@ fs-mkdirp-stream@^1.0.0: fs.realpath@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + resolved "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== fsevents@^1.2.7: version "1.2.13" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.13.tgz#f325cb0455592428bcf11b383370ef70e3bfcc38" + resolved "https://registry.npmjs.org/fsevents/-/fsevents-1.2.13.tgz#f325cb0455592428bcf11b383370ef70e3bfcc38" integrity sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw== dependencies: bindings "^1.5.0" nan "^2.12.1" fsevents@^2.3.2, fsevents@~2.3.2: - version "2.3.2" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" - integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + version "2.3.3" + resolved "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" + integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== function-bind@^1.1.1: version "1.1.1" - resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + resolved 
"https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== gensync@^1.0.0-beta.2: version "1.0.0-beta.2" - resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + resolved "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== get-caller-file@^1.0.1: version "1.0.3" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a" + resolved "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a" integrity sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w== get-caller-file@^2.0.5: version "2.0.5" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + resolved "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== -get-intrinsic@^1.0.2, get-intrinsic@^1.1.1: +get-intrinsic@^1.0.2, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3, get-intrinsic@^1.2.1: version "1.2.1" - resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.1.tgz#d295644fed4505fc9cde952c37ee12b477a83d82" + resolved "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.1.tgz#d295644fed4505fc9cde952c37ee12b477a83d82" integrity sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw== dependencies: function-bind "^1.1.1" @@ -3360,22 +3538,22 @@ get-intrinsic@^1.0.2, 
get-intrinsic@^1.1.1: get-package-type@^0.1.0: version "0.1.0" - resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + resolved "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== get-stream@^6.0.0: version "6.0.1" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" + resolved "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== get-value@^2.0.3, get-value@^2.0.6: version "2.0.6" - resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" + resolved "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" integrity sha512-Ln0UQDlxH1BapMu3GPtf7CuYNwRZf2gwCuPqbyG6pB8WfmFpzqcy4xtAaAMUhnNqjMKTiCPZG2oMT3YSx8U2NA== glob-parent@^3.1.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" + resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" integrity sha512-E8Ak/2+dZY6fnzlR7+ueWvhsH1SjHr4jjss4YS/h4py44jY9MhK/VFdaZJAWDz6BbL21KeteKxFSFpq8OS5gVA== dependencies: is-glob "^3.1.0" @@ -3383,21 +3561,21 @@ glob-parent@^3.1.0: glob-parent@^5.1.2: version "5.1.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" integrity 
sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== dependencies: is-glob "^4.0.1" glob-parent@^6.0.2: version "6.0.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" + resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== dependencies: is-glob "^4.0.3" glob-stream@^6.1.0: version "6.1.0" - resolved "https://registry.yarnpkg.com/glob-stream/-/glob-stream-6.1.0.tgz#7045c99413b3eb94888d83ab46d0b404cc7bdde4" + resolved "https://registry.npmjs.org/glob-stream/-/glob-stream-6.1.0.tgz#7045c99413b3eb94888d83ab46d0b404cc7bdde4" integrity sha512-uMbLGAP3S2aDOHUDfdoYcdIePUCfysbAd0IAoWVZbeGU/oNQ8asHVSshLDJUPWxfzj8zsCG7/XeHPHTtow0nsw== dependencies: extend "^3.0.0" @@ -3413,12 +3591,12 @@ glob-stream@^6.1.0: glob-to-regexp@^0.4.1: version "0.4.1" - resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e" + resolved "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e" integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== glob-watcher@^5.0.3: version "5.0.5" - resolved "https://registry.yarnpkg.com/glob-watcher/-/glob-watcher-5.0.5.tgz#aa6bce648332924d9a8489be41e3e5c52d4186dc" + resolved "https://registry.npmjs.org/glob-watcher/-/glob-watcher-5.0.5.tgz#aa6bce648332924d9a8489be41e3e5c52d4186dc" integrity sha512-zOZgGGEHPklZNjZQaZ9f41i7F2YwE+tS5ZHrDhbBCk3stwahn5vQxnFmBJZHoYdusR6R1bLSXeGUy/BhctwKzw== dependencies: anymatch "^2.0.0" @@ -3431,7 +3609,7 @@ glob-watcher@^5.0.3: glob@10.2.7: version "10.2.7" - resolved "https://registry.yarnpkg.com/glob/-/glob-10.2.7.tgz#9dd2828cd5bc7bd861e7738d91e7113dda41d7d8" + resolved 
"https://registry.npmjs.org/glob/-/glob-10.2.7.tgz#9dd2828cd5bc7bd861e7738d91e7113dda41d7d8" integrity sha512-jTKehsravOJo8IJxUGfZILnkvVJM/MOfHRs8QcXolVef2zNI9Tqyy5+SeuOAZd3upViEZQLyFpQhYiHLrMUNmA== dependencies: foreground-child "^3.1.0" @@ -3442,7 +3620,7 @@ glob@10.2.7: glob@^7.1.1, glob@^7.1.3, glob@^7.1.4: version "7.2.3" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" + resolved "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== dependencies: fs.realpath "^1.0.0" @@ -3454,7 +3632,7 @@ glob@^7.1.1, glob@^7.1.3, glob@^7.1.4: global-modules@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea" + resolved "https://registry.npmjs.org/global-modules/-/global-modules-1.0.0.tgz#6d770f0eb523ac78164d72b5e71a8877265cc3ea" integrity sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg== dependencies: global-prefix "^1.0.1" @@ -3463,7 +3641,7 @@ global-modules@^1.0.0: global-prefix@^1.0.1: version "1.0.2" - resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-1.0.2.tgz#dbf743c6c14992593c655568cb66ed32c0122ebe" + resolved "https://registry.npmjs.org/global-prefix/-/global-prefix-1.0.2.tgz#dbf743c6c14992593c655568cb66ed32c0122ebe" integrity sha512-5lsx1NUDHtSjfg0eHlmYvZKv8/nVqX4ckFbM+FrGcQ+04KWcWFo9P5MxPZYSzUvyzmdTbI7Eix8Q4IbELDqzKg== dependencies: expand-tilde "^2.0.2" @@ -3474,19 +3652,19 @@ global-prefix@^1.0.1: globals@^11.1.0: version "11.12.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + resolved "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" integrity 
sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== globals@^13.19.0: - version "13.20.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-13.20.0.tgz#ea276a1e508ffd4f1612888f9d1bad1e2717bf82" - integrity sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ== + version "13.21.0" + resolved "https://registry.npmjs.org/globals/-/globals-13.21.0.tgz#163aae12f34ef502f5153cfbdd3600f36c63c571" + integrity sha512-ybyme3s4yy/t/3s35bewwXKOf7cvzfreG2lH0lZl0JB7I4GxRP2ghxOK/Nb9EkRXdbBXZLfq/p/0W2JUONB/Gg== dependencies: type-fest "^0.20.2" globby@^11.1.0: version "11.1.0" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + resolved "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== dependencies: array-union "^2.1.0" @@ -3497,19 +3675,19 @@ globby@^11.1.0: slash "^3.0.0" globby@^13.1.2: - version "13.1.4" - resolved "https://registry.yarnpkg.com/globby/-/globby-13.1.4.tgz#2f91c116066bcec152465ba36e5caa4a13c01317" - integrity sha512-iui/IiiW+QrJ1X1hKH5qwlMQyv34wJAYwH1vrf8b9kBA4sNiif3gKsMHa+BrdnOpEudWjpotfa7LrTzB1ERS/g== + version "13.2.2" + resolved "https://registry.npmjs.org/globby/-/globby-13.2.2.tgz#63b90b1bf68619c2135475cbd4e71e66aa090592" + integrity sha512-Y1zNGV+pzQdh7H39l9zgB4PJqjRNqydvdYCDG4HFXM4XuvSaQQlEc91IU1yALL8gUTDomgBAfz3XJdmUS+oo0w== dependencies: dir-glob "^3.0.1" - fast-glob "^3.2.11" - ignore "^5.2.0" + fast-glob "^3.3.0" + ignore "^5.2.4" merge2 "^1.4.1" slash "^4.0.0" glogg@^1.0.0: version "1.0.2" - resolved "https://registry.yarnpkg.com/glogg/-/glogg-1.0.2.tgz#2d7dd702beda22eb3bffadf880696da6d846313f" + resolved "https://registry.npmjs.org/glogg/-/glogg-1.0.2.tgz#2d7dd702beda22eb3bffadf880696da6d846313f" integrity 
sha512-5mwUoSuBk44Y4EshyiqcH95ZntbDdTQqA3QYSrxmzj28Ai0vXBGMH1ApSANH14j2sIRtqCEyg6PfsuP7ElOEDA== dependencies: sparkles "^1.0.0" @@ -3549,24 +3727,31 @@ google-closure-compiler@20230802.0.0: google-closure-compiler-osx "^20230802.0.0" google-closure-compiler-windows "^20230802.0.0" +gopd@^1.0.1: + version "1.0.1" + resolved "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c" + integrity sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA== + dependencies: + get-intrinsic "^1.1.3" + graceful-fs@^4.0.0, graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.10, graceful-fs@^4.2.4, graceful-fs@^4.2.9: version "4.2.11" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" + resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== grapheme-splitter@^1.0.4: version "1.0.4" - resolved "https://registry.yarnpkg.com/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz#9cf3a665c6247479896834af35cf1dbb4400767e" + resolved "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz#9cf3a665c6247479896834af35cf1dbb4400767e" integrity sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ== graphemer@^1.4.0: version "1.4.0" - resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6" + resolved "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6" integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag== gulp-cli@^2.2.0: version "2.3.0" - resolved 
"https://registry.yarnpkg.com/gulp-cli/-/gulp-cli-2.3.0.tgz#ec0d380e29e52aa45e47977f0d32e18fd161122f" + resolved "https://registry.npmjs.org/gulp-cli/-/gulp-cli-2.3.0.tgz#ec0d380e29e52aa45e47977f0d32e18fd161122f" integrity sha512-zzGBl5fHo0EKSXsHzjspp3y5CONegCm8ErO5Qh0UzFzk2y4tMvzLWhoDokADbarfZRL2pGpRp7yt6gfJX4ph7A== dependencies: ansi-colors "^1.0.1" @@ -3590,7 +3775,7 @@ gulp-cli@^2.2.0: gulp-esbuild@0.11.1: version "0.11.1" - resolved "https://registry.yarnpkg.com/gulp-esbuild/-/gulp-esbuild-0.11.1.tgz#85d0ebbacae844dbbae02bb6f9d4216e9b45cee7" + resolved "https://registry.npmjs.org/gulp-esbuild/-/gulp-esbuild-0.11.1.tgz#85d0ebbacae844dbbae02bb6f9d4216e9b45cee7" integrity sha512-+Lbc4NXBcSXtvvz9wIZNrxGfgF70NgBfSNhHi3o4jKD36KQxFVKRECU72ZsSqyy08qXojK6Cur5/S0Ds+2xpSg== dependencies: esbuild "^0.17.11" @@ -3599,7 +3784,7 @@ gulp-esbuild@0.11.1: gulp-json-transform@0.4.8: version "0.4.8" - resolved "https://registry.yarnpkg.com/gulp-json-transform/-/gulp-json-transform-0.4.8.tgz#fdbd19fd5327661ccdfc5977621f739ea86d557d" + resolved "https://registry.npmjs.org/gulp-json-transform/-/gulp-json-transform-0.4.8.tgz#fdbd19fd5327661ccdfc5977621f739ea86d557d" integrity sha512-FBVn1HqNf0hqsgrspA7Xz2i/HxRZPKnccMapKPfIE+PANuCmtmgMD2OfRLbqRGBb0Xwl1pE76sSct5gyF1zsfw== dependencies: ansi-colors "^1.0.1" @@ -3611,12 +3796,12 @@ gulp-json-transform@0.4.8: gulp-rename@2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/gulp-rename/-/gulp-rename-2.0.0.tgz#9bbc3962b0c0f52fc67cd5eaff6c223ec5b9cf6c" + resolved "https://registry.npmjs.org/gulp-rename/-/gulp-rename-2.0.0.tgz#9bbc3962b0c0f52fc67cd5eaff6c223ec5b9cf6c" integrity sha512-97Vba4KBzbYmR5VBs9mWmK+HwIf5mj+/zioxfZhOKeXtx5ZjBk57KFlePf5nxq9QsTtFl0ejnHE3zTC9MHXqyQ== gulp-replace@1.1.4: version "1.1.4" - resolved "https://registry.yarnpkg.com/gulp-replace/-/gulp-replace-1.1.4.tgz#06a0e9ee36f30e343c1e0a2dd760ec32c8a3d3b2" + resolved 
"https://registry.npmjs.org/gulp-replace/-/gulp-replace-1.1.4.tgz#06a0e9ee36f30e343c1e0a2dd760ec32c8a3d3b2" integrity sha512-SVSF7ikuWKhpAW4l4wapAqPPSToJoiNKsbDoUnRrSgwZHH7lH8pbPeQj1aOVYQrbZKhfSVBxVW+Py7vtulRktw== dependencies: "@types/node" "*" @@ -3627,7 +3812,7 @@ gulp-replace@1.1.4: gulp-sourcemaps@3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/gulp-sourcemaps/-/gulp-sourcemaps-3.0.0.tgz#2e154e1a2efed033c0e48013969e6f30337b2743" + resolved "https://registry.npmjs.org/gulp-sourcemaps/-/gulp-sourcemaps-3.0.0.tgz#2e154e1a2efed033c0e48013969e6f30337b2743" integrity sha512-RqvUckJkuYqy4VaIH60RMal4ZtG0IbQ6PXMNkNsshEGJ9cldUPRb/YCgboYae+CLAs1HQNb4ADTKCx65HInquQ== dependencies: "@gulp-sourcemaps/identity-map" "^2.0.1" @@ -3644,7 +3829,7 @@ gulp-sourcemaps@3.0.0: gulp-terser@2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/gulp-terser/-/gulp-terser-2.1.0.tgz#149b693a1adbde922807b60b844bb7351dafbde1" + resolved "https://registry.npmjs.org/gulp-terser/-/gulp-terser-2.1.0.tgz#149b693a1adbde922807b60b844bb7351dafbde1" integrity sha512-lQ3+JUdHDVISAlUIUSZ/G9Dz/rBQHxOiYDQ70IVWFQeh4b33TC1MCIU+K18w07PS3rq/CVc34aQO4SUbdaNMPQ== dependencies: plugin-error "^1.0.1" @@ -3654,7 +3839,7 @@ gulp-terser@2.1.0: gulp-typescript@5.0.1: version "5.0.1" - resolved "https://registry.yarnpkg.com/gulp-typescript/-/gulp-typescript-5.0.1.tgz#96c6565a6eb31e08c2aae1c857b1a079e6226d94" + resolved "https://registry.npmjs.org/gulp-typescript/-/gulp-typescript-5.0.1.tgz#96c6565a6eb31e08c2aae1c857b1a079e6226d94" integrity sha512-YuMMlylyJtUSHG1/wuSVTrZp60k1dMEFKYOvDf7OvbAJWrDtxxD4oZon4ancdWwzjj30ztiidhe4VXJniF0pIQ== dependencies: ansi-colors "^3.0.5" @@ -3666,7 +3851,7 @@ gulp-typescript@5.0.1: gulp-vinyl-size@1.1.4: version "1.1.4" - resolved "https://registry.yarnpkg.com/gulp-vinyl-size/-/gulp-vinyl-size-1.1.4.tgz#06b98a8aa9f91f420ea3068798800aa21f8ccdc1" + resolved 
"https://registry.npmjs.org/gulp-vinyl-size/-/gulp-vinyl-size-1.1.4.tgz#06b98a8aa9f91f420ea3068798800aa21f8ccdc1" integrity sha512-k8QTnUD69CnQnbpbXExFRtMDtQ9jIc5nZXYyVNB5Z+7xrZIO3hVS0zotoWwZawgtBLNLvAnm6JZ3zL+O1ysQ3A== dependencies: fancy-log "^2.0.0" @@ -3675,7 +3860,7 @@ gulp-vinyl-size@1.1.4: gulp@4.0.2: version "4.0.2" - resolved "https://registry.yarnpkg.com/gulp/-/gulp-4.0.2.tgz#543651070fd0f6ab0a0650c6a3e6ff5a7cb09caa" + resolved "https://registry.npmjs.org/gulp/-/gulp-4.0.2.tgz#543651070fd0f6ab0a0650c6a3e6ff5a7cb09caa" integrity sha512-dvEs27SCZt2ibF29xYgmnwwCYZxdxhQ/+LFWlbAW8y7jt68L/65402Lz3+CKy0Ov4rOs+NERmDq7YlZaDqUIfA== dependencies: glob-watcher "^5.0.3" @@ -3685,53 +3870,53 @@ gulp@4.0.2: gulplog@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/gulplog/-/gulplog-1.0.0.tgz#e28c4d45d05ecbbed818363ce8f9c5926229ffe5" + resolved "https://registry.npmjs.org/gulplog/-/gulplog-1.0.0.tgz#e28c4d45d05ecbbed818363ce8f9c5926229ffe5" integrity sha512-hm6N8nrm3Y08jXie48jsC55eCZz9mnb4OirAStEk2deqeyhXU3C1otDVh+ccttMuc1sBi6RX6ZJ720hs9RCvgw== dependencies: glogg "^1.0.0" gzip-size@^6.0.0: version "6.0.0" - resolved "https://registry.yarnpkg.com/gzip-size/-/gzip-size-6.0.0.tgz#065367fd50c239c0671cbcbad5be3e2eeb10e462" + resolved "https://registry.npmjs.org/gzip-size/-/gzip-size-6.0.0.tgz#065367fd50c239c0671cbcbad5be3e2eeb10e462" integrity sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q== dependencies: duplexer "^0.1.2" hard-rejection@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/hard-rejection/-/hard-rejection-2.1.0.tgz#1c6eda5c1685c63942766d79bb40ae773cecd883" + resolved "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz#1c6eda5c1685c63942766d79bb40ae773cecd883" integrity sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA== has-flag@^3.0.0: version "3.0.0" - resolved 
"https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + resolved "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== has-flag@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + resolved "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== has-property-descriptors@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz#610708600606d36961ed04c196193b6a607fa861" + resolved "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz#610708600606d36961ed04c196193b6a607fa861" integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ== dependencies: get-intrinsic "^1.1.1" has-proto@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.1.tgz#1885c1305538958aff469fef37937c22795408e0" + resolved "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz#1885c1305538958aff469fef37937c22795408e0" integrity sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg== has-symbols@^1.0.3: version "1.0.3" - resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" + resolved "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== has-value@^0.3.1: version "0.3.1" - resolved 
"https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" + resolved "https://registry.npmjs.org/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" integrity sha512-gpG936j8/MzaeID5Yif+577c17TxaDmhuyVgSwtnL/q8UUTySg8Mecb+8Cf1otgLoD7DDH75axp86ER7LFsf3Q== dependencies: get-value "^2.0.3" @@ -3740,7 +3925,7 @@ has-value@^0.3.1: has-value@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" + resolved "https://registry.npmjs.org/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" integrity sha512-IBXk4GTsLYdQ7Rvt+GRBrFSVEkmuOUy4re0Xjd9kJSUQpnTrWR4/y9RpfexN9vkAPMFuQoeWKwqzPozRTlasGw== dependencies: get-value "^2.0.6" @@ -3749,12 +3934,12 @@ has-value@^1.0.0: has-values@^0.1.4: version "0.1.4" - resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" + resolved "https://registry.npmjs.org/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" integrity sha512-J8S0cEdWuQbqD9//tlZxiMuMNmxB8PlEwvYwuxsTmR1G5RXUePEX/SJn7aD0GMLieuZYSwNH0cQuJGwnYunXRQ== has-values@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" + resolved "https://registry.npmjs.org/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" integrity sha512-ODYZC64uqzmtfGMEAX/FvZiRyWLpAC3vYnNunURUnkGVTS+mI0smVsWaPydRBsE3g+ok7h960jChO8mFcWlHaQ== dependencies: is-number "^3.0.0" @@ -3762,53 +3947,53 @@ has-values@^1.0.0: has@^1.0.3: version "1.0.3" - resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + resolved "https://registry.npmjs.org/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== 
dependencies: function-bind "^1.1.1" homedir-polyfill@^1.0.1: version "1.0.3" - resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8" + resolved "https://registry.npmjs.org/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8" integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA== dependencies: parse-passwd "^1.0.0" hosted-git-info@^2.1.4: version "2.8.9" - resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" + resolved "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== hosted-git-info@^4.0.1: version "4.1.0" - resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-4.1.0.tgz#827b82867e9ff1c8d0c4d9d53880397d2c86d224" + resolved "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz#827b82867e9ff1c8d0c4d9d53880397d2c86d224" integrity sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA== dependencies: lru-cache "^6.0.0" html-escaper@^2.0.0: version "2.0.2" - resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + resolved "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== human-signals@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" + resolved "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" integrity 
sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== hyperdyperid@^1.2.0: version "1.2.0" - resolved "https://registry.yarnpkg.com/hyperdyperid/-/hyperdyperid-1.2.0.tgz#59668d323ada92228d2a869d3e474d5a33b69e6b" + resolved "https://registry.npmjs.org/hyperdyperid/-/hyperdyperid-1.2.0.tgz#59668d323ada92228d2a869d3e474d5a33b69e6b" integrity sha512-Y93lCzHYgGWdrJ66yIktxiaGULYc6oGiABxhcO5AufBeOyoIdZF7bIfLaOrbM0iGIOXQQgxxRrFEnb+Y6w1n4A== -ignore@^5.2.0: +ignore@^5.2.0, ignore@^5.2.4: version "5.2.4" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324" + resolved "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324" integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ== -import-fresh@^3.0.0, import-fresh@^3.2.1: +import-fresh@^3.2.1: version "3.3.0" - resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + resolved "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== dependencies: parent-module "^1.0.0" @@ -3816,7 +4001,7 @@ import-fresh@^3.0.0, import-fresh@^3.2.1: import-local@^3.0.2: version "3.1.0" - resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" + resolved "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg== dependencies: pkg-dir "^4.2.0" @@ -3824,22 +4009,22 @@ import-local@^3.0.2: imurmurhash@^0.1.4: version "0.1.4" - resolved 
"https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + resolved "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== indent-string@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" + resolved "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== indent-string@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-5.0.0.tgz#4fd2980fccaf8622d14c64d694f4cf33c81951a5" + resolved "https://registry.npmjs.org/indent-string/-/indent-string-5.0.0.tgz#4fd2980fccaf8622d14c64d694f4cf33c81951a5" integrity sha512-m6FAo/spmsW2Ab2fU35JTYwtOKa2yAwXSwgjSv1TJzh4Mh7mC3lzAOVLBprb72XsTrgkEIsl7YrFNAiDiRhIGg== inflight@^1.0.4: version "1.0.6" - resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + resolved "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== dependencies: once "^1.3.0" @@ -3847,27 +4032,27 @@ inflight@^1.0.4: inherits@2, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3: version "2.0.4" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== ini@^1.3.4: version "1.3.8" - 
resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" + resolved "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== interpret@^1.4.0: version "1.4.0" - resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.4.0.tgz#665ab8bc4da27a774a40584e812e3e0fa45b1a1e" + resolved "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz#665ab8bc4da27a774a40584e812e3e0fa45b1a1e" integrity sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA== invert-kv@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-1.0.0.tgz#104a8e4aaca6d3d8cd157a8ef8bfab2d7a3ffdb6" + resolved "https://registry.npmjs.org/invert-kv/-/invert-kv-1.0.0.tgz#104a8e4aaca6d3d8cd157a8ef8bfab2d7a3ffdb6" integrity sha512-xgs2NH9AE66ucSq4cNG1nhSFghr5l6tdL15Pk+jl46bmmBapgoaY/AacXyaDznAqmGL99TiLSQgO/XazFSKYeQ== is-absolute@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/is-absolute/-/is-absolute-1.0.0.tgz#395e1ae84b11f26ad1795e73c17378e48a301576" + resolved "https://registry.npmjs.org/is-absolute/-/is-absolute-1.0.0.tgz#395e1ae84b11f26ad1795e73c17378e48a301576" integrity sha512-dOWoqflvcydARa360Gvv18DZ/gRuHKi2NU/wU5X1ZFzdYfH29nkiNZsF3mp4OJ3H4yo9Mx8A/uAGNzpzPN3yBA== dependencies: is-relative "^1.0.0" @@ -3875,73 +4060,73 @@ is-absolute@^1.0.0: is-accessor-descriptor@^0.1.6: version "0.1.6" - resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" + resolved "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" integrity sha512-e1BM1qnDbMRG3ll2U9dSK0UMHuWOs3pY3AtcFsmvwPtKL3MML/Q86i+GilLfvqEs4GW+ExB91tQ3Ig9noDIZ+A== dependencies: kind-of "^3.0.2" is-accessor-descriptor@^1.0.0: 
version "1.0.0" - resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" + resolved "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== dependencies: kind-of "^6.0.0" is-arrayish@^0.2.1: version "0.2.1" - resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + resolved "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== is-binary-path@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" + resolved "https://registry.npmjs.org/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" integrity sha512-9fRVlXc0uCxEDj1nQzaWONSpbTfx0FmJfzHF7pwlI8DkWGoHBBea4Pg5Ky0ojwwxQmnSifgbKkI06Qv0Ljgj+Q== dependencies: binary-extensions "^1.0.0" is-buffer@^1.1.5: version "1.1.6" - resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" + resolved "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== is-builtin-module@^3.2.1: version "3.2.1" - resolved "https://registry.yarnpkg.com/is-builtin-module/-/is-builtin-module-3.2.1.tgz#f03271717d8654cfcaf07ab0463faa3571581169" + resolved "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-3.2.1.tgz#f03271717d8654cfcaf07ab0463faa3571581169" integrity 
sha512-BSLE3HnV2syZ0FK0iMA/yUGplUeMmNz4AW5fnTunbCIqZi4vG3WjJT9FHMy5D69xmAYBHXQhJdALdpwVxV501A== dependencies: builtin-modules "^3.3.0" is-ci@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-2.0.0.tgz#6bc6334181810e04b5c22b3d589fdca55026404c" + resolved "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz#6bc6334181810e04b5c22b3d589fdca55026404c" integrity sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w== dependencies: ci-info "^2.0.0" -is-core-module@^2.11.0, is-core-module@^2.5.0: - version "2.12.1" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.12.1.tgz#0c0b6885b6f80011c71541ce15c8d66cf5a4f9fd" - integrity sha512-Q4ZuBAe2FUsKtyQJoQHlvP8OvBERxO3jEmy1I7hcRXcJBGGHFh/aJBswbXuS9sgrDH2QUO8ilkwNPHvHMd8clg== +is-core-module@^2.13.0, is-core-module@^2.5.0: + version "2.13.0" + resolved "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.0.tgz#bb52aa6e2cbd49a30c2ba68c42bf3435ba6072db" + integrity sha512-Z7dk6Qo8pOCp3l4tsX2C5ZVas4V+UxwQodwZhLopL91TX8UyyHEXafPcyoeeWuLrwzHcr3igO78wNLwHJHsMCQ== dependencies: has "^1.0.3" is-data-descriptor@^0.1.4: version "0.1.4" - resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" + resolved "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" integrity sha512-+w9D5ulSoBNlmw9OHn3U2v51SyoCd0he+bB3xMl62oijhrspxowjU+AIcDY0N3iEJbUEkB15IlMASQsxYigvXg== dependencies: kind-of "^3.0.2" is-data-descriptor@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" + resolved "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== 
dependencies: kind-of "^6.0.0" is-descriptor@^0.1.0: version "0.1.6" - resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" + resolved "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== dependencies: is-accessor-descriptor "^0.1.6" @@ -3950,7 +4135,7 @@ is-descriptor@^0.1.0: is-descriptor@^1.0.0, is-descriptor@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" + resolved "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== dependencies: is-accessor-descriptor "^1.0.0" @@ -3959,180 +4144,180 @@ is-descriptor@^1.0.0, is-descriptor@^1.0.2: is-extendable@^0.1.0, is-extendable@^0.1.1: version "0.1.1" - resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" + resolved "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" integrity sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw== is-extendable@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" + resolved "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== dependencies: is-plain-object "^2.0.4" is-extglob@^2.1.0, is-extglob@^2.1.1: version "2.1.1" - resolved 
"https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + resolved "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== is-fullwidth-code-point@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" + resolved "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" integrity sha512-1pqUqRjkhPJ9miNq9SwMfdvi6lBJcd6eFxvfaivQhaH3SgisfiuudvFntdKOmxuee/77l+FPjKrQjWvmPjWrRw== dependencies: number-is-nan "^1.0.0" is-fullwidth-code-point@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + resolved "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== is-generator-fn@^2.0.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" + resolved "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== is-glob@^3.1.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" + resolved "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" integrity sha512-UFpDDrPgM6qpnFNI+rh/p3bUaq9hKLZN8bMUWzxmcnZVS3omf4IPK+BrewlnWjO1WmUsMYuSjKh4UJuV4+Lqmw== 
dependencies: is-extglob "^2.1.0" is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3: version "4.0.3" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + resolved "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== dependencies: is-extglob "^2.1.1" is-module@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591" + resolved "https://registry.npmjs.org/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591" integrity sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g== is-negated-glob@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/is-negated-glob/-/is-negated-glob-1.0.0.tgz#6910bca5da8c95e784b5751b976cf5a10fee36d2" + resolved "https://registry.npmjs.org/is-negated-glob/-/is-negated-glob-1.0.0.tgz#6910bca5da8c95e784b5751b976cf5a10fee36d2" integrity sha512-czXVVn/QEmgvej1f50BZ648vUI+em0xqMq2Sn+QncCLN4zj1UAxlT+kw/6ggQTOaZPd1HqKQGEqbpQVtJucWug== is-number@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" + resolved "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" integrity sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg== dependencies: kind-of "^3.0.2" is-number@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-4.0.0.tgz#0026e37f5454d73e356dfe6564699867c6a7f0ff" + resolved "https://registry.npmjs.org/is-number/-/is-number-4.0.0.tgz#0026e37f5454d73e356dfe6564699867c6a7f0ff" integrity sha512-rSklcAIlf1OmFdyAqbnWTLVelsQ58uvZ66S/ZyawjWqIviTWCjg2PzVGw8WUA+nNuPTqb4wgA+NszrJ+08LlgQ== 
is-number@^7.0.0: version "7.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + resolved "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== is-path-cwd@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-3.0.0.tgz#889b41e55c8588b1eb2a96a61d05740a674521c7" + resolved "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-3.0.0.tgz#889b41e55c8588b1eb2a96a61d05740a674521c7" integrity sha512-kyiNFFLU0Ampr6SDZitD/DwUo4Zs1nSdnygUBqsu3LooL00Qvb5j+UnvApUn/TTj1J3OuE6BTdQ5rudKmU2ZaA== is-path-inside@^3.0.3: version "3.0.3" - resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" + resolved "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ== is-path-inside@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-4.0.0.tgz#805aeb62c47c1b12fc3fd13bfb3ed1e7430071db" + resolved "https://registry.npmjs.org/is-path-inside/-/is-path-inside-4.0.0.tgz#805aeb62c47c1b12fc3fd13bfb3ed1e7430071db" integrity sha512-lJJV/5dYS+RcL8uQdBDW9c9uWFLLBNRyFhnAKXw5tVqLlKZ4RMGZKv+YQ/IA3OhD+RpbJa1LLFM1FQPGyIXvOA== is-plain-obj@^1.1.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" + resolved "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" integrity sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg== is-plain-object@^2.0.1, is-plain-object@^2.0.3, is-plain-object@^2.0.4: version 
"2.0.4" - resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" + resolved "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== dependencies: isobject "^3.0.1" is-plain-object@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-5.0.0.tgz#4427f50ab3429e9025ea7d52e9043a9ef4159344" + resolved "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz#4427f50ab3429e9025ea7d52e9043a9ef4159344" integrity sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q== is-promise@^2.2.2: version "2.2.2" - resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.2.2.tgz#39ab959ccbf9a774cf079f7b40c7a26f763135f1" + resolved "https://registry.npmjs.org/is-promise/-/is-promise-2.2.2.tgz#39ab959ccbf9a774cf079f7b40c7a26f763135f1" integrity sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ== is-relative@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/is-relative/-/is-relative-1.0.0.tgz#a1bb6935ce8c5dba1e8b9754b9b2dcc020e2260d" + resolved "https://registry.npmjs.org/is-relative/-/is-relative-1.0.0.tgz#a1bb6935ce8c5dba1e8b9754b9b2dcc020e2260d" integrity sha512-Kw/ReK0iqwKeu0MITLFuj0jbPAmEiOsIwyIXvvbfa6QfmN9pkD1M+8pdk7Rl/dTKbH34/XBFMbgD4iMJhLQbGA== dependencies: is-unc-path "^1.0.0" is-stream@^2.0.0: version "2.0.1" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" + resolved "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== is-unc-path@^1.0.0: version "1.0.0" - resolved 
"https://registry.yarnpkg.com/is-unc-path/-/is-unc-path-1.0.0.tgz#d731e8898ed090a12c352ad2eaed5095ad322c9d" + resolved "https://registry.npmjs.org/is-unc-path/-/is-unc-path-1.0.0.tgz#d731e8898ed090a12c352ad2eaed5095ad322c9d" integrity sha512-mrGpVd0fs7WWLfVsStvgF6iEJnbjDFZh9/emhRDcGWTduTfNHd9CHeUwH3gYIjdbwo4On6hunkztwOaAw0yllQ== dependencies: unc-path-regex "^0.1.2" is-utf8@^0.2.0, is-utf8@^0.2.1: version "0.2.1" - resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" + resolved "https://registry.npmjs.org/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" integrity sha512-rMYPYvCzsXywIsldgLaSoPlw5PfoB/ssr7hY4pLfcodrA5M/eArza1a9VmTiNIBNMjOGr1Ow9mTyU2o69U6U9Q== is-valid-glob@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/is-valid-glob/-/is-valid-glob-1.0.0.tgz#29bf3eff701be2d4d315dbacc39bc39fe8f601aa" + resolved "https://registry.npmjs.org/is-valid-glob/-/is-valid-glob-1.0.0.tgz#29bf3eff701be2d4d315dbacc39bc39fe8f601aa" integrity sha512-AhiROmoEFDSsjx8hW+5sGwgKVIORcXnrlAx/R0ZSeaPw70Vw0CqkGBBhHGL58Uox2eXnU1AnvXJl1XlyedO5bA== is-windows@^1.0.1, is-windows@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" + resolved "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== isarray@1.0.0, isarray@~1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + resolved "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== isexe@^2.0.0: version "2.0.0" - resolved 
"https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + resolved "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== isobject@^2.0.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" + resolved "https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" integrity sha512-+OUdGJlgjOBZDfxnDjYYG6zp487z0JGNQq3cYQYg5f5hKR+syHMsaztzGeml/4kGG55CSpKSpWTY+jYGgsHLgA== dependencies: isarray "1.0.0" isobject@^3.0.0, isobject@^3.0.1: version "3.0.1" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" + resolved "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: version "3.2.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" + resolved "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== -istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0: +istanbul-lib-instrument@^5.0.4: version "5.2.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz#d10c8885c2125574e1c231cacadf955675e1ce3d" + resolved "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz#d10c8885c2125574e1c231cacadf955675e1ce3d" integrity 
sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg== dependencies: "@babel/core" "^7.12.3" @@ -4141,18 +4326,29 @@ istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0: istanbul-lib-coverage "^3.2.0" semver "^6.3.0" +istanbul-lib-instrument@^6.0.0: + version "6.0.0" + resolved "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.0.tgz#7a8af094cbfff1d5bb280f62ce043695ae8dd5b8" + integrity sha512-x58orMzEVfzPUKqlbLd1hXCnySCxKdDKa6Rjg97CwuLLRI4g3FHTdnExu1OqffVFay6zeMW+T6/DowFLndWnIw== + dependencies: + "@babel/core" "^7.12.3" + "@babel/parser" "^7.14.7" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-coverage "^3.2.0" + semver "^7.5.4" + istanbul-lib-report@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" - integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== + version "3.0.1" + resolved "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz#908305bac9a5bd175ac6a74489eafd0fc2445a7d" + integrity sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw== dependencies: istanbul-lib-coverage "^3.0.0" - make-dir "^3.0.0" + make-dir "^4.0.0" supports-color "^7.1.0" istanbul-lib-source-maps@^4.0.0: version "4.0.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" + resolved "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== dependencies: debug "^4.1.1" @@ -4160,16 +4356,16 @@ istanbul-lib-source-maps@^4.0.0: source-map "^0.6.1" istanbul-reports@^3.1.3: - version "3.1.5" - resolved 
"https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.5.tgz#cc9a6ab25cb25659810e4785ed9d9fb742578bae" - integrity sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w== + version "3.1.6" + resolved "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.6.tgz#2544bcab4768154281a2f0870471902704ccaa1a" + integrity sha512-TLgnMkKg3iTDsQ9PbPTdpfAK2DzjF9mqUG7RMgcQl8oFjad8ob4laGxv5XV5U9MAfx8D6tSJiUyuAwzLicaxlg== dependencies: html-escaper "^2.0.0" istanbul-lib-report "^3.0.0" istextorbinary@^3.0.0: version "3.3.0" - resolved "https://registry.yarnpkg.com/istextorbinary/-/istextorbinary-3.3.0.tgz#06b1c57d948da11461bd237c00ce09e9902964f2" + resolved "https://registry.npmjs.org/istextorbinary/-/istextorbinary-3.3.0.tgz#06b1c57d948da11461bd237c00ce09e9902964f2" integrity sha512-Tvq1W6NAcZeJ8op+Hq7tdZ434rqnMx4CCZ7H0ff83uEloDvVbqAwaMTZcafKGJT0VHkYzuXUiCY4hlXQg6WfoQ== dependencies: binaryextensions "^2.2.0" @@ -4177,332 +4373,332 @@ istextorbinary@^3.0.0: ix@5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/ix/-/ix-5.0.0.tgz#b9e292f79b1876bbf696809fe86e42930bdbfcd4" + resolved "https://registry.npmjs.org/ix/-/ix-5.0.0.tgz#b9e292f79b1876bbf696809fe86e42930bdbfcd4" integrity sha512-6LyyrHnvNrSy5pKtW/KA+KKusHrB223aBJCJlIGPN7QBfDkEEtNrAkAz9lLLShIcdJntq6BiPCHuKaCM/9wwXw== dependencies: "@types/node" "^13.7.4" tslib "^2.3.0" jackspeak@^2.0.3: - version "2.2.1" - resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-2.2.1.tgz#655e8cf025d872c9c03d3eb63e8f0c024fef16a6" - integrity sha512-MXbxovZ/Pm42f6cDIDkl3xpwv1AGwObKwfmjs2nQePiy85tP3fatofl3FC1aBsOtP/6fq5SbtgHwWcMsLP+bDw== + version "2.3.3" + resolved "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.3.tgz#95e4cbcc03b3eb357bf6bcce14a903fb3d1151e1" + integrity sha512-R2bUw+kVZFS/h1AZqBKrSgDmdmjApzgY0AlCPumopFiAlbUxE2gf+SCuBzQ0cP5hHmUmFYF5yw55T97Th5Kstg== dependencies: "@isaacs/cliui" "^8.0.2" optionalDependencies: "@pkgjs/parseargs" "^0.11.0" 
-jest-changed-files@^29.5.0: - version "29.5.0" - resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-29.5.0.tgz#e88786dca8bf2aa899ec4af7644e16d9dcf9b23e" - integrity sha512-IFG34IUMUaNBIxjQXF/iu7g6EcdMrGRRxaUSw92I/2g2YC6vCdTltl4nHvt7Ci5nSJwXIkCu8Ka1DKF+X7Z1Ag== +jest-changed-files@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-29.7.0.tgz#1c06d07e77c78e1585d020424dedc10d6e17ac3a" + integrity sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w== dependencies: execa "^5.0.0" + jest-util "^29.7.0" p-limit "^3.1.0" -jest-circus@^29.6.2: - version "29.6.2" - resolved "https://registry.yarnpkg.com/jest-circus/-/jest-circus-29.6.2.tgz#1e6ffca60151ac66cad63fce34f443f6b5bb4258" - integrity sha512-G9mN+KOYIUe2sB9kpJkO9Bk18J4dTDArNFPwoZ7WKHKel55eKIS/u2bLthxgojwlf9NLCVQfgzM/WsOVvoC6Fw== +jest-circus@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/jest-circus/-/jest-circus-29.7.0.tgz#b6817a45fcc835d8b16d5962d0c026473ee3668a" + integrity sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw== dependencies: - "@jest/environment" "^29.6.2" - "@jest/expect" "^29.6.2" - "@jest/test-result" "^29.6.2" - "@jest/types" "^29.6.1" + "@jest/environment" "^29.7.0" + "@jest/expect" "^29.7.0" + "@jest/test-result" "^29.7.0" + "@jest/types" "^29.6.3" "@types/node" "*" chalk "^4.0.0" co "^4.6.0" dedent "^1.0.0" is-generator-fn "^2.0.0" - jest-each "^29.6.2" - jest-matcher-utils "^29.6.2" - jest-message-util "^29.6.2" - jest-runtime "^29.6.2" - jest-snapshot "^29.6.2" - jest-util "^29.6.2" + jest-each "^29.7.0" + jest-matcher-utils "^29.7.0" + jest-message-util "^29.7.0" + jest-runtime "^29.7.0" + jest-snapshot "^29.7.0" + jest-util "^29.7.0" p-limit "^3.1.0" - pretty-format "^29.6.2" + pretty-format "^29.7.0" pure-rand "^6.0.0" slash "^3.0.0" stack-utils "^2.0.3" jest-cli@^29.6.2: - version "29.6.2" - resolved 
"https://registry.yarnpkg.com/jest-cli/-/jest-cli-29.6.2.tgz#edb381763398d1a292cd1b636a98bfa5644b8fda" - integrity sha512-TT6O247v6dCEX2UGHGyflMpxhnrL0DNqP2fRTKYm3nJJpCTfXX3GCMQPGFjXDoj0i5/Blp3jriKXFgdfmbYB6Q== + version "29.7.0" + resolved "https://registry.npmjs.org/jest-cli/-/jest-cli-29.7.0.tgz#5592c940798e0cae677eec169264f2d839a37995" + integrity sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg== dependencies: - "@jest/core" "^29.6.2" - "@jest/test-result" "^29.6.2" - "@jest/types" "^29.6.1" + "@jest/core" "^29.7.0" + "@jest/test-result" "^29.7.0" + "@jest/types" "^29.6.3" chalk "^4.0.0" + create-jest "^29.7.0" exit "^0.1.2" - graceful-fs "^4.2.9" import-local "^3.0.2" - jest-config "^29.6.2" - jest-util "^29.6.2" - jest-validate "^29.6.2" - prompts "^2.0.1" + jest-config "^29.7.0" + jest-util "^29.7.0" + jest-validate "^29.7.0" yargs "^17.3.1" -jest-config@^29.6.2: - version "29.6.2" - resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-29.6.2.tgz#c68723f06b31ca5e63030686e604727d406cd7c3" - integrity sha512-VxwFOC8gkiJbuodG9CPtMRjBUNZEHxwfQXmIudSTzFWxaci3Qub1ddTRbFNQlD/zUeaifLndh/eDccFX4wCMQw== +jest-config@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/jest-config/-/jest-config-29.7.0.tgz#bcbda8806dbcc01b1e316a46bb74085a84b0245f" + integrity sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ== dependencies: "@babel/core" "^7.11.6" - "@jest/test-sequencer" "^29.6.2" - "@jest/types" "^29.6.1" - babel-jest "^29.6.2" + "@jest/test-sequencer" "^29.7.0" + "@jest/types" "^29.6.3" + babel-jest "^29.7.0" chalk "^4.0.0" ci-info "^3.2.0" deepmerge "^4.2.2" glob "^7.1.3" graceful-fs "^4.2.9" - jest-circus "^29.6.2" - jest-environment-node "^29.6.2" - jest-get-type "^29.4.3" - jest-regex-util "^29.4.3" - jest-resolve "^29.6.2" - jest-runner "^29.6.2" - jest-util "^29.6.2" - jest-validate "^29.6.2" + jest-circus "^29.7.0" + jest-environment-node 
"^29.7.0" + jest-get-type "^29.6.3" + jest-regex-util "^29.6.3" + jest-resolve "^29.7.0" + jest-runner "^29.7.0" + jest-util "^29.7.0" + jest-validate "^29.7.0" micromatch "^4.0.4" parse-json "^5.2.0" - pretty-format "^29.6.2" + pretty-format "^29.7.0" slash "^3.0.0" strip-json-comments "^3.1.1" -jest-diff@^29.6.2: - version "29.6.2" - resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-29.6.2.tgz#c36001e5543e82a0805051d3ceac32e6825c1c46" - integrity sha512-t+ST7CB9GX5F2xKwhwCf0TAR17uNDiaPTZnVymP9lw0lssa9vG+AFyDZoeIHStU3WowFFwT+ky+er0WVl2yGhA== +jest-diff@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/jest-diff/-/jest-diff-29.7.0.tgz#017934a66ebb7ecf6f205e84699be10afd70458a" + integrity sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw== dependencies: chalk "^4.0.0" - diff-sequences "^29.4.3" - jest-get-type "^29.4.3" - pretty-format "^29.6.2" + diff-sequences "^29.6.3" + jest-get-type "^29.6.3" + pretty-format "^29.7.0" -jest-docblock@^29.4.3: - version "29.4.3" - resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-29.4.3.tgz#90505aa89514a1c7dceeac1123df79e414636ea8" - integrity sha512-fzdTftThczeSD9nZ3fzA/4KkHtnmllawWrXO69vtI+L9WjEIuXWs4AmyME7lN5hU7dB0sHhuPfcKofRsUb/2Fg== +jest-docblock@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/jest-docblock/-/jest-docblock-29.7.0.tgz#8fddb6adc3cdc955c93e2a87f61cfd350d5d119a" + integrity sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g== dependencies: detect-newline "^3.0.0" -jest-each@^29.6.2: - version "29.6.2" - resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-29.6.2.tgz#c9e4b340bcbe838c73adf46b76817b15712d02ce" - integrity sha512-MsrsqA0Ia99cIpABBc3izS1ZYoYfhIy0NNWqPSE0YXbQjwchyt6B1HD2khzyPe1WiJA7hbxXy77ZoUQxn8UlSw== +jest-each@^29.7.0: + version "29.7.0" + resolved 
"https://registry.npmjs.org/jest-each/-/jest-each-29.7.0.tgz#162a9b3f2328bdd991beaabffbb74745e56577d1" + integrity sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ== dependencies: - "@jest/types" "^29.6.1" + "@jest/types" "^29.6.3" chalk "^4.0.0" - jest-get-type "^29.4.3" - jest-util "^29.6.2" - pretty-format "^29.6.2" - -jest-environment-node@^29.6.2: - version "29.6.2" - resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-29.6.2.tgz#a9ea2cabff39b08eca14ccb32c8ceb924c8bb1ad" - integrity sha512-YGdFeZ3T9a+/612c5mTQIllvWkddPbYcN2v95ZH24oWMbGA4GGS2XdIF92QMhUhvrjjuQWYgUGW2zawOyH63MQ== - dependencies: - "@jest/environment" "^29.6.2" - "@jest/fake-timers" "^29.6.2" - "@jest/types" "^29.6.1" + jest-get-type "^29.6.3" + jest-util "^29.7.0" + pretty-format "^29.7.0" + +jest-environment-node@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-29.7.0.tgz#0b93e111dda8ec120bc8300e6d1fb9576e164376" + integrity sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw== + dependencies: + "@jest/environment" "^29.7.0" + "@jest/fake-timers" "^29.7.0" + "@jest/types" "^29.6.3" "@types/node" "*" - jest-mock "^29.6.2" - jest-util "^29.6.2" + jest-mock "^29.7.0" + jest-util "^29.7.0" -jest-get-type@^29.4.3: - version "29.4.3" - resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-29.4.3.tgz#1ab7a5207c995161100b5187159ca82dd48b3dd5" - integrity sha512-J5Xez4nRRMjk8emnTpWrlkyb9pfRQQanDrvWHhsR1+VUfbwxi30eVcZFlcdGInRibU4G5LwHXpI7IRHU0CY+gg== +jest-get-type@^29.6.3: + version "29.6.3" + resolved "https://registry.npmjs.org/jest-get-type/-/jest-get-type-29.6.3.tgz#36f499fdcea197c1045a127319c0481723908fd1" + integrity sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw== -jest-haste-map@^29.6.2: - version "29.6.2" - resolved 
"https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-29.6.2.tgz#298c25ea5255cfad8b723179d4295cf3a50a70d1" - integrity sha512-+51XleTDAAysvU8rT6AnS1ZJ+WHVNqhj1k6nTvN2PYP+HjU3kqlaKQ1Lnw3NYW3bm2r8vq82X0Z1nDDHZMzHVA== +jest-haste-map@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-29.7.0.tgz#3c2396524482f5a0506376e6c858c3bbcc17b104" + integrity sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA== dependencies: - "@jest/types" "^29.6.1" + "@jest/types" "^29.6.3" "@types/graceful-fs" "^4.1.3" "@types/node" "*" anymatch "^3.0.3" fb-watchman "^2.0.0" graceful-fs "^4.2.9" - jest-regex-util "^29.4.3" - jest-util "^29.6.2" - jest-worker "^29.6.2" + jest-regex-util "^29.6.3" + jest-util "^29.7.0" + jest-worker "^29.7.0" micromatch "^4.0.4" walker "^1.0.8" optionalDependencies: fsevents "^2.3.2" -jest-leak-detector@^29.6.2: - version "29.6.2" - resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-29.6.2.tgz#e2b307fee78cab091c37858a98c7e1d73cdf5b38" - integrity sha512-aNqYhfp5uYEO3tdWMb2bfWv6f0b4I0LOxVRpnRLAeque2uqOVVMLh6khnTcE2qJ5wAKop0HcreM1btoysD6bPQ== +jest-leak-detector@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz#5b7ec0dadfdfec0ca383dc9aa016d36b5ea4c728" + integrity sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw== dependencies: - jest-get-type "^29.4.3" - pretty-format "^29.6.2" + jest-get-type "^29.6.3" + pretty-format "^29.7.0" -jest-matcher-utils@^29.6.2: - version "29.6.2" - resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-29.6.2.tgz#39de0be2baca7a64eacb27291f0bd834fea3a535" - integrity sha512-4LiAk3hSSobtomeIAzFTe+N8kL6z0JtF3n6I4fg29iIW7tt99R7ZcIFW34QkX+DuVrf+CUe6wuVOpm7ZKFJzZQ== +jest-matcher-utils@^29.7.0: + version "29.7.0" + resolved 
"https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-29.7.0.tgz#ae8fec79ff249fd592ce80e3ee474e83a6c44f12" + integrity sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g== dependencies: chalk "^4.0.0" - jest-diff "^29.6.2" - jest-get-type "^29.4.3" - pretty-format "^29.6.2" + jest-diff "^29.7.0" + jest-get-type "^29.6.3" + pretty-format "^29.7.0" -jest-message-util@^29.6.2: - version "29.6.2" - resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-29.6.2.tgz#af7adc2209c552f3f5ae31e77cf0a261f23dc2bb" - integrity sha512-vnIGYEjoPSuRqV8W9t+Wow95SDp6KPX2Uf7EoeG9G99J2OVh7OSwpS4B6J0NfpEIpfkBNHlBZpA2rblEuEFhZQ== +jest-message-util@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/jest-message-util/-/jest-message-util-29.7.0.tgz#8bc392e204e95dfe7564abbe72a404e28e51f7f3" + integrity sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w== dependencies: "@babel/code-frame" "^7.12.13" - "@jest/types" "^29.6.1" + "@jest/types" "^29.6.3" "@types/stack-utils" "^2.0.0" chalk "^4.0.0" graceful-fs "^4.2.9" micromatch "^4.0.4" - pretty-format "^29.6.2" + pretty-format "^29.7.0" slash "^3.0.0" stack-utils "^2.0.3" -jest-mock@^29.6.2: - version "29.6.2" - resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-29.6.2.tgz#ef9c9b4d38c34a2ad61010a021866dad41ce5e00" - integrity sha512-hoSv3lb3byzdKfwqCuT6uTscan471GUECqgNYykg6ob0yiAw3zYc7OrPnI9Qv8Wwoa4lC7AZ9hyS4AiIx5U2zg== +jest-mock@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/jest-mock/-/jest-mock-29.7.0.tgz#4e836cf60e99c6fcfabe9f99d017f3fdd50a6347" + integrity sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw== dependencies: - "@jest/types" "^29.6.1" + "@jest/types" "^29.6.3" "@types/node" "*" - jest-util "^29.6.2" + jest-util "^29.7.0" jest-pnp-resolver@^1.2.2: version "1.2.3" - resolved 
"https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz#930b1546164d4ad5937d5540e711d4d38d4cad2e" + resolved "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz#930b1546164d4ad5937d5540e711d4d38d4cad2e" integrity sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w== -jest-regex-util@^29.4.3: - version "29.4.3" - resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-29.4.3.tgz#a42616141e0cae052cfa32c169945d00c0aa0bb8" - integrity sha512-O4FglZaMmWXbGHSQInfXewIsd1LMn9p3ZXB/6r4FOkyhX2/iP/soMG98jGvk/A3HAN78+5VWcBGO0BJAPRh4kg== +jest-regex-util@^29.6.3: + version "29.6.3" + resolved "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-29.6.3.tgz#4a556d9c776af68e1c5f48194f4d0327d24e8a52" + integrity sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg== -jest-resolve-dependencies@^29.6.2: - version "29.6.2" - resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-29.6.2.tgz#36435269b6672c256bcc85fb384872c134cc4cf2" - integrity sha512-LGqjDWxg2fuQQm7ypDxduLu/m4+4Lb4gczc13v51VMZbVP5tSBILqVx8qfWcsdP8f0G7aIqByIALDB0R93yL+w== +jest-resolve-dependencies@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-29.7.0.tgz#1b04f2c095f37fc776ff40803dc92921b1e88428" + integrity sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA== dependencies: - jest-regex-util "^29.4.3" - jest-snapshot "^29.6.2" + jest-regex-util "^29.6.3" + jest-snapshot "^29.7.0" -jest-resolve@^29.6.2: - version "29.6.2" - resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-29.6.2.tgz#f18405fe4b50159b7b6d85e81f6a524d22afb838" - integrity sha512-G/iQUvZWI5e3SMFssc4ug4dH0aZiZpsDq9o1PtXTV1210Ztyb2+w+ZgQkB3iOiC5SmAEzJBOHWz6Hvrd+QnNPw== +jest-resolve@^29.7.0: + version "29.7.0" + resolved 
"https://registry.npmjs.org/jest-resolve/-/jest-resolve-29.7.0.tgz#64d6a8992dd26f635ab0c01e5eef4399c6bcbc30" + integrity sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA== dependencies: chalk "^4.0.0" graceful-fs "^4.2.9" - jest-haste-map "^29.6.2" + jest-haste-map "^29.7.0" jest-pnp-resolver "^1.2.2" - jest-util "^29.6.2" - jest-validate "^29.6.2" + jest-util "^29.7.0" + jest-validate "^29.7.0" resolve "^1.20.0" resolve.exports "^2.0.0" slash "^3.0.0" -jest-runner@^29.6.2: - version "29.6.2" - resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-29.6.2.tgz#89e8e32a8fef24781a7c4c49cd1cb6358ac7fc01" - integrity sha512-wXOT/a0EspYgfMiYHxwGLPCZfC0c38MivAlb2lMEAlwHINKemrttu1uSbcGbfDV31sFaPWnWJPmb2qXM8pqZ4w== +jest-runner@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/jest-runner/-/jest-runner-29.7.0.tgz#809af072d408a53dcfd2e849a4c976d3132f718e" + integrity sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ== dependencies: - "@jest/console" "^29.6.2" - "@jest/environment" "^29.6.2" - "@jest/test-result" "^29.6.2" - "@jest/transform" "^29.6.2" - "@jest/types" "^29.6.1" + "@jest/console" "^29.7.0" + "@jest/environment" "^29.7.0" + "@jest/test-result" "^29.7.0" + "@jest/transform" "^29.7.0" + "@jest/types" "^29.6.3" "@types/node" "*" chalk "^4.0.0" emittery "^0.13.1" graceful-fs "^4.2.9" - jest-docblock "^29.4.3" - jest-environment-node "^29.6.2" - jest-haste-map "^29.6.2" - jest-leak-detector "^29.6.2" - jest-message-util "^29.6.2" - jest-resolve "^29.6.2" - jest-runtime "^29.6.2" - jest-util "^29.6.2" - jest-watcher "^29.6.2" - jest-worker "^29.6.2" + jest-docblock "^29.7.0" + jest-environment-node "^29.7.0" + jest-haste-map "^29.7.0" + jest-leak-detector "^29.7.0" + jest-message-util "^29.7.0" + jest-resolve "^29.7.0" + jest-runtime "^29.7.0" + jest-util "^29.7.0" + jest-watcher "^29.7.0" + jest-worker "^29.7.0" p-limit "^3.1.0" source-map-support 
"0.5.13" -jest-runtime@^29.6.2: - version "29.6.2" - resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-29.6.2.tgz#692f25e387f982e89ab83270e684a9786248e545" - integrity sha512-2X9dqK768KufGJyIeLmIzToDmsN0m7Iek8QNxRSI/2+iPFYHF0jTwlO3ftn7gdKd98G/VQw9XJCk77rbTGZnJg== - dependencies: - "@jest/environment" "^29.6.2" - "@jest/fake-timers" "^29.6.2" - "@jest/globals" "^29.6.2" - "@jest/source-map" "^29.6.0" - "@jest/test-result" "^29.6.2" - "@jest/transform" "^29.6.2" - "@jest/types" "^29.6.1" +jest-runtime@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/jest-runtime/-/jest-runtime-29.7.0.tgz#efecb3141cf7d3767a3a0cc8f7c9990587d3d817" + integrity sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ== + dependencies: + "@jest/environment" "^29.7.0" + "@jest/fake-timers" "^29.7.0" + "@jest/globals" "^29.7.0" + "@jest/source-map" "^29.6.3" + "@jest/test-result" "^29.7.0" + "@jest/transform" "^29.7.0" + "@jest/types" "^29.6.3" "@types/node" "*" chalk "^4.0.0" cjs-module-lexer "^1.0.0" collect-v8-coverage "^1.0.0" glob "^7.1.3" graceful-fs "^4.2.9" - jest-haste-map "^29.6.2" - jest-message-util "^29.6.2" - jest-mock "^29.6.2" - jest-regex-util "^29.4.3" - jest-resolve "^29.6.2" - jest-snapshot "^29.6.2" - jest-util "^29.6.2" + jest-haste-map "^29.7.0" + jest-message-util "^29.7.0" + jest-mock "^29.7.0" + jest-regex-util "^29.6.3" + jest-resolve "^29.7.0" + jest-snapshot "^29.7.0" + jest-util "^29.7.0" slash "^3.0.0" strip-bom "^4.0.0" jest-silent-reporter@0.5.0: version "0.5.0" - resolved "https://registry.yarnpkg.com/jest-silent-reporter/-/jest-silent-reporter-0.5.0.tgz#5fd8ccd61665227e3bf19d908b7350719d06ff38" + resolved "https://registry.npmjs.org/jest-silent-reporter/-/jest-silent-reporter-0.5.0.tgz#5fd8ccd61665227e3bf19d908b7350719d06ff38" integrity sha512-epdLt8Oj0a1AyRiR6F8zx/1SVT1Mi7VU3y4wB2uOBHs/ohIquC7v2eeja7UN54uRPyHInIKWdL+RdG228n5pJQ== dependencies: chalk "^4.0.0" jest-util "^26.0.0" 
-jest-snapshot@^29.6.2: - version "29.6.2" - resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-29.6.2.tgz#9b431b561a83f2bdfe041e1cab8a6becdb01af9c" - integrity sha512-1OdjqvqmRdGNvWXr/YZHuyhh5DeaLp1p/F8Tht/MrMw4Kr1Uu/j4lRG+iKl1DAqUJDWxtQBMk41Lnf/JETYBRA== +jest-snapshot@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-29.7.0.tgz#c2c574c3f51865da1bb329036778a69bf88a6be5" + integrity sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw== dependencies: "@babel/core" "^7.11.6" "@babel/generator" "^7.7.2" "@babel/plugin-syntax-jsx" "^7.7.2" "@babel/plugin-syntax-typescript" "^7.7.2" "@babel/types" "^7.3.3" - "@jest/expect-utils" "^29.6.2" - "@jest/transform" "^29.6.2" - "@jest/types" "^29.6.1" + "@jest/expect-utils" "^29.7.0" + "@jest/transform" "^29.7.0" + "@jest/types" "^29.6.3" babel-preset-current-node-syntax "^1.0.0" chalk "^4.0.0" - expect "^29.6.2" + expect "^29.7.0" graceful-fs "^4.2.9" - jest-diff "^29.6.2" - jest-get-type "^29.4.3" - jest-matcher-utils "^29.6.2" - jest-message-util "^29.6.2" - jest-util "^29.6.2" + jest-diff "^29.7.0" + jest-get-type "^29.6.3" + jest-matcher-utils "^29.7.0" + jest-message-util "^29.7.0" + jest-util "^29.7.0" natural-compare "^1.4.0" - pretty-format "^29.6.2" + pretty-format "^29.7.0" semver "^7.5.3" jest-util@^26.0.0: version "26.6.2" - resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-26.6.2.tgz#907535dbe4d5a6cb4c47ac9b926f6af29576cbc1" + resolved "https://registry.npmjs.org/jest-util/-/jest-util-26.6.2.tgz#907535dbe4d5a6cb4c47ac9b926f6af29576cbc1" integrity sha512-MDW0fKfsn0OI7MS7Euz6h8HNDXVQ0gaM9uW6RjfDmd1DAFcaxX9OqIakHIqhbnmF08Cf2DLDG+ulq8YQQ0Lp0Q== dependencies: "@jest/types" "^26.6.2" @@ -4512,66 +4708,66 @@ jest-util@^26.0.0: is-ci "^2.0.0" micromatch "^4.0.2" -jest-util@^29.0.0, jest-util@^29.6.2: - version "29.6.2" - resolved 
"https://registry.yarnpkg.com/jest-util/-/jest-util-29.6.2.tgz#8a052df8fff2eebe446769fd88814521a517664d" - integrity sha512-3eX1qb6L88lJNCFlEADKOkjpXJQyZRiavX1INZ4tRnrBVr2COd3RgcTLyUiEXMNBlDU/cgYq6taUS0fExrWW4w== +jest-util@^29.0.0, jest-util@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz#23c2b62bfb22be82b44de98055802ff3710fc0bc" + integrity sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA== dependencies: - "@jest/types" "^29.6.1" + "@jest/types" "^29.6.3" "@types/node" "*" chalk "^4.0.0" ci-info "^3.2.0" graceful-fs "^4.2.9" picomatch "^2.2.3" -jest-validate@^29.6.2: - version "29.6.2" - resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-29.6.2.tgz#25d972af35b2415b83b1373baf1a47bb266c1082" - integrity sha512-vGz0yMN5fUFRRbpJDPwxMpgSXW1LDKROHfBopAvDcmD6s+B/s8WJrwi+4bfH4SdInBA5C3P3BI19dBtKzx1Arg== +jest-validate@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/jest-validate/-/jest-validate-29.7.0.tgz#7bf705511c64da591d46b15fce41400d52147d9c" + integrity sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw== dependencies: - "@jest/types" "^29.6.1" + "@jest/types" "^29.6.3" camelcase "^6.2.0" chalk "^4.0.0" - jest-get-type "^29.4.3" + jest-get-type "^29.6.3" leven "^3.1.0" - pretty-format "^29.6.2" + pretty-format "^29.7.0" -jest-watcher@^29.6.2: - version "29.6.2" - resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-29.6.2.tgz#77c224674f0620d9f6643c4cfca186d8893ca088" - integrity sha512-GZitlqkMkhkefjfN/p3SJjrDaxPflqxEAv3/ik10OirZqJGYH5rPiIsgVcfof0Tdqg3shQGdEIxDBx+B4tuLzA== +jest-watcher@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/jest-watcher/-/jest-watcher-29.7.0.tgz#7810d30d619c3a62093223ce6bb359ca1b28a2f2" + integrity sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g== dependencies: - "@jest/test-result" 
"^29.6.2" - "@jest/types" "^29.6.1" + "@jest/test-result" "^29.7.0" + "@jest/types" "^29.6.3" "@types/node" "*" ansi-escapes "^4.2.1" chalk "^4.0.0" emittery "^0.13.1" - jest-util "^29.6.2" + jest-util "^29.7.0" string-length "^4.0.1" jest-worker@^27.4.5: version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0" + resolved "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0" integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg== dependencies: "@types/node" "*" merge-stream "^2.0.0" supports-color "^8.0.0" -jest-worker@^29.6.2: - version "29.6.2" - resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-29.6.2.tgz#682fbc4b6856ad0aa122a5403c6d048b83f3fb44" - integrity sha512-l3ccBOabTdkng8I/ORCkADz4eSMKejTYv1vB/Z83UiubqhC1oQ5Li6dWCyqOIvSifGjUBxuvxvlm6KGK2DtuAQ== +jest-worker@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/jest-worker/-/jest-worker-29.7.0.tgz#acad073acbbaeb7262bd5389e1bcf43e10058d4a" + integrity sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw== dependencies: "@types/node" "*" - jest-util "^29.6.2" + jest-util "^29.7.0" merge-stream "^2.0.0" supports-color "^8.0.0" jest@29.6.2: version "29.6.2" - resolved "https://registry.yarnpkg.com/jest/-/jest-29.6.2.tgz#3bd55b9fd46a161b2edbdf5f1d1bd0d1eab76c42" + resolved "https://registry.npmjs.org/jest/-/jest-29.6.2.tgz#3bd55b9fd46a161b2edbdf5f1d1bd0d1eab76c42" integrity sha512-8eQg2mqFbaP7CwfsTpCxQ+sHzw1WuNWL5UUvjnWP4hx2riGz9fPSzYOaU5q8/GqWn1TfgZIVTqYJygbGbWAANg== dependencies: "@jest/core" "^29.6.2" @@ -4581,12 +4777,12 @@ jest@29.6.2: js-tokens@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + resolved 
"https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== js-yaml@^3.13.1: version "3.14.1" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== dependencies: argparse "^1.0.7" @@ -4594,76 +4790,81 @@ js-yaml@^3.13.1: js-yaml@^4.1.0: version "4.1.0" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== dependencies: argparse "^2.0.1" jsesc@^2.5.1: version "2.5.2" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + resolved "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== jsesc@^3.0.2: version "3.0.2" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-3.0.2.tgz#bb8b09a6597ba426425f2e4a07245c3d00b9343e" + resolved "https://registry.npmjs.org/jsesc/-/jsesc-3.0.2.tgz#bb8b09a6597ba426425f2e4a07245c3d00b9343e" integrity sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g== jsesc@~0.5.0: version "0.5.0" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" + resolved "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" integrity 
sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA== json-bignum@^0.0.3: version "0.0.3" - resolved "https://registry.yarnpkg.com/json-bignum/-/json-bignum-0.0.3.tgz#41163b50436c773d82424dbc20ed70db7604b8d7" + resolved "https://registry.npmjs.org/json-bignum/-/json-bignum-0.0.3.tgz#41163b50436c773d82424dbc20ed70db7604b8d7" integrity sha512-2WHyXj3OfHSgNyuzDbSxI1w2jgw5gkWSWhS7Qg4bWXx1nLk3jnbwfUeS0PSba3IzpTUWdHxBieELUzXRjQB2zg== +json-buffer@3.0.1: + version "3.0.1" + resolved "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13" + integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ== + json-joy@^9.2.0: - version "9.5.1" - resolved "https://registry.yarnpkg.com/json-joy/-/json-joy-9.5.1.tgz#056683b4db4b0e279451a563a756b70b9fd97fa3" - integrity sha512-XMSpdxaiWUZlc+CAUbPS3G2MZbGxm6clFatqjta/DLrq5V4Y5JU4cx7Qvy7l+XTVPvmRWaYuzzAuCf9uUc40IA== + version "9.6.0" + resolved "https://registry.npmjs.org/json-joy/-/json-joy-9.6.0.tgz#b691310024205b2d082737ca3c7e72cac0e364ac" + integrity sha512-vJtJD89T0OOZFMaENe95xKCOdibMev/lELkclTdhZxLplwbBPxneWNuctUPizk2nLqtGfBxwCXVO42G9LBoFBA== dependencies: arg "^5.0.2" hyperdyperid "^1.2.0" json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1: version "2.3.1" - resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" + resolved "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== json-schema-traverse@^0.4.1: version "0.4.1" - resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + resolved 
"https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== json-stable-stringify-without-jsonify@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + resolved "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== json2csv@^5.0.6: version "5.0.7" - resolved "https://registry.yarnpkg.com/json2csv/-/json2csv-5.0.7.tgz#f3a583c25abd9804be873e495d1e65ad8d1b54ae" + resolved "https://registry.npmjs.org/json2csv/-/json2csv-5.0.7.tgz#f3a583c25abd9804be873e495d1e65ad8d1b54ae" integrity sha512-YRZbUnyaJZLZUJSRi2G/MqahCyRv9n/ds+4oIetjDF3jWQA7AG7iSeKTiZiCNqtMZM7HDyt0e/W6lEnoGEmMGA== dependencies: commander "^6.1.0" jsonparse "^1.3.1" lodash.get "^4.4.2" -json5@^2.2.2, json5@^2.2.3: +json5@^2.2.3: version "2.2.3" - resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" + resolved "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== jsonc-parser@^3.2.0: version "3.2.0" - resolved "https://registry.yarnpkg.com/jsonc-parser/-/jsonc-parser-3.2.0.tgz#31ff3f4c2b9793f89c67212627c51c6394f88e76" + resolved "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.2.0.tgz#31ff3f4c2b9793f89c67212627c51c6394f88e76" integrity sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w== jsonfile@^6.0.1: version "6.1.0" - resolved 
"https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" + resolved "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== dependencies: universalify "^2.0.0" @@ -4672,51 +4873,58 @@ jsonfile@^6.0.1: jsonparse@^1.3.1: version "1.3.1" - resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-1.3.1.tgz#3f4dae4a91fac315f71062f8521cc239f1366280" + resolved "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz#3f4dae4a91fac315f71062f8521cc239f1366280" integrity sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg== just-debounce@^1.0.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/just-debounce/-/just-debounce-1.1.0.tgz#2f81a3ad4121a76bc7cb45dbf704c0d76a8e5ddf" + resolved "https://registry.npmjs.org/just-debounce/-/just-debounce-1.1.0.tgz#2f81a3ad4121a76bc7cb45dbf704c0d76a8e5ddf" integrity sha512-qpcRocdkUmf+UTNBYx5w6dexX5J31AKK1OmPwH630a83DdVVUIngk55RSAiIGpQyoH0dlr872VHfPjnQnK1qDQ== +keyv@^4.5.3: + version "4.5.3" + resolved "https://registry.npmjs.org/keyv/-/keyv-4.5.3.tgz#00873d2b046df737963157bd04f294ca818c9c25" + integrity sha512-QCiSav9WaX1PgETJ+SpNnx2PRRapJ/oRSXM4VO5OGYGSjrxbKPVFVhB3l2OCbLCk329N8qyAtsJjSjvVBWzEug== + dependencies: + json-buffer "3.0.1" + kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" + resolved "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" integrity sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ== dependencies: is-buffer "^1.1.5" kind-of@^4.0.0: version "4.0.0" - resolved 
"https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" + resolved "https://registry.npmjs.org/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" integrity sha512-24XsCxmEbRwEDbz/qz3stgin8TTzZ1ESR56OMCN0ujYg+vRutNSiOj9bHH9u85DKgXguraugV5sFuvbD4FW/hw== dependencies: is-buffer "^1.1.5" kind-of@^5.0.0, kind-of@^5.0.2: version "5.1.0" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" + resolved "https://registry.npmjs.org/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== kind-of@^6.0.0, kind-of@^6.0.2, kind-of@^6.0.3: version "6.0.3" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" + resolved "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== kleur@^3.0.3: version "3.0.3" - resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" + resolved "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== kleur@^4.1.4: version "4.1.5" - resolved "https://registry.yarnpkg.com/kleur/-/kleur-4.1.5.tgz#95106101795f7050c6c650f350c683febddb1780" + resolved "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz#95106101795f7050c6c650f350c683febddb1780" integrity sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ== last-run@^1.1.0: version "1.1.1" - resolved "https://registry.yarnpkg.com/last-run/-/last-run-1.1.1.tgz#45b96942c17b1c79c772198259ba943bebf8ca5b" + resolved 
"https://registry.npmjs.org/last-run/-/last-run-1.1.1.tgz#45b96942c17b1c79c772198259ba943bebf8ca5b" integrity sha512-U/VxvpX4N/rFvPzr3qG5EtLKEnNI0emvIQB3/ecEwv+8GHaUKbIB8vxv1Oai5FAF0d0r7LXHhLLe5K/yChm5GQ== dependencies: default-resolution "^2.0.0" @@ -4724,33 +4932,33 @@ last-run@^1.1.0: lazystream@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/lazystream/-/lazystream-1.0.1.tgz#494c831062f1f9408251ec44db1cba29242a2638" + resolved "https://registry.npmjs.org/lazystream/-/lazystream-1.0.1.tgz#494c831062f1f9408251ec44db1cba29242a2638" integrity sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw== dependencies: readable-stream "^2.0.5" lcid@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/lcid/-/lcid-1.0.0.tgz#308accafa0bc483a3867b4b6f2b9506251d1b835" + resolved "https://registry.npmjs.org/lcid/-/lcid-1.0.0.tgz#308accafa0bc483a3867b4b6f2b9506251d1b835" integrity sha512-YiGkH6EnGrDGqLMITnGjXtGmNtjoXw9SVUzcaos8RBi7Ps0VBylkq+vOcY9QE5poLasPCR849ucFUkl0UzUyOw== dependencies: invert-kv "^1.0.0" lead@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/lead/-/lead-1.0.0.tgz#6f14f99a37be3a9dd784f5495690e5903466ee42" + resolved "https://registry.npmjs.org/lead/-/lead-1.0.0.tgz#6f14f99a37be3a9dd784f5495690e5903466ee42" integrity sha512-IpSVCk9AYvLHo5ctcIXxOBpMWUe+4TKN3VPWAKUbJikkmsGp0VrSM8IttVc32D6J4WUsiPE6aEFRNmIoF/gdow== dependencies: flush-write-stream "^1.0.2" leven@^3.1.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" + resolved "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== levn@^0.4.1: version "0.4.1" - resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + resolved 
"https://registry.npmjs.org/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== dependencies: prelude-ls "^1.2.1" @@ -4758,7 +4966,7 @@ levn@^0.4.1: liftoff@^3.1.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/liftoff/-/liftoff-3.1.0.tgz#c9ba6081f908670607ee79062d700df062c52ed3" + resolved "https://registry.npmjs.org/liftoff/-/liftoff-3.1.0.tgz#c9ba6081f908670607ee79062d700df062c52ed3" integrity sha512-DlIPlJUkCV0Ips2zf2pJP0unEoT1kwYhiiPUGF3s/jtxTCjziNLoiVVh+jqWOWeFi6mmwQ5fNxvAUyPad4Dfog== dependencies: extend "^3.0.0" @@ -4772,12 +4980,12 @@ liftoff@^3.1.0: lines-and-columns@^1.1.6: version "1.2.4" - resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" + resolved "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== load-json-file@^1.0.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" + resolved "https://registry.npmjs.org/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" integrity sha512-cy7ZdNRXdablkXYNI049pthVeXFurRyb9+hA/dZzerZ0pGTx42z+y+ssxBaVV2l70t1muq5IdKhn4UtcoGUY9A== dependencies: graceful-fs "^4.1.2" @@ -4788,96 +4996,96 @@ load-json-file@^1.0.0: loader-runner@^4.2.0: version "4.3.0" - resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-4.3.0.tgz#c1b4a163b99f614830353b16755e7149ac2314e1" + resolved "https://registry.npmjs.org/loader-runner/-/loader-runner-4.3.0.tgz#c1b4a163b99f614830353b16755e7149ac2314e1" integrity sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg== locate-path@^5.0.0: version 
"5.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" + resolved "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== dependencies: p-locate "^4.1.0" locate-path@^6.0.0: version "6.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" + resolved "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== dependencies: p-locate "^5.0.0" lodash.assignwith@^4.2.0: version "4.2.0" - resolved "https://registry.yarnpkg.com/lodash.assignwith/-/lodash.assignwith-4.2.0.tgz#127a97f02adc41751a954d24b0de17e100e038eb" + resolved "https://registry.npmjs.org/lodash.assignwith/-/lodash.assignwith-4.2.0.tgz#127a97f02adc41751a954d24b0de17e100e038eb" integrity sha512-ZznplvbvtjK2gMvnQ1BR/zqPFZmS6jbK4p+6Up4xcRYA7yMIwxHCfbTcrYxXKzzqLsQ05eJPVznEW3tuwV7k1g== lodash.camelcase@^4.3.0: version "4.3.0" - resolved "https://registry.yarnpkg.com/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6" + resolved "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6" integrity sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA== lodash.clone@^4.3.2: version "4.5.0" - resolved "https://registry.yarnpkg.com/lodash.clone/-/lodash.clone-4.5.0.tgz#195870450f5a13192478df4bc3d23d2dea1907b6" + resolved "https://registry.npmjs.org/lodash.clone/-/lodash.clone-4.5.0.tgz#195870450f5a13192478df4bc3d23d2dea1907b6" integrity sha512-GhrVeweiTD6uTmmn5hV/lzgCQhccwReIVRLHp7LT4SopOjqEZ5BbX8b5WWEtAKasjmy8hR7ZPwsYlxRCku5odg== 
lodash.debounce@^4.0.8: version "4.0.8" - resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" + resolved "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow== lodash.escape@^4.0.1: version "4.0.1" - resolved "https://registry.yarnpkg.com/lodash.escape/-/lodash.escape-4.0.1.tgz#c9044690c21e04294beaa517712fded1fa88de98" + resolved "https://registry.npmjs.org/lodash.escape/-/lodash.escape-4.0.1.tgz#c9044690c21e04294beaa517712fded1fa88de98" integrity sha512-nXEOnb/jK9g0DYMr1/Xvq6l5xMD7GDG55+GSYIYmS0G4tBk/hURD4JR9WCavs04t33WmJx9kCyp9vJ+mr4BOUw== lodash.flatten@^4.4.0: version "4.4.0" - resolved "https://registry.yarnpkg.com/lodash.flatten/-/lodash.flatten-4.4.0.tgz#f31c22225a9632d2bbf8e4addbef240aa765a61f" + resolved "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz#f31c22225a9632d2bbf8e4addbef240aa765a61f" integrity sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g== lodash.get@^4.4.2: version "4.4.2" - resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" + resolved "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" integrity sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ== lodash.invokemap@^4.6.0: version "4.6.0" - resolved "https://registry.yarnpkg.com/lodash.invokemap/-/lodash.invokemap-4.6.0.tgz#1748cda5d8b0ef8369c4eb3ec54c21feba1f2d62" + resolved "https://registry.npmjs.org/lodash.invokemap/-/lodash.invokemap-4.6.0.tgz#1748cda5d8b0ef8369c4eb3ec54c21feba1f2d62" integrity sha512-CfkycNtMqgUlfjfdh2BhKO/ZXrP8ePOX5lEU/g0R3ItJcnuxWDwokMGKx1hWcfOikmyOVx6X9IwWnDGlgKl61w== lodash.memoize@4.x: version "4.1.2" - 
resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" + resolved "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag== lodash.merge@^4.6.2: version "4.6.2" - resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + resolved "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== lodash.pullall@^4.2.0: version "4.2.0" - resolved "https://registry.yarnpkg.com/lodash.pullall/-/lodash.pullall-4.2.0.tgz#9d98b8518b7c965b0fae4099bd9fb7df8bbf38ba" + resolved "https://registry.npmjs.org/lodash.pullall/-/lodash.pullall-4.2.0.tgz#9d98b8518b7c965b0fae4099bd9fb7df8bbf38ba" integrity sha512-VhqxBKH0ZxPpLhiu68YD1KnHmbhQJQctcipvmFnqIBDYzcIHzf3Zpu0tpeOKtR4x76p9yohc506eGdOjTmyIBg== lodash.some@^4.2.2: version "4.6.0" - resolved "https://registry.yarnpkg.com/lodash.some/-/lodash.some-4.6.0.tgz#1bb9f314ef6b8baded13b549169b2a945eb68e4d" + resolved "https://registry.npmjs.org/lodash.some/-/lodash.some-4.6.0.tgz#1bb9f314ef6b8baded13b549169b2a945eb68e4d" integrity sha512-j7MJE+TuT51q9ggt4fSgVqro163BEFjAt3u97IqU+JA2DkWl80nFTrowzLpZ/BnpN7rrl0JA/593NAdd8p/scQ== lodash.uniqby@^4.7.0: version "4.7.0" - resolved "https://registry.yarnpkg.com/lodash.uniqby/-/lodash.uniqby-4.7.0.tgz#d99c07a669e9e6d24e1362dfe266c67616af1302" + resolved "https://registry.npmjs.org/lodash.uniqby/-/lodash.uniqby-4.7.0.tgz#d99c07a669e9e6d24e1362dfe266c67616af1302" integrity sha512-e/zcLx6CSbmaEgFHCA7BnoQKyCtKMxnuWrJygbwPs/AIn+IMKl66L8/s+wBUn5LRw2pZx3bUHibiV1b6aTWIww== lodash@^4.17.21, lodash@^4.17.4: version "4.17.21" - resolved 
"https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + resolved "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== log-update@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/log-update/-/log-update-4.0.0.tgz#589ecd352471f2a1c0c570287543a64dfd20e0a1" + resolved "https://registry.npmjs.org/log-update/-/log-update-4.0.0.tgz#589ecd352471f2a1c0c570287543a64dfd20e0a1" integrity sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg== dependencies: ansi-escapes "^4.3.0" @@ -4887,91 +5095,91 @@ log-update@^4.0.0: lru-cache@^5.1.1: version "5.1.1" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" + resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== dependencies: yallist "^3.0.2" lru-cache@^6.0.0: version "6.0.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== dependencies: yallist "^4.0.0" -lru-cache@^9.1.1: - version "9.1.2" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-9.1.2.tgz#255fdbc14b75589d6d0e73644ca167a8db506835" - integrity sha512-ERJq3FOzJTxBbFjZ7iDs+NiK4VI9Wz+RdrrAB8dio1oV+YvdPzUEE4QNiT2VD51DkIbCYRUUzCRkssXCHqSnKQ== +"lru-cache@^9.1.1 || ^10.0.0": + version "10.0.1" + resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz#0a3be479df549cca0e5d693ac402ff19537a6b7a" + 
integrity sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g== lru-queue@^0.1.0: version "0.1.0" - resolved "https://registry.yarnpkg.com/lru-queue/-/lru-queue-0.1.0.tgz#2738bd9f0d3cf4f84490c5736c48699ac632cda3" + resolved "https://registry.npmjs.org/lru-queue/-/lru-queue-0.1.0.tgz#2738bd9f0d3cf4f84490c5736c48699ac632cda3" integrity sha512-BpdYkt9EvGl8OfWHDQPISVpcl5xZthb+XPsbELj5AQXxIC8IriDZIQYjBJPEm5rS420sjZ0TLEzRcq5KdBhYrQ== dependencies: es5-ext "~0.10.2" lunr@^2.3.9: version "2.3.9" - resolved "https://registry.yarnpkg.com/lunr/-/lunr-2.3.9.tgz#18b123142832337dd6e964df1a5a7707b25d35e1" + resolved "https://registry.npmjs.org/lunr/-/lunr-2.3.9.tgz#18b123142832337dd6e964df1a5a7707b25d35e1" integrity sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow== -make-dir@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" - integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== +make-dir@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz#c3c2307a771277cd9638305f915c29ae741b614e" + integrity sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw== dependencies: - semver "^6.0.0" + semver "^7.5.3" make-error@1.x, make-error@^1.1.1: version "1.3.6" - resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" + resolved "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== make-iterator@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/make-iterator/-/make-iterator-1.0.1.tgz#29b33f312aa8f547c4a5e490f56afcec99133ad6" + resolved 
"https://registry.npmjs.org/make-iterator/-/make-iterator-1.0.1.tgz#29b33f312aa8f547c4a5e490f56afcec99133ad6" integrity sha512-pxiuXh0iVEq7VM7KMIhs5gxsfxCux2URptUQaXo4iZZJxBAzTPOLE2BumO5dbfVYq/hBJFBR/a1mFDmOx5AGmw== dependencies: kind-of "^6.0.2" makeerror@1.0.12: version "1.0.12" - resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" + resolved "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== dependencies: tmpl "1.0.5" map-cache@^0.2.0, map-cache@^0.2.2: version "0.2.2" - resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" + resolved "https://registry.npmjs.org/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" integrity sha512-8y/eV9QQZCiyn1SprXSrCmqJN0yNRATe+PO8ztwqrvrbdRLA3eYJF0yaR0YayLWkMbsQSKWS9N2gPcGEc4UsZg== map-obj@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" + resolved "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" integrity sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg== map-obj@^4.1.0: version "4.3.0" - resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-4.3.0.tgz#9304f906e93faae70880da102a9f1df0ea8bb05a" + resolved "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz#9304f906e93faae70880da102a9f1df0ea8bb05a" integrity sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ== map-visit@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" + resolved 
"https://registry.npmjs.org/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" integrity sha512-4y7uGv8bd2WdM9vpQsiQNo41Ln1NvhvDRuVt0k2JZQ+ezN2uaQes7lZeZ+QQUHOLQAtDaBJ+7wCbi+ab/KFs+w== dependencies: object-visit "^1.0.0" marked@^4.3.0: version "4.3.0" - resolved "https://registry.yarnpkg.com/marked/-/marked-4.3.0.tgz#796362821b019f734054582038b116481b456cf3" + resolved "https://registry.npmjs.org/marked/-/marked-4.3.0.tgz#796362821b019f734054582038b116481b456cf3" integrity sha512-PRsaiG84bK+AMvxziE/lCFss8juXjNaWzVbN5tXAm4XjeaS9NAHhop+PjQxz2A9h8Q4M/xGmzP8vqNwy6JeK0A== matchdep@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/matchdep/-/matchdep-2.0.0.tgz#c6f34834a0d8dbc3b37c27ee8bbcb27c7775582e" + resolved "https://registry.npmjs.org/matchdep/-/matchdep-2.0.0.tgz#c6f34834a0d8dbc3b37c27ee8bbcb27c7775582e" integrity sha512-LFgVbaHIHMqCRuCZyfCtUOq9/Lnzhi7Z0KFUE2fhD54+JN2jLh3hC02RLkqauJ3U4soU6H1J3tfj/Byk7GoEjA== dependencies: findup-sync "^2.0.0" @@ -4981,7 +5189,7 @@ matchdep@^2.0.0: math-random@^1.0.1: version "1.0.4" - resolved "https://registry.yarnpkg.com/math-random/-/math-random-1.0.4.tgz#5dd6943c938548267016d4e34f057583080c514c" + resolved "https://registry.npmjs.org/math-random/-/math-random-1.0.4.tgz#5dd6943c938548267016d4e34f057583080c514c" integrity sha512-rUxjysqif/BZQH2yhd5Aaq7vXMSx9NdEsQcyA07uEzIvxgI7zIr33gGsh+RU0/XjmQpCW7RsVof1vlkvQVCK5A== memfs@4.5.0: @@ -4994,7 +5202,7 @@ memfs@4.5.0: memoizee@0.4.X: version "0.4.15" - resolved "https://registry.yarnpkg.com/memoizee/-/memoizee-0.4.15.tgz#e6f3d2da863f318d02225391829a6c5956555b72" + resolved "https://registry.npmjs.org/memoizee/-/memoizee-0.4.15.tgz#e6f3d2da863f318d02225391829a6c5956555b72" integrity sha512-UBWmJpLZd5STPm7PMUlOw/TSy972M+z8gcyQ5veOnSDRREz/0bmpyTfKt3/51DhEBqCZQn1udM/5flcSPYhkdQ== dependencies: d "^1.0.1" @@ -5008,7 +5216,7 @@ memoizee@0.4.X: memory-fs@^0.5.0: version "0.5.0" - resolved 
"https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.5.0.tgz#324c01288b88652966d161db77838720845a8e3c" + resolved "https://registry.npmjs.org/memory-fs/-/memory-fs-0.5.0.tgz#324c01288b88652966d161db77838720845a8e3c" integrity sha512-jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA== dependencies: errno "^0.1.3" @@ -5016,7 +5224,7 @@ memory-fs@^0.5.0: meow@^10.1.3: version "10.1.5" - resolved "https://registry.yarnpkg.com/meow/-/meow-10.1.5.tgz#be52a1d87b5f5698602b0f32875ee5940904aa7f" + resolved "https://registry.npmjs.org/meow/-/meow-10.1.5.tgz#be52a1d87b5f5698602b0f32875ee5940904aa7f" integrity sha512-/d+PQ4GKmGvM9Bee/DPa8z3mXs/pkvJE2KEThngVNOqtmljC6K7NMPxtc2JeZYTmpWb9k/TmxjeL18ez3h7vCw== dependencies: "@types/minimist" "^1.2.2" @@ -5034,17 +5242,17 @@ meow@^10.1.3: merge-stream@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" + resolved "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== merge2@^1.3.0, merge2@^1.4.1: version "1.4.1" - resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + resolved "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== micromatch@^3.0.4, micromatch@^3.1.10, micromatch@^3.1.4: version "3.1.10" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" + resolved "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== dependencies: 
arr-diff "^4.0.0" @@ -5063,7 +5271,7 @@ micromatch@^3.0.4, micromatch@^3.1.10, micromatch@^3.1.4: micromatch@^4.0.2, micromatch@^4.0.4: version "4.0.5" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" + resolved "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== dependencies: braces "^3.0.2" @@ -5071,43 +5279,43 @@ micromatch@^4.0.2, micromatch@^4.0.4: mime-db@1.52.0: version "1.52.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== mime-types@^2.1.27: version "2.1.35" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== dependencies: mime-db "1.52.0" mimic-fn@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" + resolved "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== min-indent@^1.0.0, min-indent@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869" + resolved "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869" 
integrity sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg== minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: version "3.1.2" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== dependencies: brace-expansion "^1.1.7" minimatch@^9.0.0, minimatch@^9.0.1: - version "9.0.1" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.1.tgz#8a555f541cf976c622daf078bb28f29fb927c253" - integrity sha512-0jWhJpD/MdhPXwPuiRkCbfYfSKp2qnn2eOc279qI7f+osl/l+prKSrvhg157zSYvx/1nmgn2NqdT6k2Z7zSH9w== + version "9.0.3" + resolved "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz#a6e00c3de44c3a542bfaae70abfc22420a6da825" + integrity sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg== dependencies: brace-expansion "^2.0.1" minimist-options@4.1.0: version "4.1.0" - resolved "https://registry.yarnpkg.com/minimist-options/-/minimist-options-4.1.0.tgz#c0655713c53a8a2ebd77ffa247d342c40f010619" + resolved "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz#c0655713c53a8a2ebd77ffa247d342c40f010619" integrity sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A== dependencies: arrify "^1.0.1" @@ -5116,17 +5324,22 @@ minimist-options@4.1.0: minimist@1.x: version "1.2.8" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" + resolved "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== "minipass@^5.0.0 || ^6.0.2": version 
"6.0.2" - resolved "https://registry.yarnpkg.com/minipass/-/minipass-6.0.2.tgz#542844b6c4ce95b202c0995b0a471f1229de4c81" + resolved "https://registry.npmjs.org/minipass/-/minipass-6.0.2.tgz#542844b6c4ce95b202c0995b0a471f1229de4c81" integrity sha512-MzWSV5nYVT7mVyWCwn2o7JH13w2TBRmmSqSRCKzTw+lmft9X4z+3wjvs06Tzijo5z4W/kahUCDpRXTF+ZrmF/w== +"minipass@^5.0.0 || ^6.0.2 || ^7.0.0": + version "7.0.3" + resolved "https://registry.npmjs.org/minipass/-/minipass-7.0.3.tgz#05ea638da44e475037ed94d1c7efcc76a25e1974" + integrity sha512-LhbbwCfz3vsb12j/WkWQPZfKTsgqIe1Nf/ti1pKjYESGLHIVjWU96G9/ljLH4F9mWNVhlQOm0VySdAWzf05dpg== + mixin-deep@^1.2.0: version "1.3.2" - resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" + resolved "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== dependencies: for-in "^1.0.2" @@ -5134,32 +5347,32 @@ mixin-deep@^1.2.0: mkdirp@3.0.1: version "3.0.1" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-3.0.1.tgz#e44e4c5607fb279c168241713cc6e0fea9adcb50" + resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz#e44e4c5607fb279c168241713cc6e0fea9adcb50" integrity sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg== mrmime@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/mrmime/-/mrmime-1.0.1.tgz#5f90c825fad4bdd41dc914eff5d1a8cfdaf24f27" + resolved "https://registry.npmjs.org/mrmime/-/mrmime-1.0.1.tgz#5f90c825fad4bdd41dc914eff5d1a8cfdaf24f27" integrity sha512-hzzEagAgDyoU1Q6yg5uI+AorQgdvMCur3FcKf7NhMKWsaYg+RnbTyHRa/9IlLF9rf455MOCtcqqrQQ83pPP7Uw== ms@2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + resolved "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" 
integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== ms@2.1.2: version "2.1.2" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + resolved "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== ms@^2.1.1: version "2.1.3" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + resolved "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== multistream@4.1.0: version "4.1.0" - resolved "https://registry.yarnpkg.com/multistream/-/multistream-4.1.0.tgz#7bf00dfd119556fbc153cff3de4c6d477909f5a8" + resolved "https://registry.npmjs.org/multistream/-/multistream-4.1.0.tgz#7bf00dfd119556fbc153cff3de4c6d477909f5a8" integrity sha512-J1XDiAmmNpRCBfIWJv+n0ymC4ABcf/Pl+5YvC5B/D2f/2+8PtHvCNxMPKiQcZyi922Hq69J2YOpb1pTywfifyw== dependencies: once "^1.4.0" @@ -5167,17 +5380,17 @@ multistream@4.1.0: mute-stdout@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/mute-stdout/-/mute-stdout-1.0.1.tgz#acb0300eb4de23a7ddeec014e3e96044b3472331" + resolved "https://registry.npmjs.org/mute-stdout/-/mute-stdout-1.0.1.tgz#acb0300eb4de23a7ddeec014e3e96044b3472331" integrity sha512-kDcwXR4PS7caBpuRYYBUz9iVixUk3anO3f5OYFiIPwK/20vCzKCHyKoulbiDY1S53zD2bxUpxN/IJ+TnXjfvxg== nan@^2.12.1: - version "2.17.0" - resolved "https://registry.yarnpkg.com/nan/-/nan-2.17.0.tgz#c0150a2368a182f033e9aa5195ec76ea41a199cb" - integrity sha512-2ZTgtl0nJsO0KQCjEpxcIr5D+Yv90plTitZt9JBfQvVJDS5seMl3FOvsh3+9CoYWXf/1l5OaZzzF6nDm4cagaQ== + version "2.18.0" + resolved "https://registry.npmjs.org/nan/-/nan-2.18.0.tgz#26a6faae7ffbeb293a39660e88a76b82e30b7554" + integrity 
sha512-W7tfG7vMOGtD30sHoZSSc/JVYiyDPEyQVso/Zz+/uQd0B0L46gtC+pHha5FFMRpil6fm/AoEcRWyOVi4+E/f8w== nanomatch@^1.2.9: version "1.2.13" - resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" + resolved "https://registry.npmjs.org/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== dependencies: arr-diff "^4.0.0" @@ -5194,37 +5407,37 @@ nanomatch@^1.2.9: natural-compare-lite@^1.4.0: version "1.4.0" - resolved "https://registry.yarnpkg.com/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz#17b09581988979fddafe0201e931ba933c96cbb4" + resolved "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz#17b09581988979fddafe0201e931ba933c96cbb4" integrity sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g== natural-compare@^1.4.0: version "1.4.0" - resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + resolved "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== neo-async@^2.6.2: version "2.6.2" - resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" + resolved "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== next-tick@1, next-tick@^1.1.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.1.0.tgz#1836ee30ad56d67ef281b22bd199f709449b35eb" + resolved 
"https://registry.npmjs.org/next-tick/-/next-tick-1.1.0.tgz#1836ee30ad56d67ef281b22bd199f709449b35eb" integrity sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ== node-int64@^0.4.0: version "0.4.0" - resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" + resolved "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== -node-releases@^2.0.12: - version "2.0.12" - resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.12.tgz#35627cc224a23bfb06fb3380f2b3afaaa7eb1039" - integrity sha512-QzsYKWhXTWx8h1kIvqfnC++o0pEmpRQA/aenALsL2F4pqNVr7YzcdMlDij5WBnwftRbJCNJL/O7zdKaxKPHqgQ== +node-releases@^2.0.13: + version "2.0.13" + resolved "https://registry.npmjs.org/node-releases/-/node-releases-2.0.13.tgz#d5ed1627c23e3461e819b02e57b75e4899b1c81d" + integrity sha512-uYr7J37ae/ORWdZeQ1xxMJe3NtdmqMC/JZK+geofDrkLUApKRHPd18/TxtBOJ4A0/+uUIliorNrfYV6s1b02eQ== normalize-package-data@^2.3.2, normalize-package-data@^2.5.0: version "2.5.0" - resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" + resolved "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== dependencies: hosted-git-info "^2.1.4" @@ -5234,7 +5447,7 @@ normalize-package-data@^2.3.2, normalize-package-data@^2.5.0: normalize-package-data@^3.0.2: version "3.0.3" - resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-3.0.3.tgz#dbcc3e2da59509a0983422884cd172eefdfa525e" + resolved 
"https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz#dbcc3e2da59509a0983422884cd172eefdfa525e" integrity sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA== dependencies: hosted-git-info "^4.0.1" @@ -5244,43 +5457,43 @@ normalize-package-data@^3.0.2: normalize-path@^2.0.1, normalize-path@^2.1.1: version "2.1.1" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" + resolved "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" integrity sha512-3pKJwH184Xo/lnH6oyP1q2pMd7HcypqqmRs91/6/i2CGtWwIKGCkOOMTm/zXbgTEWHw1uNpNi/igc3ePOYHb6w== dependencies: remove-trailing-separator "^1.0.1" normalize-path@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + resolved "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== now-and-later@^2.0.0: version "2.0.1" - resolved "https://registry.yarnpkg.com/now-and-later/-/now-and-later-2.0.1.tgz#8e579c8685764a7cc02cb680380e94f43ccb1f7c" + resolved "https://registry.npmjs.org/now-and-later/-/now-and-later-2.0.1.tgz#8e579c8685764a7cc02cb680380e94f43ccb1f7c" integrity sha512-KGvQ0cB70AQfg107Xvs/Fbu+dGmZoTRJp2TaPwcwQm3/7PteUyN2BCgk8KBMPGBUXZdVwyWS8fDCGFygBm19UQ== dependencies: once "^1.3.2" npm-run-path@^4.0.1: version "4.0.1" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" + resolved "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== dependencies: 
path-key "^3.0.0" number-is-nan@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" + resolved "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" integrity sha512-4jbtZXNAsfZbAHiiqjLPBiCl16dES1zI4Hpzzxw61Tk+loF+sBDBKx1ICKKKwIqQ7M0mFn1TmkN7euSncWgHiQ== object-assign@4.X, object-assign@^4.0.1: version "4.1.1" - resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + resolved "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== object-copy@^0.1.0: version "0.1.0" - resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" + resolved "https://registry.npmjs.org/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" integrity sha512-79LYn6VAb63zgtmAteVOWo9Vdj71ZVBy3Pbse+VqxDpEP83XuujMrGqHIwAXJ5I/aM0zU7dIyIAhifVTPrNItQ== dependencies: copy-descriptor "^0.1.0" @@ -5289,19 +5502,19 @@ object-copy@^0.1.0: object-keys@^1.1.1: version "1.1.1" - resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + resolved "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== object-visit@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" + resolved "https://registry.npmjs.org/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" integrity 
sha512-GBaMwwAVK9qbQN3Scdo0OyvgPW7l3lnaVMj84uTOZlswkX0KpF6fyDBJhtTthf7pymztoN36/KEr1DyhF96zEA== dependencies: isobject "^3.0.0" object.assign@^4.0.4, object.assign@^4.1.0: version "4.1.4" - resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.4.tgz#9673c7c7c351ab8c4d0b516f4343ebf4dfb7799f" + resolved "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz#9673c7c7c351ab8c4d0b516f4343ebf4dfb7799f" integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ== dependencies: call-bind "^1.0.2" @@ -5311,7 +5524,7 @@ object.assign@^4.0.4, object.assign@^4.1.0: object.defaults@^1.0.0, object.defaults@^1.1.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/object.defaults/-/object.defaults-1.1.0.tgz#3a7f868334b407dea06da16d88d5cd29e435fecf" + resolved "https://registry.npmjs.org/object.defaults/-/object.defaults-1.1.0.tgz#3a7f868334b407dea06da16d88d5cd29e435fecf" integrity sha512-c/K0mw/F11k4dEUBMW8naXUuBuhxRCfG7W+yFy8EcijU/rSmazOUd1XAEEe6bC0OuXY4HUKjTJv7xbxIMqdxrA== dependencies: array-each "^1.0.1" @@ -5321,7 +5534,7 @@ object.defaults@^1.0.0, object.defaults@^1.1.0: object.map@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/object.map/-/object.map-1.0.1.tgz#cf83e59dc8fcc0ad5f4250e1f78b3b81bd801d37" + resolved "https://registry.npmjs.org/object.map/-/object.map-1.0.1.tgz#cf83e59dc8fcc0ad5f4250e1f78b3b81bd801d37" integrity sha512-3+mAJu2PLfnSVGHwIWubpOFLscJANBKuB/6A4CxBstc4aqwQY0FWcsppuy4jU5GSB95yES5JHSI+33AWuS4k6w== dependencies: for-own "^1.0.0" @@ -5329,14 +5542,14 @@ object.map@^1.0.0: object.pick@^1.2.0, object.pick@^1.3.0: version "1.3.0" - resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" + resolved "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" integrity 
sha512-tqa/UMy/CCoYmj+H5qc07qvSL9dqcs/WZENZ1JbtWBlATP+iVOe778gE6MSijnyCnORzDuX6hU+LA4SZ09YjFQ== dependencies: isobject "^3.0.1" object.reduce@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/object.reduce/-/object.reduce-1.0.1.tgz#6fe348f2ac7fa0f95ca621226599096825bb03ad" + resolved "https://registry.npmjs.org/object.reduce/-/object.reduce-1.0.1.tgz#6fe348f2ac7fa0f95ca621226599096825bb03ad" integrity sha512-naLhxxpUESbNkRqc35oQ2scZSJueHGQNUfMW/0U37IgN6tE2dgDWg3whf+NEliy3F/QysrO48XKUz/nGPe+AQw== dependencies: for-own "^1.0.0" @@ -5344,106 +5557,106 @@ object.reduce@^1.0.0: once@^1.3.0, once@^1.3.1, once@^1.3.2, once@^1.4.0: version "1.4.0" - resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + resolved "https://registry.npmjs.org/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== dependencies: wrappy "1" onetime@^5.1.0, onetime@^5.1.2: version "5.1.2" - resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" + resolved "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== dependencies: mimic-fn "^2.1.0" opener@^1.5.2: version "1.5.2" - resolved "https://registry.yarnpkg.com/opener/-/opener-1.5.2.tgz#5d37e1f35077b9dcac4301372271afdeb2a13598" + resolved "https://registry.npmjs.org/opener/-/opener-1.5.2.tgz#5d37e1f35077b9dcac4301372271afdeb2a13598" integrity sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A== -optionator@^0.9.1: - version "0.9.1" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" - integrity 
sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== +optionator@^0.9.3: + version "0.9.3" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.3.tgz#007397d44ed1872fdc6ed31360190f81814e2c64" + integrity sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg== dependencies: + "@aashutoshrathi/word-wrap" "^1.2.3" deep-is "^0.1.3" fast-levenshtein "^2.0.6" levn "^0.4.1" prelude-ls "^1.2.1" type-check "^0.4.0" - word-wrap "^1.2.3" ordered-read-streams@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/ordered-read-streams/-/ordered-read-streams-1.0.1.tgz#77c0cb37c41525d64166d990ffad7ec6a0e1363e" + resolved "https://registry.npmjs.org/ordered-read-streams/-/ordered-read-streams-1.0.1.tgz#77c0cb37c41525d64166d990ffad7ec6a0e1363e" integrity sha512-Z87aSjx3r5c0ZB7bcJqIgIRX5bxR7A4aSzvIbaxd0oTkWBCOoKfuGHiKj60CHVUgg1Phm5yMZzBdt8XqRs73Mw== dependencies: readable-stream "^2.0.1" os-locale@^1.4.0: version "1.4.0" - resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-1.4.0.tgz#20f9f17ae29ed345e8bde583b13d2009803c14d9" + resolved "https://registry.npmjs.org/os-locale/-/os-locale-1.4.0.tgz#20f9f17ae29ed345e8bde583b13d2009803c14d9" integrity sha512-PRT7ZORmwu2MEFt4/fv3Q+mEfN4zetKxufQrkShY2oGvUms9r8otu5HfdyIFHkYXjO7laNsoVGmM2MANfuTA8g== dependencies: lcid "^1.0.0" p-limit@^2.2.0: version "2.3.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" + resolved "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== dependencies: p-try "^2.0.0" p-limit@^3.0.2, p-limit@^3.1.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" + resolved 
"https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== dependencies: yocto-queue "^0.1.0" p-locate@^4.1.0: version "4.1.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" + resolved "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== dependencies: p-limit "^2.2.0" p-locate@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" + resolved "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== dependencies: p-limit "^3.0.2" p-map@^5.5.0: version "5.5.0" - resolved "https://registry.yarnpkg.com/p-map/-/p-map-5.5.0.tgz#054ca8ca778dfa4cf3f8db6638ccb5b937266715" + resolved "https://registry.npmjs.org/p-map/-/p-map-5.5.0.tgz#054ca8ca778dfa4cf3f8db6638ccb5b937266715" integrity sha512-VFqfGDHlx87K66yZrNdI4YGtD70IRyd+zSvgks6mzHPRNkoKy+9EKP4SFC77/vTTQYmRmti7dvqC+m5jBrBAcg== dependencies: aggregate-error "^4.0.0" p-try@^2.0.0: version "2.2.0" - resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + resolved "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== pad-left@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/pad-left/-/pad-left-2.1.0.tgz#16e6a3b2d44a8e138cb0838cc7cb403a4fc9e994" + resolved 
"https://registry.npmjs.org/pad-left/-/pad-left-2.1.0.tgz#16e6a3b2d44a8e138cb0838cc7cb403a4fc9e994" integrity sha512-HJxs9K9AztdIQIAIa/OIazRAUW/L6B9hbQDxO4X07roW3eo9XqZc2ur9bn1StH9CnbbI9EgvejHQX7CBpCF1QA== dependencies: repeat-string "^1.5.4" parent-module@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + resolved "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== dependencies: callsites "^3.0.0" parse-filepath@^1.0.1: version "1.0.2" - resolved "https://registry.yarnpkg.com/parse-filepath/-/parse-filepath-1.0.2.tgz#a632127f53aaf3d15876f5872f3ffac763d6c891" + resolved "https://registry.npmjs.org/parse-filepath/-/parse-filepath-1.0.2.tgz#a632127f53aaf3d15876f5872f3ffac763d6c891" integrity sha512-FwdRXKCohSVeXqwtYonZTXtbGJKrn+HNyWDYVcp5yuJlesTwNH4rsmRZ+GrKAPJ5bLpRxESMeS+Rl0VCHRvB2Q== dependencies: is-absolute "^1.0.0" @@ -5452,14 +5665,14 @@ parse-filepath@^1.0.1: parse-json@^2.2.0: version "2.2.0" - resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" + resolved "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" integrity sha512-QR/GGaKCkhwk1ePQNYDRKYZ3mwU9ypsKhB0XyFnLQdomyEqk3e8wpW3V5Jp88zbxK4n5ST1nqo+g9juTpownhQ== dependencies: error-ex "^1.2.0" parse-json@^5.0.0, parse-json@^5.2.0: version "5.2.0" - resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" + resolved "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== dependencies: "@babel/code-frame" "^7.0.0" @@ -5469,74 +5682,74 
@@ parse-json@^5.0.0, parse-json@^5.2.0: parse-node-version@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/parse-node-version/-/parse-node-version-1.0.1.tgz#e2b5dbede00e7fa9bc363607f53327e8b073189b" + resolved "https://registry.npmjs.org/parse-node-version/-/parse-node-version-1.0.1.tgz#e2b5dbede00e7fa9bc363607f53327e8b073189b" integrity sha512-3YHlOa/JgH6Mnpr05jP9eDG254US9ek25LyIxZlDItp2iJtwyaXQb57lBYLdT3MowkUFYEV2XXNAYIPlESvJlA== parse-passwd@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" + resolved "https://registry.npmjs.org/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" integrity sha512-1Y1A//QUXEZK7YKz+rD9WydcE1+EuPr6ZBgKecAB8tmoW6UFv0NREVJe1p+jRxtThkcbbKkfwIbWJe/IeE6m2Q== pascalcase@^0.1.1: version "0.1.1" - resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" + resolved "https://registry.npmjs.org/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" integrity sha512-XHXfu/yOQRy9vYOtUDVMN60OEJjW013GoObG1o+xwQTpB9eYJX/BjXMsdW13ZDPruFhYYn0AG22w0xgQMwl3Nw== path-dirname@^1.0.0: version "1.0.2" - resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" + resolved "https://registry.npmjs.org/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" integrity sha512-ALzNPpyNq9AqXMBjeymIjFDAkAFH06mHJH/cSBHAgU0s4vfpBn6b2nf8tiRLvagKD8RbTpq2FKTBg7cl9l3c7Q== path-exists@^2.0.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" + resolved "https://registry.npmjs.org/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" integrity sha512-yTltuKuhtNeFJKa1PiRzfLAU5182q1y4Eb4XCJ3PBqyzEDkAZRzBrKKBct682ls9reBVHf9udYLN5Nd+K1B9BQ== dependencies: pinkie-promise 
"^2.0.0" path-exists@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + resolved "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== path-is-absolute@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + resolved "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== path-key@^3.0.0, path-key@^3.1.0: version "3.1.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + resolved "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== path-parse@^1.0.7: version "1.0.7" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + resolved "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== path-root-regex@^0.1.0: version "0.1.2" - resolved "https://registry.yarnpkg.com/path-root-regex/-/path-root-regex-0.1.2.tgz#bfccdc8df5b12dc52c8b43ec38d18d72c04ba96d" + resolved "https://registry.npmjs.org/path-root-regex/-/path-root-regex-0.1.2.tgz#bfccdc8df5b12dc52c8b43ec38d18d72c04ba96d" integrity sha512-4GlJ6rZDhQZFE0DPVKh0e9jmZ5egZfxTkp7bcRDuPlJXbAwhxcl2dINPUAsjLdejqaLsCeg8axcLjIbvBjN4pQ== path-root@^0.1.1: version "0.1.1" - resolved 
"https://registry.yarnpkg.com/path-root/-/path-root-0.1.1.tgz#9a4a6814cac1c0cd73360a95f32083c8ea4745b7" + resolved "https://registry.npmjs.org/path-root/-/path-root-0.1.1.tgz#9a4a6814cac1c0cd73360a95f32083c8ea4745b7" integrity sha512-QLcPegTHF11axjfojBIoDygmS2E3Lf+8+jI6wOVmNVenrKSo3mFdSGiIgdSHenczw3wPtlVMQaFVwGmM7BJdtg== dependencies: path-root-regex "^0.1.0" path-scurry@^1.7.0: - version "1.9.2" - resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.9.2.tgz#90f9d296ac5e37e608028e28a447b11d385b3f63" - integrity sha512-qSDLy2aGFPm8i4rsbHd4MNyTcrzHFsLQykrtbuGRknZZCBBVXSv2tSCDN2Cg6Rt/GFRw8GoW9y9Ecw5rIPG1sg== + version "1.10.1" + resolved "https://registry.npmjs.org/path-scurry/-/path-scurry-1.10.1.tgz#9ba6bf5aa8500fe9fd67df4f0d9483b2b0bfc698" + integrity sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ== dependencies: - lru-cache "^9.1.1" - minipass "^5.0.0 || ^6.0.2" + lru-cache "^9.1.1 || ^10.0.0" + minipass "^5.0.0 || ^6.0.2 || ^7.0.0" path-type@^1.0.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" + resolved "https://registry.npmjs.org/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" integrity sha512-S4eENJz1pkiQn9Znv33Q+deTOKmbl+jj1Fl+qiP/vYezj+S8x+J3Uo0ISrx/QoEvIlOaDWJhPaRd1flJ9HXZqg== dependencies: graceful-fs "^4.1.2" @@ -5545,61 +5758,61 @@ path-type@^1.0.0: path-type@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + resolved "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== picocolors@^0.2.1: version "0.2.1" - resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-0.2.1.tgz#570670f793646851d1ba135996962abad587859f" + resolved 
"https://registry.npmjs.org/picocolors/-/picocolors-0.2.1.tgz#570670f793646851d1ba135996962abad587859f" integrity sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA== picocolors@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" + resolved "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== picomatch@^2.0.4, picomatch@^2.2.3, picomatch@^2.3.1: version "2.3.1" - resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + resolved "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== pify@^2.0.0: version "2.3.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" + resolved "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog== pinkie-promise@^2.0.0: version "2.0.1" - resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" + resolved "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" integrity sha512-0Gni6D4UcLTbv9c57DfxDGdr41XfgUjqWZu492f0cIGr16zDU06BWP/RAEvOuo7CQ0CNjHaLlM59YJJFm3NWlw== dependencies: pinkie "^2.0.0" pinkie@^2.0.0: version "2.0.4" - resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" + resolved "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" 
integrity sha512-MnUuEycAemtSaeFSjXKW/aroV7akBbY+Sv+RkyqFjgAe73F+MR0TBWKBRDkmfWq/HiFmdavfZ1G7h4SPZXaCSg== pirates@^4.0.4: - version "4.0.5" - resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" - integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== + version "4.0.6" + resolved "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz#3018ae32ecfcff6c29ba2267cbf21166ac1f36b9" + integrity sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg== pkg-dir@^4.2.0: version "4.2.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" + resolved "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== dependencies: find-up "^4.0.0" platform@^1.3.3: version "1.3.6" - resolved "https://registry.yarnpkg.com/platform/-/platform-1.3.6.tgz#48b4ce983164b209c2d45a107adb31f473a6e7a7" + resolved "https://registry.npmjs.org/platform/-/platform-1.3.6.tgz#48b4ce983164b209c2d45a107adb31f473a6e7a7" integrity sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg== plugin-error@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/plugin-error/-/plugin-error-1.0.1.tgz#77016bd8919d0ac377fdcdd0322328953ca5781c" + resolved "https://registry.npmjs.org/plugin-error/-/plugin-error-1.0.1.tgz#77016bd8919d0ac377fdcdd0322328953ca5781c" integrity sha512-L1zP0dk7vGweZME2i+EeakvUNqSrdiI3F91TwEoYiGrAfUXmVv6fJIq4g82PAXxNsWOp0J7ZqQy/3Szz0ajTxA== dependencies: ansi-colors "^1.0.1" @@ -5609,24 +5822,24 @@ plugin-error@^1.0.1: plugin-error@^2.0.1: version "2.0.1" - resolved "https://registry.yarnpkg.com/plugin-error/-/plugin-error-2.0.1.tgz#f2ac92bac8c85e3e23492d76d0c3ca12f30eb00b" + resolved 
"https://registry.npmjs.org/plugin-error/-/plugin-error-2.0.1.tgz#f2ac92bac8c85e3e23492d76d0c3ca12f30eb00b" integrity sha512-zMakqvIDyY40xHOvzXka0kUvf40nYIuwRE8dWhti2WtjQZ31xAgBZBhxsK7vK3QbRXS1Xms/LO7B5cuAsfB2Gg== dependencies: ansi-colors "^1.0.1" pluralize@^8.0.0: version "8.0.0" - resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-8.0.0.tgz#1a6fa16a38d12a1901e0320fa017051c539ce3b1" + resolved "https://registry.npmjs.org/pluralize/-/pluralize-8.0.0.tgz#1a6fa16a38d12a1901e0320fa017051c539ce3b1" integrity sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA== posix-character-classes@^0.1.0: version "0.1.1" - resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" + resolved "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" integrity sha512-xTgYBc3fuo7Yt7JbiuFxSYGToMoz8fLoE6TC9Wx1P/u+LfeThMOAqmuyECnlBaaJb+u1m9hHiXUEtwW4OzfUJg== postcss@^7.0.16: version "7.0.39" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.39.tgz#9624375d965630e2e1f2c02a935c82a59cb48309" + resolved "https://registry.npmjs.org/postcss/-/postcss-7.0.39.tgz#9624375d965630e2e1f2c02a935c82a59cb48309" integrity sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA== dependencies: picocolors "^0.2.1" @@ -5634,38 +5847,38 @@ postcss@^7.0.16: prelude-ls@^1.2.1: version "1.2.1" - resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + resolved "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== -pretty-format@^29.0.0, pretty-format@^29.6.2: - version "29.6.2" - resolved 
"https://registry.yarnpkg.com/pretty-format/-/pretty-format-29.6.2.tgz#3d5829261a8a4d89d8b9769064b29c50ed486a47" - integrity sha512-1q0oC8eRveTg5nnBEWMXAU2qpv65Gnuf2eCQzSjxpWFkPaPARwqZZDGuNE0zPAZfTCHzIk3A8dIjwlQKKLphyg== +pretty-format@^29.0.0, pretty-format@^29.7.0: + version "29.7.0" + resolved "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz#ca42c758310f365bfa71a0bda0a807160b776812" + integrity sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ== dependencies: - "@jest/schemas" "^29.6.0" + "@jest/schemas" "^29.6.3" ansi-styles "^5.0.0" react-is "^18.0.0" pretty-hrtime@^1.0.0: version "1.0.3" - resolved "https://registry.yarnpkg.com/pretty-hrtime/-/pretty-hrtime-1.0.3.tgz#b7e3ea42435a4c9b2759d99e0f201eb195802ee1" + resolved "https://registry.npmjs.org/pretty-hrtime/-/pretty-hrtime-1.0.3.tgz#b7e3ea42435a4c9b2759d99e0f201eb195802ee1" integrity sha512-66hKPCr+72mlfiSjlEB1+45IjXSqvVAIy6mocupoww4tBFE9R9IhwwUGoI4G++Tc9Aq+2rxOt0RFU6gPcrte0A== process-nextick-args@^2.0.0, process-nextick-args@~2.0.0: version "2.0.1" - resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + resolved "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== promise@^8.0.1: version "8.3.0" - resolved "https://registry.yarnpkg.com/promise/-/promise-8.3.0.tgz#8cb333d1edeb61ef23869fbb8a4ea0279ab60e0a" + resolved "https://registry.npmjs.org/promise/-/promise-8.3.0.tgz#8cb333d1edeb61ef23869fbb8a4ea0279ab60e0a" integrity sha512-rZPNPKTOYVNEEKFaq1HqTgOwZD+4/YHS5ukLzQCypkj+OkYx7iv0mA91lJlpPPZ8vMau3IIGj5Qlwrx+8iiSmg== dependencies: asap "~2.0.6" prompts@^2.0.1: version "2.4.2" - resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" + 
resolved "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== dependencies: kleur "^3.0.3" @@ -5673,12 +5886,12 @@ prompts@^2.0.1: prr@~1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" + resolved "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" integrity sha512-yPw4Sng1gWghHQWj0B3ZggWUm4qVbPwPFcRG8KyxiU7J2OHFSoEHKS+EZ3fv5l1t9CyCiop6l/ZYeWbrgoQejw== pump@^2.0.0: version "2.0.1" - resolved "https://registry.yarnpkg.com/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909" + resolved "https://registry.npmjs.org/pump/-/pump-2.0.1.tgz#12399add6e4cf7526d973cbc8b5ce2e2908b3909" integrity sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA== dependencies: end-of-stream "^1.1.0" @@ -5686,7 +5899,7 @@ pump@^2.0.0: pumpify@^1.3.5: version "1.5.1" - resolved "https://registry.yarnpkg.com/pumpify/-/pumpify-1.5.1.tgz#36513be246ab27570b1a374a5ce278bfd74370ce" + resolved "https://registry.npmjs.org/pumpify/-/pumpify-1.5.1.tgz#36513be246ab27570b1a374a5ce278bfd74370ce" integrity sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ== dependencies: duplexify "^3.6.0" @@ -5695,32 +5908,32 @@ pumpify@^1.3.5: punycode@^2.1.0: version "2.3.0" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.0.tgz#f67fa67c94da8f4d0cfff981aee4118064199b8f" + resolved "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz#f67fa67c94da8f4d0cfff981aee4118064199b8f" integrity sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA== pure-rand@^6.0.0: - version "6.0.2" - resolved "https://registry.yarnpkg.com/pure-rand/-/pure-rand-6.0.2.tgz#a9c2ddcae9b68d736a8163036f088a2781c8b306" - integrity 
sha512-6Yg0ekpKICSjPswYOuC5sku/TSWaRYlA0qsXqJgM/d/4pLPHPuTxK7Nbf7jFKzAeedUhR8C7K9Uv63FBsSo8xQ== + version "6.0.3" + resolved "https://registry.npmjs.org/pure-rand/-/pure-rand-6.0.3.tgz#3c9e6b53c09e52ac3cedffc85ab7c1c7094b38cb" + integrity sha512-KddyFewCsO0j3+np81IQ+SweXLDnDQTs5s67BOnrYmYe/yNmUhttQyGsYzy8yUnoljGAQ9sl38YB4vH8ur7Y+w== queue-microtask@^1.2.2: version "1.2.3" - resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + resolved "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== queue-tick@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/queue-tick/-/queue-tick-1.0.1.tgz#f6f07ac82c1fd60f82e098b417a80e52f1f4c142" + resolved "https://registry.npmjs.org/queue-tick/-/queue-tick-1.0.1.tgz#f6f07ac82c1fd60f82e098b417a80e52f1f4c142" integrity sha512-kJt5qhMxoszgU/62PLP1CJytzd2NKetjSRnyuj31fDd3Rlcz3fzlFdFLD1SItunPwyqEOkca6GbV612BWfaBag== quick-lru@^5.1.1: version "5.1.1" - resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932" + resolved "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932" integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== randomatic@3.1.1: version "3.1.1" - resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-3.1.1.tgz#b776efc59375984e36c537b2f51a1f0aff0da1ed" + resolved "https://registry.npmjs.org/randomatic/-/randomatic-3.1.1.tgz#b776efc59375984e36c537b2f51a1f0aff0da1ed" integrity sha512-TuDE5KxZ0J461RVjrJZCJc+J+zCkTb1MbH9AQUq68sMhOMcy9jLcb3BrZKgp9q9Ncltdg4QVqWrH02W2EFFVYw== dependencies: is-number "^4.0.0" @@ -5729,19 +5942,19 @@ randomatic@3.1.1: randombytes@^2.1.0: version "2.1.0" - resolved 
"https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" + resolved "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== dependencies: safe-buffer "^5.1.0" react-is@^18.0.0: version "18.2.0" - resolved "https://registry.yarnpkg.com/react-is/-/react-is-18.2.0.tgz#199431eeaaa2e09f86427efbb4f1473edb47609b" + resolved "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz#199431eeaaa2e09f86427efbb4f1473edb47609b" integrity sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w== read-pkg-up@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" + resolved "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" integrity sha512-WD9MTlNtI55IwYUS27iHh9tK3YoIVhxis8yKhLpTqWtml739uXc9NWTpxoHkfZf3+DkCCsXox94/VWZniuZm6A== dependencies: find-up "^1.0.0" @@ -5749,7 +5962,7 @@ read-pkg-up@^1.0.1: read-pkg-up@^7.0.1: version "7.0.1" - resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-7.0.1.tgz#f3a6135758459733ae2b95638056e1854e7ef507" + resolved "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz#f3a6135758459733ae2b95638056e1854e7ef507" integrity sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg== dependencies: find-up "^4.1.0" @@ -5758,7 +5971,7 @@ read-pkg-up@^7.0.1: read-pkg-up@^8.0.0: version "8.0.0" - resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-8.0.0.tgz#72f595b65e66110f43b052dd9af4de6b10534670" + resolved "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-8.0.0.tgz#72f595b65e66110f43b052dd9af4de6b10534670" integrity 
sha512-snVCqPczksT0HS2EC+SxUndvSzn6LRCwpfSvLrIfR5BKDQQZMaI6jPRC9dYvYFDRAuFEAnkwww8kBBNE/3VvzQ== dependencies: find-up "^5.0.0" @@ -5767,7 +5980,7 @@ read-pkg-up@^8.0.0: read-pkg@^1.0.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" + resolved "https://registry.npmjs.org/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" integrity sha512-7BGwRHqt4s/uVbuyoeejRn4YmFnYZiFl4AuaeXHlgZf3sONF0SOGlxs2Pw8g6hCKupo08RafIO5YXFNOKTfwsQ== dependencies: load-json-file "^1.0.0" @@ -5776,7 +5989,7 @@ read-pkg@^1.0.0: read-pkg@^5.2.0: version "5.2.0" - resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-5.2.0.tgz#7bf295438ca5a33e56cd30e053b34ee7250c93cc" + resolved "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz#7bf295438ca5a33e56cd30e053b34ee7250c93cc" integrity sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg== dependencies: "@types/normalize-package-data" "^2.4.0" @@ -5786,7 +5999,7 @@ read-pkg@^5.2.0: read-pkg@^6.0.0: version "6.0.0" - resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-6.0.0.tgz#a67a7d6a1c2b0c3cd6aa2ea521f40c458a4a504c" + resolved "https://registry.npmjs.org/read-pkg/-/read-pkg-6.0.0.tgz#a67a7d6a1c2b0c3cd6aa2ea521f40c458a4a504c" integrity sha512-X1Fu3dPuk/8ZLsMhEj5f4wFAF0DWoK7qhGJvgaijocXxBmSToKfbFtqbxMO7bVjNA1dmE5huAzjXj/ey86iw9Q== dependencies: "@types/normalize-package-data" "^2.4.0" @@ -5796,7 +6009,7 @@ read-pkg@^6.0.0: "readable-stream@2 || 3", readable-stream@3, readable-stream@^3.6.0: version "3.6.2" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" + resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== dependencies: inherits "^2.0.3" 
@@ -5805,7 +6018,7 @@ read-pkg@^6.0.0: readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.5, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.5, readable-stream@^2.3.6, readable-stream@~2.3.6: version "2.3.8" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.8.tgz#91125e8042bba1b9887f49345f6277027ce8be9b" + resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz#91125e8042bba1b9887f49345f6277027ce8be9b" integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA== dependencies: core-util-is "~1.0.0" @@ -5818,7 +6031,7 @@ readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable readdirp@^2.2.1: version "2.2.1" - resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" + resolved "https://registry.npmjs.org/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== dependencies: graceful-fs "^4.1.11" @@ -5827,22 +6040,27 @@ readdirp@^2.2.1: rechoir@^0.6.2: version "0.6.2" - resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384" + resolved "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384" integrity sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw== dependencies: resolve "^1.1.6" redent@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/redent/-/redent-4.0.0.tgz#0c0ba7caabb24257ab3bb7a4fd95dd1d5c5681f9" + resolved "https://registry.npmjs.org/redent/-/redent-4.0.0.tgz#0c0ba7caabb24257ab3bb7a4fd95dd1d5c5681f9" integrity sha512-tYkDkVVtYkSVhuQ4zBgfvciymHaeuel+zFKXShfDnFP5SyVEP7qo70Rf1jTOTCx3vGNAbnEi/xFkcfQVMIBWag== dependencies: 
indent-string "^5.0.0" strip-indent "^4.0.0" +regenerator-runtime@0.14.0: + version "0.14.0" + resolved "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.0.tgz#5e19d68eb12d486f797e15a3c6a918f7cec5eb45" + integrity sha512-srw17NI0TUWHuGa5CFGGmhfNIeja30WMBfbslPNhf6JrqQlLN5gcrvig1oqPxiVaXb0oW0XRKtH6Nngs5lKCIA== + regex-not@^1.0.0, regex-not@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" + resolved "https://registry.npmjs.org/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== dependencies: extend-shallow "^3.0.2" @@ -5850,19 +6068,19 @@ regex-not@^1.0.0, regex-not@^1.0.2: regexp-tree@^0.1.24, regexp-tree@~0.1.1: version "0.1.27" - resolved "https://registry.yarnpkg.com/regexp-tree/-/regexp-tree-0.1.27.tgz#2198f0ef54518ffa743fe74d983b56ffd631b6cd" + resolved "https://registry.npmjs.org/regexp-tree/-/regexp-tree-0.1.27.tgz#2198f0ef54518ffa743fe74d983b56ffd631b6cd" integrity sha512-iETxpjK6YoRWJG5o6hXLwvjYAoW+FEZn9os0PD/b6AP6xQwsa/Y7lCVgIixBbUPMfhu+i2LtdeAqVTgGlQarfA== regjsparser@^0.10.0: version "0.10.0" - resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.10.0.tgz#b1ed26051736b436f22fdec1c8f72635f9f44892" + resolved "https://registry.npmjs.org/regjsparser/-/regjsparser-0.10.0.tgz#b1ed26051736b436f22fdec1c8f72635f9f44892" integrity sha512-qx+xQGZVsy55CH0a1hiVwHmqjLryfh7wQyF5HO07XJ9f7dQMY/gPQHhlyDkIzJKC+x2fUCpCcUODUUUFrm7SHA== dependencies: jsesc "~0.5.0" remove-bom-buffer@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/remove-bom-buffer/-/remove-bom-buffer-3.0.0.tgz#c2bf1e377520d324f623892e33c10cac2c252b53" + resolved "https://registry.npmjs.org/remove-bom-buffer/-/remove-bom-buffer-3.0.0.tgz#c2bf1e377520d324f623892e33c10cac2c252b53" integrity 
sha512-8v2rWhaakv18qcvNeli2mZ/TMTL2nEyAKRvzo1WtnZBl15SHyEhrCu2/xKlJyUFKHiHgfXIyuY6g2dObJJycXQ== dependencies: is-buffer "^1.1.5" @@ -5870,7 +6088,7 @@ remove-bom-buffer@^3.0.0: remove-bom-stream@^1.2.0: version "1.2.0" - resolved "https://registry.yarnpkg.com/remove-bom-stream/-/remove-bom-stream-1.2.0.tgz#05f1a593f16e42e1fb90ebf59de8e569525f9523" + resolved "https://registry.npmjs.org/remove-bom-stream/-/remove-bom-stream-1.2.0.tgz#05f1a593f16e42e1fb90ebf59de8e569525f9523" integrity sha512-wigO8/O08XHb8YPzpDDT+QmRANfW6vLqxfaXm1YXhnFf3AkSLyjfG3GEFg4McZkmgL7KvCj5u2KczkvSP6NfHA== dependencies: remove-bom-buffer "^3.0.0" @@ -5879,32 +6097,32 @@ remove-bom-stream@^1.2.0: remove-trailing-separator@^1.0.1, remove-trailing-separator@^1.1.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" + resolved "https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" integrity sha512-/hS+Y0u3aOfIETiaiirUFwDBDzmXPvO+jAfKTitUngIPzdKc6Z0LoFjM/CK5PL4C+eKwHohlHAb6H0VFfmmUsw== repeat-element@^1.1.2: version "1.1.4" - resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.4.tgz#be681520847ab58c7568ac75fbfad28ed42d39e9" + resolved "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.4.tgz#be681520847ab58c7568ac75fbfad28ed42d39e9" integrity sha512-LFiNfRcSu7KK3evMyYOuCzv3L10TW7yC1G2/+StMjK8Y6Vqd2MG7r/Qjw4ghtuCOjFvlnms/iMmLqpvW/ES/WQ== repeat-string@^1.5.4, repeat-string@^1.6.1: version "1.6.1" - resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" + resolved "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" integrity sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w== replace-ext@^1.0.0: version "1.0.1" - 
resolved "https://registry.yarnpkg.com/replace-ext/-/replace-ext-1.0.1.tgz#2d6d996d04a15855d967443631dd5f77825b016a" + resolved "https://registry.npmjs.org/replace-ext/-/replace-ext-1.0.1.tgz#2d6d996d04a15855d967443631dd5f77825b016a" integrity sha512-yD5BHCe7quCgBph4rMQ+0KkIRKwWCrHDOX1p1Gp6HwjPM5kVoCdKGNhN7ydqqsX6lJEnQDKZ/tFMiEdQ1dvPEw== replace-ext@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/replace-ext/-/replace-ext-2.0.0.tgz#9471c213d22e1bcc26717cd6e50881d88f812b06" + resolved "https://registry.npmjs.org/replace-ext/-/replace-ext-2.0.0.tgz#9471c213d22e1bcc26717cd6e50881d88f812b06" integrity sha512-UszKE5KVK6JvyD92nzMn9cDapSk6w/CaFZ96CnmDMUqH9oowfxF/ZjRITD25H4DnOQClLA4/j7jLGXXLVKxAug== replace-homedir@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/replace-homedir/-/replace-homedir-1.0.0.tgz#e87f6d513b928dde808260c12be7fec6ff6e798c" + resolved "https://registry.npmjs.org/replace-homedir/-/replace-homedir-1.0.0.tgz#e87f6d513b928dde808260c12be7fec6ff6e798c" integrity sha512-CHPV/GAglbIB1tnQgaiysb8H2yCy8WQ7lcEwQ/eT+kLj0QHV8LnJW0zpqpE7RSkrMSRoa+EBoag86clf7WAgSg== dependencies: homedir-polyfill "^1.0.1" @@ -5913,7 +6131,7 @@ replace-homedir@^1.0.0: replacestream@^4.0.3: version "4.0.3" - resolved "https://registry.yarnpkg.com/replacestream/-/replacestream-4.0.3.tgz#3ee5798092be364b1cdb1484308492cb3dff2f36" + resolved "https://registry.npmjs.org/replacestream/-/replacestream-4.0.3.tgz#3ee5798092be364b1cdb1484308492cb3dff2f36" integrity sha512-AC0FiLS352pBBiZhd4VXB1Ab/lh0lEgpP+GGvZqbQh8a5cmXVoTe5EX/YeTFArnp4SRGTHh1qCHu9lGs1qG8sA== dependencies: escape-string-regexp "^1.0.3" @@ -5922,24 +6140,24 @@ replacestream@^4.0.3: require-directory@^2.1.1: version "2.1.1" - resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + resolved "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" integrity 
sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== require-main-filename@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" + resolved "https://registry.npmjs.org/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" integrity sha512-IqSUtOVP4ksd1C/ej5zeEh/BIP2ajqpn8c5x+q99gvcIG/Qf0cud5raVnE/Dwd0ua9TXYDoDc0RE5hBSdz22Ug== resolve-cwd@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" + resolved "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== dependencies: resolve-from "^5.0.0" resolve-dir@^1.0.0, resolve-dir@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-1.0.1.tgz#79a40644c362be82f26effe739c9bb5382046f43" + resolved "https://registry.npmjs.org/resolve-dir/-/resolve-dir-1.0.1.tgz#79a40644c362be82f26effe739c9bb5382046f43" integrity sha512-R7uiTjECzvOsWSfdM0QKFNBVFcK27aHOUwdvK53BcW8zqnGdYp0Fbj82cy54+2A4P2tFM22J5kRfe1R+lM/1yg== dependencies: expand-tilde "^2.0.0" @@ -5947,43 +6165,43 @@ resolve-dir@^1.0.0, resolve-dir@^1.0.1: resolve-from@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + resolved "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== resolve-from@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" + resolved 
"https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== resolve-options@^1.1.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/resolve-options/-/resolve-options-1.1.0.tgz#32bb9e39c06d67338dc9378c0d6d6074566ad131" + resolved "https://registry.npmjs.org/resolve-options/-/resolve-options-1.1.0.tgz#32bb9e39c06d67338dc9378c0d6d6074566ad131" integrity sha512-NYDgziiroVeDC29xq7bp/CacZERYsA9bXYd1ZmcJlF3BcrZv5pTb4NG7SjdyKDnXZ84aC4vo2u6sNKIA1LCu/A== dependencies: value-or-function "^3.0.0" resolve-url@^0.2.1: version "0.2.1" - resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" + resolved "https://registry.npmjs.org/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" integrity sha512-ZuF55hVUQaaczgOIwqWzkEcEidmlD/xl44x1UZnhOXcYuFN2S6+rcxpG+C1N3So0wvNI3DmJICUFfu2SxhBmvg== resolve.exports@^2.0.0: version "2.0.2" - resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-2.0.2.tgz#f8c934b8e6a13f539e38b7098e2e36134f01e800" + resolved "https://registry.npmjs.org/resolve.exports/-/resolve.exports-2.0.2.tgz#f8c934b8e6a13f539e38b7098e2e36134f01e800" integrity sha512-X2UW6Nw3n/aMgDVy+0rSqgHlv39WZAlZrXCdnbyEiKm17DSqHX4MmQMaST3FbeWR5FTuRcUwYAziZajji0Y7mg== resolve@^1.1.6, resolve@^1.1.7, resolve@^1.10.0, resolve@^1.20.0, resolve@^1.22.1, resolve@^1.4.0: - version "1.22.2" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.2.tgz#0ed0943d4e301867955766c9f3e1ae6d01c6845f" - integrity sha512-Sb+mjNHOULsBv818T40qSPeRiuWLyaGMa5ewydRLFimneixmVy2zdivRl+AF6jaYPC8ERxGDmFSiqui6SfPd+g== + version "1.22.4" + resolved "https://registry.npmjs.org/resolve/-/resolve-1.22.4.tgz#1dc40df46554cdaf8948a486a10f6ba1e2026c34" + integrity 
sha512-PXNdCiPqDqeUou+w1C2eTQbNfxKSuMxqTCuvlmmMsk1NWHL5fRrhY6Pl0qEYYc6+QqGClco1Qj8XnjPego4wfg== dependencies: - is-core-module "^2.11.0" + is-core-module "^2.13.0" path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" restore-cursor@^3.1.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e" + resolved "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e" integrity sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA== dependencies: onetime "^5.1.0" @@ -5991,75 +6209,87 @@ restore-cursor@^3.1.0: ret@~0.1.10: version "0.1.15" - resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" + resolved "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== reusify@^1.0.4: version "1.0.4" - resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + resolved "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== rimraf@^3.0.2: version "3.0.2" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + resolved "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== dependencies: glob "^7.1.3" -rollup@3.25.0: - version "3.25.0" - resolved "https://registry.yarnpkg.com/rollup/-/rollup-3.25.0.tgz#71327d396a9decbf23c87b55916ae7204211738a" - integrity 
sha512-FnJkNRst2jEZGw7f+v4hFo6UTzpDKrAKcHZWcEfm5/GJQ5CK7wgb4moNLNAe7npKUev7yQn1AY/YbZRIxOv6Qg== +rollup@4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-4.3.0.tgz#198e6ae4355899db630d75bc0e17b53f5d0fc20e" + integrity sha512-scIi1NrKLDIYSPK66jjECtII7vIgdAMFmFo8h6qm++I6nN9qDSV35Ku6erzGVqYjx+lj+j5wkusRMr++8SyDZg== optionalDependencies: + "@rollup/rollup-android-arm-eabi" "4.3.0" + "@rollup/rollup-android-arm64" "4.3.0" + "@rollup/rollup-darwin-arm64" "4.3.0" + "@rollup/rollup-darwin-x64" "4.3.0" + "@rollup/rollup-linux-arm-gnueabihf" "4.3.0" + "@rollup/rollup-linux-arm64-gnu" "4.3.0" + "@rollup/rollup-linux-arm64-musl" "4.3.0" + "@rollup/rollup-linux-x64-gnu" "4.3.0" + "@rollup/rollup-linux-x64-musl" "4.3.0" + "@rollup/rollup-win32-arm64-msvc" "4.3.0" + "@rollup/rollup-win32-ia32-msvc" "4.3.0" + "@rollup/rollup-win32-x64-msvc" "4.3.0" fsevents "~2.3.2" run-parallel@^1.1.9: version "1.2.0" - resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + resolved "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== dependencies: queue-microtask "^1.2.2" rxjs@7.8.1: version "7.8.1" - resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.1.tgz#6f6f3d99ea8044291efd92e7c7fcf562c4057543" + resolved "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz#6f6f3d99ea8044291efd92e7c7fcf562c4057543" integrity sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg== dependencies: tslib "^2.1.0" safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@~5.2.0: version "5.2.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" 
integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== safe-regex@^1.1.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" + resolved "https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" integrity sha512-aJXcif4xnaNUzvUuC5gcb46oTS7zvg4jpMTnuqtrEPlR3vFr4pxtdTwaF1Qs3Enjn9HK+ZlwQui+a7z0SywIzg== dependencies: ret "~0.1.10" safe-regex@^2.1.1: version "2.1.1" - resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-2.1.1.tgz#f7128f00d056e2fe5c11e81a1324dd974aadced2" + resolved "https://registry.npmjs.org/safe-regex/-/safe-regex-2.1.1.tgz#f7128f00d056e2fe5c11e81a1324dd974aadced2" integrity sha512-rx+x8AMzKb5Q5lQ95Zoi6ZbJqwCLkqi3XuJXp5P3rT8OEc6sZCJG5AE5dU3lsgRr/F4Bs31jSlVN+j5KrsGu9A== dependencies: regexp-tree "~0.1.1" sax@>=0.6.0: version "1.2.4" - resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" + resolved "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== schema-utils@^3.1.1, schema-utils@^3.1.2: - version "3.2.0" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.2.0.tgz#7dff4881064a4f22c09f0c6a1457feb820fd0636" - integrity sha512-0zTyLGyDJYd/MBxG1AhJkKa6fpEBds4OQO2ut0w7OYG+ZGhGea09lijvzsqegYSik88zc7cUtIlnnO+/BvD6gQ== + version "3.3.0" + resolved 
"https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz#f50a88877c3c01652a15b622ae9e9795df7a60fe" + integrity sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg== dependencies: "@types/json-schema" "^7.0.8" ajv "^6.12.5" @@ -6067,43 +6297,43 @@ schema-utils@^3.1.1, schema-utils@^3.1.2: semver-greatest-satisfied-range@^1.1.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/semver-greatest-satisfied-range/-/semver-greatest-satisfied-range-1.1.0.tgz#13e8c2658ab9691cb0cd71093240280d36f77a5b" + resolved "https://registry.npmjs.org/semver-greatest-satisfied-range/-/semver-greatest-satisfied-range-1.1.0.tgz#13e8c2658ab9691cb0cd71093240280d36f77a5b" integrity sha512-Ny/iyOzSSa8M5ML46IAx3iXc6tfOsYU2R4AXi2UpHk60Zrgyq6eqPj/xiOfS0rRl/iiQ/rdJkVjw/5cdUyCntQ== dependencies: sver-compat "^1.5.0" "semver@2 || 3 || 4 || 5": version "5.7.2" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8" + resolved "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8" integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== -semver@^6.0.0, semver@^6.3.0: +semver@^6.3.0, semver@^6.3.1: version "6.3.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" + resolved "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== -semver@^7.3.4, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3: +semver@^7.3.4, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3, semver@^7.5.4: version "7.5.4" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" + resolved "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" integrity 
sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== dependencies: lru-cache "^6.0.0" serialize-javascript@^6.0.1: version "6.0.1" - resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.1.tgz#b206efb27c3da0b0ab6b52f48d170b7996458e5c" + resolved "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.1.tgz#b206efb27c3da0b0ab6b52f48d170b7996458e5c" integrity sha512-owoXEFjWRllis8/M1Q+Cw5k8ZH40e3zhp/ovX+Xr/vi1qj6QesbyXXViFbpNvWvPNAD62SutwEXavefrLJWj7w== dependencies: randombytes "^2.1.0" set-blocking@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" + resolved "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw== set-value@^2.0.0, set-value@^2.0.1: version "2.0.1" - resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" + resolved "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== dependencies: extend-shallow "^2.0.1" @@ -6113,20 +6343,20 @@ set-value@^2.0.0, set-value@^2.0.1: shebang-command@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + resolved "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== dependencies: shebang-regex "^3.0.0" shebang-regex@^3.0.0: version "3.0.0" - resolved 
"https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + resolved "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== shiki@^0.14.1: - version "0.14.2" - resolved "https://registry.yarnpkg.com/shiki/-/shiki-0.14.2.tgz#d51440800b701392b31ce2336036058e338247a1" - integrity sha512-ltSZlSLOuSY0M0Y75KA+ieRaZ0Trf5Wl3gutE7jzLuIcWxLp5i/uEnLoQWNvgKXQ5OMpGkJnVMRLAuzjc0LJ2A== + version "0.14.4" + resolved "https://registry.npmjs.org/shiki/-/shiki-0.14.4.tgz#2454969b466a5f75067d0f2fa0d7426d32881b20" + integrity sha512-IXCRip2IQzKwxArNNq1S+On4KPML3Yyn8Zzs/xRgcgOWIr8ntIK3IKzjFPfjy/7kt9ZMjc+FItfqHRBg8b6tNQ== dependencies: ansi-sequence-parser "^1.1.0" jsonc-parser "^3.2.0" @@ -6135,17 +6365,17 @@ shiki@^0.14.1: signal-exit@^3.0.2, signal-exit@^3.0.3, signal-exit@^3.0.7: version "3.0.7" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + resolved "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== signal-exit@^4.0.1: - version "4.0.2" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.0.2.tgz#ff55bb1d9ff2114c13b400688fa544ac63c36967" - integrity sha512-MY2/qGx4enyjprQnFaZsHib3Yadh3IXyV2C321GY0pjGfVBu4un0uDJkwgdxqO+Rdx8JMT8IfJIRwbYVz3Ob3Q== + version "4.1.0" + resolved "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04" + integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== sirv@^2.0.3: version "2.0.3" - resolved "https://registry.yarnpkg.com/sirv/-/sirv-2.0.3.tgz#ca5868b87205a74bef62a469ed0296abceccd446" + resolved 
"https://registry.npmjs.org/sirv/-/sirv-2.0.3.tgz#ca5868b87205a74bef62a469ed0296abceccd446" integrity sha512-O9jm9BsID1P+0HOi81VpXPoDxYP374pkOLzACAoyUQ/3OUVndNpsz6wMnY2z+yOxzbllCKZrM+9QrWsv4THnyA== dependencies: "@polka/url" "^1.0.0-next.20" @@ -6154,22 +6384,22 @@ sirv@^2.0.3: sisteransi@^1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" + resolved "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== slash@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + resolved "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== slash@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7" + resolved "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7" integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew== slice-ansi@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-4.0.0.tgz#500e8dd0fd55b05815086255b3195adf2a45fe6b" + resolved "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz#500e8dd0fd55b05815086255b3195adf2a45fe6b" integrity sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ== dependencies: ansi-styles "^4.0.0" @@ -6178,7 +6408,7 @@ slice-ansi@^4.0.0: snapdragon-node@^2.0.1: version "2.1.1" - resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" + resolved 
"https://registry.npmjs.org/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== dependencies: define-property "^1.0.0" @@ -6187,14 +6417,14 @@ snapdragon-node@^2.0.1: snapdragon-util@^3.0.1: version "3.0.1" - resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" + resolved "https://registry.npmjs.org/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== dependencies: kind-of "^3.2.0" snapdragon@^0.8.1: version "0.8.2" - resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" + resolved "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== dependencies: base "^0.11.1" @@ -6208,7 +6438,7 @@ snapdragon@^0.8.1: source-map-resolve@^0.5.0: version "0.5.3" - resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a" + resolved "https://registry.npmjs.org/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a" integrity sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw== dependencies: atob "^2.1.2" @@ -6219,7 +6449,7 @@ source-map-resolve@^0.5.0: source-map-resolve@^0.6.0: version "0.6.0" - resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.6.0.tgz#3d9df87e236b53f16d01e58150fc7711138e5ed2" + resolved "https://registry.npmjs.org/source-map-resolve/-/source-map-resolve-0.6.0.tgz#3d9df87e236b53f16d01e58150fc7711138e5ed2" integrity 
sha512-KXBr9d/fO/bWo97NXsPIAW1bFSBOuCnjbNTBMO7N59hsv5i9yzRDfcYwwt0l04+VqnKC+EwzvJZIP/qkuMgR/w== dependencies: atob "^2.1.2" @@ -6227,7 +6457,7 @@ source-map-resolve@^0.6.0: source-map-support@0.5.13: version "0.5.13" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.13.tgz#31b24a9c2e73c2de85066c0feb7d44767ed52932" + resolved "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz#31b24a9c2e73c2de85066c0feb7d44767ed52932" integrity sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w== dependencies: buffer-from "^1.0.0" @@ -6235,7 +6465,7 @@ source-map-support@0.5.13: source-map-support@~0.5.20: version "0.5.21" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" + resolved "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== dependencies: buffer-from "^1.0.0" @@ -6243,32 +6473,32 @@ source-map-support@~0.5.20: source-map-url@^0.4.0: version "0.4.1" - resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.1.tgz#0af66605a745a5a2f91cf1bbf8a7afbc283dec56" + resolved "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.1.tgz#0af66605a745a5a2f91cf1bbf8a7afbc283dec56" integrity sha512-cPiFOTLUKvJFIg4SKVScy4ilPPW6rFgMgfuZJPNoDuMs3nC1HbMUycBoJw77xFIp6z1UJQJOfx6C9GMH80DiTw== source-map@^0.5.1, source-map@^0.5.6: version "0.5.7" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" + resolved "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ== source-map@^0.6.0, source-map@^0.6.1: 
version "0.6.1" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + resolved "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== source-map@^0.7.3: version "0.7.4" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.4.tgz#a9bbe705c9d8846f4e08ff6765acf0f1b0898656" + resolved "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz#a9bbe705c9d8846f4e08ff6765acf0f1b0898656" integrity sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA== sparkles@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/sparkles/-/sparkles-1.0.1.tgz#008db65edce6c50eec0c5e228e1945061dd0437c" + resolved "https://registry.npmjs.org/sparkles/-/sparkles-1.0.1.tgz#008db65edce6c50eec0c5e228e1945061dd0437c" integrity sha512-dSO0DDYUahUt/0/pD/Is3VIm5TGJjludZ0HVymmhYF6eNA53PVLhnUk0znSYbH8IYBuJdCE+1luR22jNLMaQdw== spdx-correct@^3.0.0: version "3.2.0" - resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.2.0.tgz#4f5ab0668f0059e34f9c00dce331784a12de4e9c" + resolved "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz#4f5ab0668f0059e34f9c00dce331784a12de4e9c" integrity sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA== dependencies: spdx-expression-parse "^3.0.0" @@ -6276,12 +6506,12 @@ spdx-correct@^3.0.0: spdx-exceptions@^2.1.0: version "2.3.0" - resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz#3f28ce1a77a00372683eade4a433183527a2163d" + resolved "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz#3f28ce1a77a00372683eade4a433183527a2163d" integrity sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A== spdx-expression-parse@^3.0.0: version 
"3.0.1" - resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679" + resolved "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679" integrity sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q== dependencies: spdx-exceptions "^2.1.0" @@ -6289,36 +6519,36 @@ spdx-expression-parse@^3.0.0: spdx-license-ids@^3.0.0: version "3.0.13" - resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.13.tgz#7189a474c46f8d47c7b0da4b987bb45e908bd2d5" + resolved "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.13.tgz#7189a474c46f8d47c7b0da4b987bb45e908bd2d5" integrity sha512-XkD+zwiqXHikFZm4AX/7JSCXA98U5Db4AFd5XUg/+9UNtnH75+Z9KxtpYiJZx36mUDVOwH83pl7yvCer6ewM3w== split-string@^3.0.1, split-string@^3.0.2: version "3.1.0" - resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" + resolved "https://registry.npmjs.org/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== dependencies: extend-shallow "^3.0.0" sprintf-js@~1.0.2: version "1.0.3" - resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + resolved "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== stack-trace@0.0.10: version "0.0.10" - resolved "https://registry.yarnpkg.com/stack-trace/-/stack-trace-0.0.10.tgz#547c70b347e8d32b4e108ea1a2a159e5fdde19c0" + resolved "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz#547c70b347e8d32b4e108ea1a2a159e5fdde19c0" integrity 
sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg== stack-utils@^2.0.3: version "2.0.6" - resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.6.tgz#aaf0748169c02fc33c8232abccf933f54a1cc34f" + resolved "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz#aaf0748169c02fc33c8232abccf933f54a1cc34f" integrity sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ== dependencies: escape-string-regexp "^2.0.0" static-extend@^0.1.1: version "0.1.2" - resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" + resolved "https://registry.npmjs.org/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" integrity sha512-72E9+uLc27Mt718pMHt9VMNiAL4LMsmDbBva8mxWUCkT07fSzEGMYUCk0XWY6lp0j6RBAG4cJ3mWuZv2OE3s0g== dependencies: define-property "^0.2.5" @@ -6326,30 +6556,30 @@ static-extend@^0.1.1: stream-exhaust@^1.0.1, stream-exhaust@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/stream-exhaust/-/stream-exhaust-1.0.2.tgz#acdac8da59ef2bc1e17a2c0ccf6c320d120e555d" + resolved "https://registry.npmjs.org/stream-exhaust/-/stream-exhaust-1.0.2.tgz#acdac8da59ef2bc1e17a2c0ccf6c320d120e555d" integrity sha512-b/qaq/GlBK5xaq1yrK9/zFcyRSTNxmcZwFLGSTG0mXgZl/4Z6GgiyYOXOvY7N3eEvFRAG1bkDRz5EPGSvPYQlw== stream-read-all@^3.0.1: version "3.0.1" - resolved "https://registry.yarnpkg.com/stream-read-all/-/stream-read-all-3.0.1.tgz#60762ae45e61d93ba0978cda7f3913790052ad96" + resolved "https://registry.npmjs.org/stream-read-all/-/stream-read-all-3.0.1.tgz#60762ae45e61d93ba0978cda7f3913790052ad96" integrity sha512-EWZT9XOceBPlVJRrYcykW8jyRSZYbkb/0ZK36uLEmoWVO5gxBOnntNTseNzfREsqxqdfEGQrD8SXQ3QWbBmq8A== stream-shift@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.1.tgz#d7088281559ab2778424279b0877da3c392d5a3d" + resolved 
"https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz#d7088281559ab2778424279b0877da3c392d5a3d" integrity sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ== streamx@^2.12.5: - version "2.14.1" - resolved "https://registry.yarnpkg.com/streamx/-/streamx-2.14.1.tgz#783ebfb0ca494fd4655de11cbdb1ae20c4587147" - integrity sha512-Zt63KGejYaQIbZOlJeILKJxmzKJDQA6gLn1W0Fuiz7O4kTorbIQ3jhf57euOzuNAS0Y1BBP1HzGdI8bcf6bTNg== + version "2.15.1" + resolved "https://registry.npmjs.org/streamx/-/streamx-2.15.1.tgz#396ad286d8bc3eeef8f5cea3f029e81237c024c6" + integrity sha512-fQMzy2O/Q47rgwErk/eGeLu/roaFWV0jVsogDmrszM9uIw8L5OA+t+V93MgYlufNptfjmYR1tOMWhei/Eh7TQA== dependencies: fast-fifo "^1.1.0" queue-tick "^1.0.1" string-length@^4.0.1: version "4.0.2" - resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" + resolved "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== dependencies: char-regex "^1.0.2" @@ -6357,7 +6587,7 @@ string-length@^4.0.1: "string-width-cjs@npm:string-width@^4.2.0", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: version "4.2.3" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== dependencies: emoji-regex "^8.0.0" @@ -6366,7 +6596,7 @@ string-length@^4.0.1: string-width@^1.0.1, string-width@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" + resolved 
"https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" integrity sha512-0XsVpQLnVCXHJfyEs8tC0zpTVIr5PKKsQtkT29IwupnPTjtPmQ3xT/4yCREF9hYkV/3M3kzcUTSAZT6a6h81tw== dependencies: code-point-at "^1.0.0" @@ -6375,7 +6605,7 @@ string-width@^1.0.1, string-width@^1.0.2: string-width@^5.0.1, string-width@^5.1.2: version "5.1.2" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794" + resolved "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794" integrity sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA== dependencies: eastasianwidth "^0.2.0" @@ -6384,109 +6614,109 @@ string-width@^5.0.1, string-width@^5.1.2: string_decoder@^1.1.1: version "1.3.0" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== dependencies: safe-buffer "~5.2.0" string_decoder@~1.1.1: version "1.1.1" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== dependencies: safe-buffer "~5.1.0" "strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + resolved 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== dependencies: ansi-regex "^5.0.1" strip-ansi@^3.0.0, strip-ansi@^3.0.1: version "3.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" + resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" integrity sha512-VhumSSbBqDTP8p2ZLKj40UjBCV4+v8bUSEpUb4KjRgWk9pbqGF4REFj6KEagidb2f/M6AzC0EmFyDNGaw9OCzg== dependencies: ansi-regex "^2.0.0" strip-ansi@^7.0.1: version "7.1.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" + resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== dependencies: ansi-regex "^6.0.1" strip-bom-string@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/strip-bom-string/-/strip-bom-string-1.0.0.tgz#e5211e9224369fbb81d633a2f00044dc8cedad92" + resolved "https://registry.npmjs.org/strip-bom-string/-/strip-bom-string-1.0.0.tgz#e5211e9224369fbb81d633a2f00044dc8cedad92" integrity sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g== strip-bom@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" + resolved "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" integrity sha512-kwrX1y7czp1E69n2ajbG65mIo9dqvJ+8aBQXOGVxqwvNbsXdFM6Lq37dLAY3mknUwru8CfcCbfOLL/gMo+fi3g== dependencies: is-utf8 "^0.2.0" strip-bom@^4.0.0: version "4.0.0" - resolved 
"https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" + resolved "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== strip-final-newline@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" + resolved "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== strip-indent@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001" + resolved "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001" integrity sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ== dependencies: min-indent "^1.0.0" strip-indent@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-4.0.0.tgz#b41379433dd06f5eae805e21d631e07ee670d853" + resolved "https://registry.npmjs.org/strip-indent/-/strip-indent-4.0.0.tgz#b41379433dd06f5eae805e21d631e07ee670d853" integrity sha512-mnVSV2l+Zv6BLpSD/8V87CW/y9EmmbYzGCIavsnsI6/nwn26DwffM/yztm30Z/I2DY9wdS3vXVCMnHDgZaVNoA== dependencies: min-indent "^1.0.1" -strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: +strip-json-comments@^3.1.1: version "3.1.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" integrity 
sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== supports-color@^5.3.0: version "5.5.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + resolved "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== dependencies: has-flag "^3.0.0" supports-color@^7.1.0: version "7.2.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + resolved "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== dependencies: has-flag "^4.0.0" supports-color@^8.0.0, supports-color@^8.1.1: version "8.1.1" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + resolved "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== dependencies: has-flag "^4.0.0" supports-preserve-symlinks-flag@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + resolved "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== sver-compat@^1.5.0: version "1.5.0" - resolved 
"https://registry.yarnpkg.com/sver-compat/-/sver-compat-1.5.0.tgz#3cf87dfeb4d07b4a3f14827bc186b3fd0c645cd8" + resolved "https://registry.npmjs.org/sver-compat/-/sver-compat-1.5.0.tgz#3cf87dfeb4d07b4a3f14827bc186b3fd0c645cd8" integrity sha512-aFTHfmjwizMNlNE6dsGmoAM4lHjL0CyiobWaFiXWSlD7cIxshW422Nb8KbXCmR6z+0ZEPY+daXJrDyh/vuwTyg== dependencies: es6-iterator "^2.0.1" @@ -6494,7 +6724,7 @@ sver-compat@^1.5.0: table-layout@^3.0.0: version "3.0.2" - resolved "https://registry.yarnpkg.com/table-layout/-/table-layout-3.0.2.tgz#69c2be44388a5139b48c59cf21e73b488021769a" + resolved "https://registry.npmjs.org/table-layout/-/table-layout-3.0.2.tgz#69c2be44388a5139b48c59cf21e73b488021769a" integrity sha512-rpyNZYRw+/C+dYkcQ3Pr+rLxW4CfHpXjPDnG7lYhdRoUcZTUt+KEsX+94RGp/aVp/MQU35JCITv2T/beY4m+hw== dependencies: "@75lb/deep-merge" "^1.1.1" @@ -6507,19 +6737,19 @@ table-layout@^3.0.0: tapable@^2.1.1, tapable@^2.2.0: version "2.2.1" - resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" + resolved "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== teex@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/teex/-/teex-1.0.1.tgz#b8fa7245ef8e8effa8078281946c85ab780a0b12" + resolved "https://registry.npmjs.org/teex/-/teex-1.0.1.tgz#b8fa7245ef8e8effa8078281946c85ab780a0b12" integrity sha512-eYE6iEI62Ni1H8oIa7KlDU6uQBtqr4Eajni3wX7rpfXD8ysFx8z0+dri+KWEPWpBsxXfxu58x/0jvTVT1ekOSg== dependencies: streamx "^2.12.5" terser-webpack-plugin@^5.3.7: version "5.3.9" - resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.9.tgz#832536999c51b46d468067f9e37662a3b96adfe1" + resolved "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.9.tgz#832536999c51b46d468067f9e37662a3b96adfe1" integrity 
sha512-ZuXsqE07EcggTWQjXUj+Aot/OMcD0bMKGgF63f7UxYcu5/AJF53aIpK1YoP5xR9l6s/Hy2b+t1AM0bLNPRuhwA== dependencies: "@jridgewell/trace-mapping" "^0.3.17" @@ -6529,9 +6759,9 @@ terser-webpack-plugin@^5.3.7: terser "^5.16.8" terser@^5.16.8, terser@^5.9.0: - version "5.17.7" - resolved "https://registry.yarnpkg.com/terser/-/terser-5.17.7.tgz#2a8b134826fe179b711969fd9d9a0c2479b2a8c3" - integrity sha512-/bi0Zm2C6VAexlGgLlVxA0P2lru/sdLyfCVaRMfKVo9nWxbmz7f/sD8VPybPeSUJaJcwmCJis9pBIhcVcG1QcQ== + version "5.19.4" + resolved "https://registry.npmjs.org/terser/-/terser-5.19.4.tgz#941426fa482bf9b40a0308ab2b3cd0cf7c775ebd" + integrity sha512-6p1DjHeuluwxDXcuT9VR8p64klWJKo1ILiy19s6C9+0Bh2+NWTX6nD9EPppiER4ICkHDVB1RkVpin/YW2nQn/g== dependencies: "@jridgewell/source-map" "^0.3.3" acorn "^8.8.2" @@ -6540,7 +6770,7 @@ terser@^5.16.8, terser@^5.9.0: test-exclude@^6.0.0: version "6.0.0" - resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + resolved "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== dependencies: "@istanbuljs/schema" "^0.1.2" @@ -6549,22 +6779,22 @@ test-exclude@^6.0.0: text-table@^0.2.0: version "0.2.0" - resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + resolved "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== textextensions@^3.2.0: version "3.3.0" - resolved "https://registry.yarnpkg.com/textextensions/-/textextensions-3.3.0.tgz#03530d5287b86773c08b77458589148870cc71d3" + resolved "https://registry.npmjs.org/textextensions/-/textextensions-3.3.0.tgz#03530d5287b86773c08b77458589148870cc71d3" integrity 
sha512-mk82dS8eRABNbeVJrEiN5/UMSCliINAuz8mkUwH4SwslkNP//gbEzlWNS5au0z5Dpx40SQxzqZevZkn+WYJ9Dw== thingies@^1.11.1: version "1.12.0" - resolved "https://registry.yarnpkg.com/thingies/-/thingies-1.12.0.tgz#a815c224482d607aa70f563d3cbb351a338e4710" + resolved "https://registry.npmjs.org/thingies/-/thingies-1.12.0.tgz#a815c224482d607aa70f563d3cbb351a338e4710" integrity sha512-AiGqfYC1jLmJagbzQGuoZRM48JPsr9yB734a7K6wzr34NMhjUPrWSQrkF7ZBybf3yCerCL2Gcr02kMv4NmaZfA== through2-filter@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/through2-filter/-/through2-filter-3.0.0.tgz#700e786df2367c2c88cd8aa5be4cf9c1e7831254" + resolved "https://registry.npmjs.org/through2-filter/-/through2-filter-3.0.0.tgz#700e786df2367c2c88cd8aa5be4cf9c1e7831254" integrity sha512-jaRjI2WxN3W1V8/FMZ9HKIBXixtiqs3SQSX4/YGIiP3gL6djW48VoZq9tDqeCWs3MT8YY5wb/zli8VW8snY1CA== dependencies: through2 "~2.0.0" @@ -6572,7 +6802,7 @@ through2-filter@^3.0.0: through2@^2.0.0, through2@^2.0.3, through2@~2.0.0: version "2.0.5" - resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" + resolved "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== dependencies: readable-stream "~2.3.6" @@ -6580,7 +6810,7 @@ through2@^2.0.0, through2@^2.0.3, through2@~2.0.0: through2@^3.0.0, through2@^3.0.1: version "3.0.2" - resolved "https://registry.yarnpkg.com/through2/-/through2-3.0.2.tgz#99f88931cfc761ec7678b41d5d7336b5b6a07bf4" + resolved "https://registry.npmjs.org/through2/-/through2-3.0.2.tgz#99f88931cfc761ec7678b41d5d7336b5b6a07bf4" integrity sha512-enaDQ4MUyP2W6ZyT6EsMzqBPZaM/avg8iuo+l2d3QCs0J+6RaqkHV/2/lOwDTueBHeJ/2LG9lrLW3d5rWPucuQ== dependencies: inherits "^2.0.4" @@ -6588,24 +6818,24 @@ through2@^3.0.0, through2@^3.0.1: through2@^4.0.2: version "4.0.2" - resolved 
"https://registry.yarnpkg.com/through2/-/through2-4.0.2.tgz#a7ce3ac2a7a8b0b966c80e7c49f0484c3b239764" + resolved "https://registry.npmjs.org/through2/-/through2-4.0.2.tgz#a7ce3ac2a7a8b0b966c80e7c49f0484c3b239764" integrity sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw== dependencies: readable-stream "3" through@^2.3.6, through@^2.3.8: version "2.3.8" - resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" + resolved "https://registry.npmjs.org/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" integrity sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg== time-stamp@^1.0.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/time-stamp/-/time-stamp-1.1.0.tgz#764a5a11af50561921b133f3b44e618687e0f5c3" + resolved "https://registry.npmjs.org/time-stamp/-/time-stamp-1.1.0.tgz#764a5a11af50561921b133f3b44e618687e0f5c3" integrity sha512-gLCeArryy2yNTRzTGKbZbloctj64jkZ57hj5zdraXue6aFgd6PmvVtEyiUU+hvU0v7q08oVv8r8ev0tRo6bvgw== timers-ext@^0.1.7: version "0.1.7" - resolved "https://registry.yarnpkg.com/timers-ext/-/timers-ext-0.1.7.tgz#6f57ad8578e07a3fb9f91d9387d65647555e25c6" + resolved "https://registry.npmjs.org/timers-ext/-/timers-ext-0.1.7.tgz#6f57ad8578e07a3fb9f91d9387d65647555e25c6" integrity sha512-b85NUNzTSdodShTIbky6ZF02e8STtVVfD+fu4aXXShEELpozH+bCpJLYMPZbsABN2wDH7fJpqIoXxJpzbf0NqQ== dependencies: es5-ext "~0.10.46" @@ -6613,12 +6843,12 @@ timers-ext@^0.1.7: tmpl@1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + resolved "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== to-absolute-glob@^2.0.0: version "2.0.2" - resolved 
"https://registry.yarnpkg.com/to-absolute-glob/-/to-absolute-glob-2.0.2.tgz#1865f43d9e74b0822db9f145b78cff7d0f7c849b" + resolved "https://registry.npmjs.org/to-absolute-glob/-/to-absolute-glob-2.0.2.tgz#1865f43d9e74b0822db9f145b78cff7d0f7c849b" integrity sha512-rtwLUQEwT8ZeKQbyFJyomBRYXyE16U5VKuy0ftxLMK/PZb2fkOsg5r9kHdauuVDbsNdIBoC/HCthpidamQFXYA== dependencies: is-absolute "^1.0.0" @@ -6626,19 +6856,19 @@ to-absolute-glob@^2.0.0: to-fast-properties@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + resolved "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== to-object-path@^0.3.0: version "0.3.0" - resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" + resolved "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" integrity sha512-9mWHdnGRuh3onocaHzukyvCZhzvr6tiflAy/JRFXcJX0TjgfWA9pk9t8CMbzmBE4Jfw58pXbkngtBtqYxzNEyg== dependencies: kind-of "^3.0.2" to-regex-range@^2.1.0: version "2.1.1" - resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" + resolved "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" integrity sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg== dependencies: is-number "^3.0.0" @@ -6646,14 +6876,14 @@ to-regex-range@^2.1.0: to-regex-range@^5.0.1: version "5.0.1" - resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + resolved 
"https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== dependencies: is-number "^7.0.0" to-regex@^3.0.1, to-regex@^3.0.2: version "3.0.2" - resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" + resolved "https://registry.npmjs.org/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== dependencies: define-property "^2.0.2" @@ -6663,24 +6893,24 @@ to-regex@^3.0.1, to-regex@^3.0.2: to-through@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/to-through/-/to-through-2.0.0.tgz#fc92adaba072647bc0b67d6b03664aa195093af6" + resolved "https://registry.npmjs.org/to-through/-/to-through-2.0.0.tgz#fc92adaba072647bc0b67d6b03664aa195093af6" integrity sha512-+QIz37Ly7acM4EMdw2PRN389OneM5+d844tirkGp4dPKzI5OE72V9OsbFp+CIYJDahZ41ZV05hNtcPAQUAm9/Q== dependencies: through2 "^2.0.3" totalist@^3.0.0: version "3.0.1" - resolved "https://registry.yarnpkg.com/totalist/-/totalist-3.0.1.tgz#ba3a3d600c915b1a97872348f79c127475f6acf8" + resolved "https://registry.npmjs.org/totalist/-/totalist-3.0.1.tgz#ba3a3d600c915b1a97872348f79c127475f6acf8" integrity sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ== trim-newlines@^4.0.2: version "4.1.1" - resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-4.1.1.tgz#28c88deb50ed10c7ba6dc2474421904a00139125" + resolved "https://registry.npmjs.org/trim-newlines/-/trim-newlines-4.1.1.tgz#28c88deb50ed10c7ba6dc2474421904a00139125" integrity sha512-jRKj0n0jXWo6kh62nA5TEh3+4igKDXLvzBJcPpiizP7oOolUrYIxmVBG9TOtHYFHoddUk6YvAkGeGoSVTXfQXQ== ts-jest@29.1.1: version "29.1.1" - resolved 
"https://registry.yarnpkg.com/ts-jest/-/ts-jest-29.1.1.tgz#f58fe62c63caf7bfcc5cc6472082f79180f0815b" + resolved "https://registry.npmjs.org/ts-jest/-/ts-jest-29.1.1.tgz#f58fe62c63caf7bfcc5cc6472082f79180f0815b" integrity sha512-D6xjnnbP17cC85nliwGiL+tpoKN0StpgE0TeOjXQTU6MVCfsB4v7aW05CgQ/1OywGb0x/oy9hHFnN+sczTiRaA== dependencies: bs-logger "0.x" @@ -6694,7 +6924,7 @@ ts-jest@29.1.1: ts-node@10.9.1: version "10.9.1" - resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.1.tgz#e73de9102958af9e1f0b168a6ff320e25adcff4b" + resolved "https://registry.npmjs.org/ts-node/-/ts-node-10.9.1.tgz#e73de9102958af9e1f0b168a6ff320e25adcff4b" integrity sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw== dependencies: "@cspotcode/source-map-support" "^0.8.0" @@ -6713,76 +6943,76 @@ ts-node@10.9.1: tslib@^1.8.1: version "1.14.1" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + resolved "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== -tslib@^2.1.0, tslib@^2.3.0, tslib@^2.5.3: - version "2.5.3" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.5.3.tgz#24944ba2d990940e6e982c4bea147aba80209913" - integrity sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w== +tslib@^2.1.0, tslib@^2.3.0, tslib@^2.4.0, tslib@^2.6.2: + version "2.6.2" + resolved "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" + integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== tsutils@^3.21.0: version "3.21.0" - resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" + resolved 
"https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== dependencies: tslib "^1.8.1" type-check@^0.4.0, type-check@~0.4.0: version "0.4.0" - resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + resolved "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== dependencies: prelude-ls "^1.2.1" type-detect@4.0.8: version "4.0.8" - resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" + resolved "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== type-fest@^0.20.2: version "0.20.2" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== type-fest@^0.21.3: version "0.21.3" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" + resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== type-fest@^0.6.0: version "0.6.0" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.6.0.tgz#8d2a2370d3df886eb5c90ada1c5bf6188acf838b" + resolved 
"https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz#8d2a2370d3df886eb5c90ada1c5bf6188acf838b" integrity sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg== type-fest@^0.8.1: version "0.8.1" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d" + resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d" integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA== type-fest@^1.0.1, type-fest@^1.2.1, type-fest@^1.2.2: version "1.4.0" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-1.4.0.tgz#e9fb813fe3bf1744ec359d55d1affefa76f14be1" + resolved "https://registry.npmjs.org/type-fest/-/type-fest-1.4.0.tgz#e9fb813fe3bf1744ec359d55d1affefa76f14be1" integrity sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA== type@^1.0.1: version "1.2.0" - resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" + resolved "https://registry.npmjs.org/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== type@^2.7.2: version "2.7.2" - resolved "https://registry.yarnpkg.com/type/-/type-2.7.2.tgz#2376a15a3a28b1efa0f5350dcf72d24df6ef98d0" + resolved "https://registry.npmjs.org/type/-/type-2.7.2.tgz#2376a15a3a28b1efa0f5350dcf72d24df6ef98d0" integrity sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw== typedarray@^0.0.6: version "0.0.6" - resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" + resolved "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" integrity 
sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA== typedoc@0.24.8: version "0.24.8" - resolved "https://registry.yarnpkg.com/typedoc/-/typedoc-0.24.8.tgz#cce9f47ba6a8d52389f5e583716a2b3b4335b63e" + resolved "https://registry.npmjs.org/typedoc/-/typedoc-0.24.8.tgz#cce9f47ba6a8d52389f5e583716a2b3b4335b63e" integrity sha512-ahJ6Cpcvxwaxfu4KtjA8qZNqS43wYt6JL27wYiIgl1vd38WW/KWX11YuAeZhuz9v+ttrutSsgK+XO1CjL1kA3w== dependencies: lunr "^2.3.9" @@ -6792,32 +7022,32 @@ typedoc@0.24.8: typescript@5.1.3: version "5.1.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.1.3.tgz#8d84219244a6b40b6fb2b33cc1c062f715b9e826" + resolved "https://registry.npmjs.org/typescript/-/typescript-5.1.3.tgz#8d84219244a6b40b6fb2b33cc1c062f715b9e826" integrity sha512-XH627E9vkeqhlZFQuL+UsyAXEnibT0kWR2FWONlr4sTjvxyJYnyefgrkyECLzM5NenmKzRAy2rR/OlYLA1HkZw== typical@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/typical/-/typical-4.0.0.tgz#cbeaff3b9d7ae1e2bbfaf5a4e6f11eccfde94fc4" + resolved "https://registry.npmjs.org/typical/-/typical-4.0.0.tgz#cbeaff3b9d7ae1e2bbfaf5a4e6f11eccfde94fc4" integrity sha512-VAH4IvQ7BDFYglMd7BPRDfLgxZZX4O4TFcRDA6EN5X7erNJJq+McIEp8np9aVtxrCJ6qx4GTYVfOWNjcqwZgRw== typical@^7.1.1: version "7.1.1" - resolved "https://registry.yarnpkg.com/typical/-/typical-7.1.1.tgz#ba177ab7ab103b78534463ffa4c0c9754523ac1f" + resolved "https://registry.npmjs.org/typical/-/typical-7.1.1.tgz#ba177ab7ab103b78534463ffa4c0c9754523ac1f" integrity sha512-T+tKVNs6Wu7IWiAce5BgMd7OZfNYUndHwc5MknN+UHOudi7sGZzuHdCadllRuqJ3fPtgFtIH9+lt9qRv6lmpfA== unc-path-regex@^0.1.2: version "0.1.2" - resolved "https://registry.yarnpkg.com/unc-path-regex/-/unc-path-regex-0.1.2.tgz#e73dd3d7b0d7c5ed86fbac6b0ae7d8c6a69d50fa" + resolved "https://registry.npmjs.org/unc-path-regex/-/unc-path-regex-0.1.2.tgz#e73dd3d7b0d7c5ed86fbac6b0ae7d8c6a69d50fa" integrity 
sha512-eXL4nmJT7oCpkZsHZUOJo8hcX3GbsiDOa0Qu9F646fi8dT3XuSVopVqAcEiVzSKKH7UoDti23wNX3qGFxcW5Qg== undertaker-registry@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/undertaker-registry/-/undertaker-registry-1.0.1.tgz#5e4bda308e4a8a2ae584f9b9a4359a499825cc50" + resolved "https://registry.npmjs.org/undertaker-registry/-/undertaker-registry-1.0.1.tgz#5e4bda308e4a8a2ae584f9b9a4359a499825cc50" integrity sha512-UR1khWeAjugW3548EfQmL9Z7pGMlBgXteQpr1IZeZBtnkCJQJIJ1Scj0mb9wQaPvUZ9Q17XqW6TIaPchJkyfqw== undertaker@^1.2.1: version "1.3.0" - resolved "https://registry.yarnpkg.com/undertaker/-/undertaker-1.3.0.tgz#363a6e541f27954d5791d6fa3c1d321666f86d18" + resolved "https://registry.npmjs.org/undertaker/-/undertaker-1.3.0.tgz#363a6e541f27954d5791d6fa3c1d321666f86d18" integrity sha512-/RXwi5m/Mu3H6IHQGww3GNt1PNXlbeCuclF2QYR14L/2CHPz3DFZkvB5hZ0N/QUkiXWCACML2jXViIQEQc2MLg== dependencies: arr-flatten "^1.0.1" @@ -6831,9 +7061,14 @@ undertaker@^1.2.1: object.reduce "^1.0.0" undertaker-registry "^1.0.0" +undici-types@~5.26.4: + version "5.26.5" + resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" + integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== + union-value@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" + resolved "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== dependencies: arr-union "^3.1.0" @@ -6843,7 +7078,7 @@ union-value@^1.0.0: unique-stream@^2.0.2: version "2.3.1" - resolved "https://registry.yarnpkg.com/unique-stream/-/unique-stream-2.3.1.tgz#c65d110e9a4adf9a6c5948b28053d9a8d04cbeac" + resolved 
"https://registry.npmjs.org/unique-stream/-/unique-stream-2.3.1.tgz#c65d110e9a4adf9a6c5948b28053d9a8d04cbeac" integrity sha512-2nY4TnBE70yoxHkDli7DMazpWiP7xMdCYqU2nBRO0UB+ZpEkGsSija7MvmvnZFUeC+mrgiUfcHSr3LmRFIg4+A== dependencies: json-stable-stringify-without-jsonify "^1.0.1" @@ -6851,12 +7086,12 @@ unique-stream@^2.0.2: universalify@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" + resolved "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== unset-value@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" + resolved "https://registry.npmjs.org/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" integrity sha512-PcA2tsuGSF9cnySLHTLSh2qrQiJ70mn+r+Glzxv2TWZblxsxCC52BDlZoPCsz7STd9pN7EZetkWZBAvk4cgZdQ== dependencies: has-value "^0.3.1" @@ -6864,12 +7099,12 @@ unset-value@^1.0.0: upath@^1.1.1: version "1.2.0" - resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" + resolved "https://registry.npmjs.org/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== update-browserslist-db@^1.0.11: version "1.0.11" - resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz#9a2a641ad2907ae7b3616506f4b977851db5b940" + resolved "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz#9a2a641ad2907ae7b3616506f4b977851db5b940" integrity sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA== dependencies: escalade "^3.1.1" @@ -6877,34 +7112,34 
@@ update-browserslist-db@^1.0.11: uri-js@^4.2.2: version "4.4.1" - resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + resolved "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== dependencies: punycode "^2.1.0" urix@^0.1.0: version "0.1.0" - resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" + resolved "https://registry.npmjs.org/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" integrity sha512-Am1ousAhSLBeB9cG/7k7r2R0zj50uDRlZHPGbazid5s9rlF1F/QKYObEKSIunSjIOkJZqwRRLpvewjEkM7pSqg== use@^3.1.0: version "3.1.1" - resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" + resolved "https://registry.npmjs.org/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== util-deprecate@^1.0.1, util-deprecate@~1.0.1: version "1.0.2" - resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + resolved "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== v8-compile-cache-lib@^3.0.1: version "3.0.1" - resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf" + resolved "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf" integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg== v8-to-istanbul@^9.0.1: version "9.1.0" - resolved 
"https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-9.1.0.tgz#1b83ed4e397f58c85c266a570fc2558b5feb9265" + resolved "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.1.0.tgz#1b83ed4e397f58c85c266a570fc2558b5feb9265" integrity sha512-6z3GW9x8G1gd+JIIgQQQxXuiJtCXeAjp6RaPEPLv62mH3iPHPxV6W3robxtCzNErRo6ZwTmzWhsbNvjyEBKzKA== dependencies: "@jridgewell/trace-mapping" "^0.3.12" @@ -6913,14 +7148,14 @@ v8-to-istanbul@^9.0.1: v8flags@^3.2.0: version "3.2.0" - resolved "https://registry.yarnpkg.com/v8flags/-/v8flags-3.2.0.tgz#b243e3b4dfd731fa774e7492128109a0fe66d656" + resolved "https://registry.npmjs.org/v8flags/-/v8flags-3.2.0.tgz#b243e3b4dfd731fa774e7492128109a0fe66d656" integrity sha512-mH8etigqMfiGWdeXpaaqGfs6BndypxusHHcv2qSHyZkGEznCd/qAXCWWRzeowtL54147cktFOC4P5y+kl8d8Jg== dependencies: homedir-polyfill "^1.0.1" validate-npm-package-license@^3.0.1: version "3.0.4" - resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" + resolved "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== dependencies: spdx-correct "^3.0.0" @@ -6928,12 +7163,12 @@ validate-npm-package-license@^3.0.1: value-or-function@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/value-or-function/-/value-or-function-3.0.0.tgz#1c243a50b595c1be54a754bfece8563b9ff8d813" + resolved "https://registry.npmjs.org/value-or-function/-/value-or-function-3.0.0.tgz#1c243a50b595c1be54a754bfece8563b9ff8d813" integrity sha512-jdBB2FrWvQC/pnPtIqcLsMaQgjhdb6B7tk1MMyTKapox+tQZbdRP4uLxu/JY0t7fbfDCUMnuelzEYv5GsxHhdg== vinyl-buffer@1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/vinyl-buffer/-/vinyl-buffer-1.0.1.tgz#96c1a3479b8c5392542c612029013b5b27f88bbf" + resolved 
"https://registry.npmjs.org/vinyl-buffer/-/vinyl-buffer-1.0.1.tgz#96c1a3479b8c5392542c612029013b5b27f88bbf" integrity sha512-LRBE2/g3C1hSHL2k/FynSZcVTRhEw8sb08oKGt/0hukZXwrh2m8nfy+r5yLhGEk7eFFuclhyIuPct/Bxlxk6rg== dependencies: bl "^1.2.1" @@ -6941,7 +7176,7 @@ vinyl-buffer@1.0.1: vinyl-fs@^3.0.0, vinyl-fs@^3.0.3: version "3.0.3" - resolved "https://registry.yarnpkg.com/vinyl-fs/-/vinyl-fs-3.0.3.tgz#c85849405f67428feabbbd5c5dbdd64f47d31bc7" + resolved "https://registry.npmjs.org/vinyl-fs/-/vinyl-fs-3.0.3.tgz#c85849405f67428feabbbd5c5dbdd64f47d31bc7" integrity sha512-vIu34EkyNyJxmP0jscNzWBSygh7VWhqun6RmqVfXePrOwi9lhvRs//dOaGOTRUQr4tx7/zd26Tk5WeSVZitgng== dependencies: fs-mkdirp-stream "^1.0.0" @@ -6964,14 +7199,14 @@ vinyl-fs@^3.0.0, vinyl-fs@^3.0.3: vinyl-named@1.1.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/vinyl-named/-/vinyl-named-1.1.0.tgz#94e4fe741e38db0ec303e5b3d868b297a2deab66" + resolved "https://registry.npmjs.org/vinyl-named/-/vinyl-named-1.1.0.tgz#94e4fe741e38db0ec303e5b3d868b297a2deab66" integrity sha512-ElYBnsSw8Y1Hz11WPw0DFmi+TBNTEBhZ9zXaHluDSIZZnkFIGCjGRBpsW5QmbMMLwv+lRpUD3VbKdJCbNpct7Q== dependencies: through "^2.3.6" vinyl-source-stream@2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/vinyl-source-stream/-/vinyl-source-stream-2.0.0.tgz#f38a5afb9dd1e93b65d550469ac6182ac4f54b8e" + resolved "https://registry.npmjs.org/vinyl-source-stream/-/vinyl-source-stream-2.0.0.tgz#f38a5afb9dd1e93b65d550469ac6182ac4f54b8e" integrity sha512-Y5f1wRGajOfYukhv8biIGA7iZiY8UOIc3zJ6zcUNIbRG1BVuXzBsfSfe7MUJTttVkuy64k/pGQtJdd/aIt+hbw== dependencies: through2 "^2.0.3" @@ -6979,7 +7214,7 @@ vinyl-source-stream@2.0.0: vinyl-sourcemap@^1.1.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/vinyl-sourcemap/-/vinyl-sourcemap-1.1.0.tgz#92a800593a38703a8cdb11d8b300ad4be63b3e16" + resolved "https://registry.npmjs.org/vinyl-sourcemap/-/vinyl-sourcemap-1.1.0.tgz#92a800593a38703a8cdb11d8b300ad4be63b3e16" integrity 
sha512-NiibMgt6VJGJmyw7vtzhctDcfKch4e4n9TBeoWlirb7FMg9/1Ov9k+A5ZRAtywBpRPiyECvQRQllYM8dECegVA== dependencies: append-buffer "^1.0.2" @@ -6992,14 +7227,14 @@ vinyl-sourcemap@^1.1.0: vinyl-sourcemaps-apply@^0.2.0, vinyl-sourcemaps-apply@^0.2.1: version "0.2.1" - resolved "https://registry.yarnpkg.com/vinyl-sourcemaps-apply/-/vinyl-sourcemaps-apply-0.2.1.tgz#ab6549d61d172c2b1b87be5c508d239c8ef87705" + resolved "https://registry.npmjs.org/vinyl-sourcemaps-apply/-/vinyl-sourcemaps-apply-0.2.1.tgz#ab6549d61d172c2b1b87be5c508d239c8ef87705" integrity sha512-+oDh3KYZBoZC8hfocrbrxbLUeaYtQK7J5WU5Br9VqWqmCll3tFJqKp97GC9GmMsVIL0qnx2DgEDVxdo5EZ5sSw== dependencies: source-map "^0.5.1" vinyl@2.x, vinyl@^2.0.0, vinyl@^2.1.0, vinyl@^2.2.1: version "2.2.1" - resolved "https://registry.yarnpkg.com/vinyl/-/vinyl-2.2.1.tgz#23cfb8bbab5ece3803aa2c0a1eb28af7cbba1974" + resolved "https://registry.npmjs.org/vinyl/-/vinyl-2.2.1.tgz#23cfb8bbab5ece3803aa2c0a1eb28af7cbba1974" integrity sha512-LII3bXRFBZLlezoG5FfZVcXflZgWP/4dCwKtxd5ky9+LOtM4CS3bIRQsmR1KMnMW07jpE8fqR2lcxPZ+8sJIcw== dependencies: clone "^2.1.1" @@ -7011,7 +7246,7 @@ vinyl@2.x, vinyl@^2.0.0, vinyl@^2.1.0, vinyl@^2.2.1: vinyl@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/vinyl/-/vinyl-3.0.0.tgz#11e14732bf56e2faa98ffde5157fe6c13259ff30" + resolved "https://registry.npmjs.org/vinyl/-/vinyl-3.0.0.tgz#11e14732bf56e2faa98ffde5157fe6c13259ff30" integrity sha512-rC2VRfAVVCGEgjnxHUnpIVh3AGuk62rP3tqVrn+yab0YH7UULisC085+NYH+mnqf3Wx4SpSi1RQMwudL89N03g== dependencies: clone "^2.1.2" @@ -7022,24 +7257,24 @@ vinyl@^3.0.0: vscode-oniguruma@^1.7.0: version "1.7.0" - resolved "https://registry.yarnpkg.com/vscode-oniguruma/-/vscode-oniguruma-1.7.0.tgz#439bfad8fe71abd7798338d1cd3dc53a8beea94b" + resolved "https://registry.npmjs.org/vscode-oniguruma/-/vscode-oniguruma-1.7.0.tgz#439bfad8fe71abd7798338d1cd3dc53a8beea94b" integrity sha512-L9WMGRfrjOhgHSdOYgCt/yRMsXzLDJSL7BPrOZt73gU0iWO4mpqzqQzOz5srxqTvMBaR0XZTSrVWo4j55Rc6cA== 
vscode-textmate@^8.0.0: version "8.0.0" - resolved "https://registry.yarnpkg.com/vscode-textmate/-/vscode-textmate-8.0.0.tgz#2c7a3b1163ef0441097e0b5d6389cd5504b59e5d" + resolved "https://registry.npmjs.org/vscode-textmate/-/vscode-textmate-8.0.0.tgz#2c7a3b1163ef0441097e0b5d6389cd5504b59e5d" integrity sha512-AFbieoL7a5LMqcnOF04ji+rpXadgOXnZsxQr//r83kLPr7biP7am3g9zbaZIaBGwBRWeSvoMD4mgPdX3e4NWBg== walker@^1.0.8: version "1.0.8" - resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" + resolved "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== dependencies: makeerror "1.0.12" watchpack@^2.4.0: version "2.4.0" - resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d" + resolved "https://registry.npmjs.org/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d" integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg== dependencies: glob-to-regexp "^0.4.1" @@ -7047,12 +7282,12 @@ watchpack@^2.4.0: web-streams-polyfill@3.2.1: version "3.2.1" - resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz#71c2718c52b45fd49dbeee88634b3a60ceab42a6" + resolved "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz#71c2718c52b45fd49dbeee88634b3a60ceab42a6" integrity sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q== webpack-bundle-analyzer@4.9.1: version "4.9.1" - resolved "https://registry.yarnpkg.com/webpack-bundle-analyzer/-/webpack-bundle-analyzer-4.9.1.tgz#d00bbf3f17500c10985084f22f1a2bf45cb2f09d" + resolved "https://registry.npmjs.org/webpack-bundle-analyzer/-/webpack-bundle-analyzer-4.9.1.tgz#d00bbf3f17500c10985084f22f1a2bf45cb2f09d" integrity 
sha512-jnd6EoYrf9yMxCyYDPj8eutJvtjQNp8PHmni/e/ulydHBWhT5J3menXt3HEkScsu9YqMAcG4CfFjs3rj5pVU1w== dependencies: "@discoveryjs/json-ext" "0.5.7" @@ -7075,12 +7310,12 @@ webpack-bundle-analyzer@4.9.1: webpack-sources@^3.2.3: version "3.2.3" - resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-3.2.3.tgz#2d4daab8451fd4b240cc27055ff6a0c2ccea0cde" + resolved "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz#2d4daab8451fd4b240cc27055ff6a0c2ccea0cde" integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== webpack-stream@7.0.0: version "7.0.0" - resolved "https://registry.yarnpkg.com/webpack-stream/-/webpack-stream-7.0.0.tgz#e6a1edb9568198499af872678e95031752d72f00" + resolved "https://registry.npmjs.org/webpack-stream/-/webpack-stream-7.0.0.tgz#e6a1edb9568198499af872678e95031752d72f00" integrity sha512-XoAQTHyCaYMo6TS7Atv1HYhtmBgKiVLONJbzLBl2V3eibXQ2IT/MCRM841RW/r3vToKD5ivrTJFWgd/ghoxoRg== dependencies: fancy-log "^1.3.3" @@ -7094,7 +7329,7 @@ webpack-stream@7.0.0: webpack@5.86.0: version "5.86.0" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.86.0.tgz#b0eb81794b62aee0b7e7eb8c5073495217d9fc6d" + resolved "https://registry.npmjs.org/webpack/-/webpack-5.86.0.tgz#b0eb81794b62aee0b7e7eb8c5073495217d9fc6d" integrity sha512-3BOvworZ8SO/D4GVP+GoRC3fVeg5MO4vzmq8TJJEkdmopxyazGDxN8ClqN12uzrZW9Tv8EED8v5VSb6Sqyi0pg== dependencies: "@types/eslint-scope" "^3.7.3" @@ -7124,37 +7359,32 @@ webpack@5.86.0: which-module@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/which-module/-/which-module-1.0.0.tgz#bba63ca861948994ff307736089e3b96026c2a4f" + resolved "https://registry.npmjs.org/which-module/-/which-module-1.0.0.tgz#bba63ca861948994ff307736089e3b96026c2a4f" integrity sha512-F6+WgncZi/mJDrammbTuHe1q0R5hOXv/mBaiNA2TCNT/LTHusX0V+CJnj9XT8ki5ln2UZyyddDgHfCzyrOH7MQ== which@^1.2.14: version "1.3.1" - resolved 
"https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" + resolved "https://registry.npmjs.org/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== dependencies: isexe "^2.0.0" which@^2.0.1: version "2.0.2" - resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + resolved "https://registry.npmjs.org/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== dependencies: isexe "^2.0.0" -word-wrap@^1.2.3: - version "1.2.4" - resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.4.tgz#cb4b50ec9aca570abd1f52f33cd45b6c61739a9f" - integrity sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA== - wordwrapjs@^5.1.0: version "5.1.0" - resolved "https://registry.yarnpkg.com/wordwrapjs/-/wordwrapjs-5.1.0.tgz#4c4d20446dcc670b14fa115ef4f8fd9947af2b3a" + resolved "https://registry.npmjs.org/wordwrapjs/-/wordwrapjs-5.1.0.tgz#4c4d20446dcc670b14fa115ef4f8fd9947af2b3a" integrity sha512-JNjcULU2e4KJwUNv6CHgI46UvDGitb6dGryHajXTDiLgg1/RiGoPSDw4kZfYnwGtEXf2ZMeIewDQgFGzkCB2Sg== "wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0: name wrap-ansi-cjs version "7.0.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== dependencies: ansi-styles "^4.0.0" @@ -7163,7 +7393,7 @@ wordwrapjs@^5.1.0: wrap-ansi@^2.0.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" + 
resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" integrity sha512-vAaEaDM946gbNpH5pLVNR+vX2ht6n0Bt3GXwVB1AuAqZosOvHNF3P7wDnh8KLkSqgUh0uh77le7Owgoz+Z9XBw== dependencies: string-width "^1.0.1" @@ -7171,7 +7401,7 @@ wrap-ansi@^2.0.0: wrap-ansi@^6.2.0: version "6.2.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53" + resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53" integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA== dependencies: ansi-styles "^4.0.0" @@ -7180,7 +7410,7 @@ wrap-ansi@^6.2.0: wrap-ansi@^8.1.0: version "8.1.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214" + resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214" integrity sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ== dependencies: ansi-styles "^6.1.0" @@ -7189,12 +7419,12 @@ wrap-ansi@^8.1.0: wrappy@1: version "1.0.2" - resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + resolved "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== write-file-atomic@^4.0.2: version "4.0.2" - resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-4.0.2.tgz#a9df01ae5b77858a027fd2e80768ee433555fcfd" + resolved "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz#a9df01ae5b77858a027fd2e80768ee433555fcfd" integrity sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg== dependencies: imurmurhash "^0.1.4" @@ -7202,12 +7432,12 @@ 
write-file-atomic@^4.0.2: ws@^7.3.1: version "7.5.9" - resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" + resolved "https://registry.npmjs.org/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== xml2js@0.6.2: version "0.6.2" - resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.6.2.tgz#dd0b630083aa09c161e25a4d0901e2b2a929b499" + resolved "https://registry.npmjs.org/xml2js/-/xml2js-0.6.2.tgz#dd0b630083aa09c161e25a4d0901e2b2a929b499" integrity sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA== dependencies: sax ">=0.6.0" @@ -7215,47 +7445,47 @@ xml2js@0.6.2: xmlbuilder@~11.0.0: version "11.0.1" - resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3" + resolved "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3" integrity sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA== xtend@~4.0.0, xtend@~4.0.1: version "4.0.2" - resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" + resolved "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== y18n@^3.2.1: version "3.2.2" - resolved "https://registry.yarnpkg.com/y18n/-/y18n-3.2.2.tgz#85c901bd6470ce71fc4bb723ad209b70f7f28696" + resolved "https://registry.npmjs.org/y18n/-/y18n-3.2.2.tgz#85c901bd6470ce71fc4bb723ad209b70f7f28696" integrity sha512-uGZHXkHnhF0XeeAPgnKfPv1bgKAYyVvmNL1xlKsPYZPaIHxGti2hHqvOCQv71XMsLxu1QjergkqogUnms5D3YQ== y18n@^5.0.5: version "5.0.8" - resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + 
resolved "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== yallist@^3.0.2: version "3.1.1" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" + resolved "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== yallist@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + resolved "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== yargs-parser@>=5.0.0-security.0, yargs-parser@^21.0.1, yargs-parser@^21.1.1: version "21.1.1" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" + resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== yargs-parser@^20.2.9: version "20.2.9" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" + resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== yargs-parser@^5.0.1: version "5.0.1" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-5.0.1.tgz#7ede329c1d8cdbbe209bd25cdb990e9b1ebbb394" + resolved 
"https://registry.npmjs.org/yargs-parser/-/yargs-parser-5.0.1.tgz#7ede329c1d8cdbbe209bd25cdb990e9b1ebbb394" integrity sha512-wpav5XYiddjXxirPoCTUPbqM0PXvJ9hiBMvuJgInvo4/lAOTZzUprArw17q2O1P2+GHhbBr18/iQwjL5Z9BqfA== dependencies: camelcase "^3.0.0" @@ -7263,7 +7493,7 @@ yargs-parser@^5.0.1: yargs@^17.3.1: version "17.7.2" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" + resolved "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w== dependencies: cliui "^8.0.1" @@ -7276,7 +7506,7 @@ yargs@^17.3.1: yargs@^7.1.0: version "7.1.2" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-7.1.2.tgz#63a0a5d42143879fdbb30370741374e0641d55db" + resolved "https://registry.npmjs.org/yargs/-/yargs-7.1.2.tgz#63a0a5d42143879fdbb30370741374e0641d55db" integrity sha512-ZEjj/dQYQy0Zx0lgLMLR8QuaqTihnxirir7EwUHp1Axq4e3+k8jXU5K0VLbNvedv1f4EWtBonDIZm0NUr+jCcA== dependencies: camelcase "^3.0.0" @@ -7295,10 +7525,10 @@ yargs@^7.1.0: yn@3.1.1: version "3.1.1" - resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" + resolved "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== yocto-queue@^0.1.0: version "0.1.0" - resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" + resolved "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== diff --git a/matlab/CMakeLists.txt b/matlab/CMakeLists.txt index b7af37a278536..47d2acd613f8b 100644 --- a/matlab/CMakeLists.txt +++ b/matlab/CMakeLists.txt @@ 
-94,7 +94,7 @@ endfunction() set(CMAKE_CXX_STANDARD 17) -set(MLARROW_VERSION "14.0.0-SNAPSHOT") +set(MLARROW_VERSION "15.0.0-SNAPSHOT") string(REGEX MATCH "^[0-9]+\\.[0-9]+\\.[0-9]+" MLARROW_BASE_VERSION "${MLARROW_VERSION}") project(mlarrow VERSION "${MLARROW_BASE_VERSION}") diff --git a/matlab/doc/testing_guidelines_for_the_matlab_interface_to_apache_arrow.md b/matlab/doc/testing_guidelines_for_the_matlab_interface_to_apache_arrow.md new file mode 100644 index 0000000000000..ee4343155deba --- /dev/null +++ b/matlab/doc/testing_guidelines_for_the_matlab_interface_to_apache_arrow.md @@ -0,0 +1,144 @@ + + +# Testing Guidelines for the MATLAB Interface to Apache Arrow + +## Overview + +The goal of this document is to provide helpful guidelines for testing functionality within the [`matlab` directory](https://github.com/apache/arrow/tree/main/matlab) of the [`apache/arrow`](https://github.com/apache/arrow) repository. + +## Prerequisites + +Adding tests to the MATLAB interface helps to ensure quality and verify that the software works as intended. To run the MATLAB interface tests, the following software must be installed locally: + +1. [MATLAB](https://www.mathworks.com/products/get-matlab.html) +2. [MATLAB Interface to Apache Arrow](https://github.com/mathworks/arrow/tree/main/matlab) + +## Running Tests Locally + +To run the MATLAB interface tests on a local machine, start MATLAB and then `cd` to the directory under `matlab/test` where the test files of interest reside. After changing to the test directory, call the `runtests` command to run your tests: + +```matlab +% To run a single test file +>> runtests(testFileName) % For example: runtests("tArray.m") + +% To run all tests recursively under a test directory +>> runtests(testFolderName, IncludeSubfolders = true) % For example: runtests('matlab\test', IncludeSubfolders = true) +``` + +To learn more about `runtests`, please check [the documentation](https://www.mathworks.com/help/matlab/ref/runtests.html). 
+ +## Writing Tests + +All tests for the MATLAB interface should use the [MATLAB Class-Based Unit Testing Framework](https://www.mathworks.com/help/matlab/class-based-unit-tests.html) (i.e. they should use [`matlab.unittest.TestCase`](https://www.mathworks.com/help/matlab/ref/matlab.unittest.testcase-class.html)). + +Included below is a simple example of a MATLAB test: + +```matlab +classdef tStringArray < matlab.unittest.TestCase + methods(Test) + function TestBasicStringArray(testCase) + % Verify that an `arrow.array.StringArray` can be created from + % a basic MATLAB `string` array using the `arrow.array` gateway + % construction function. + + % Create a basic MATLAB `string` array. + matlabArray = ["A" ,"B", "C"]; + % Create an `arrow.array.StringArray` from the MATLAB `string` + % array by using the `arrow.array` gateway construction function. + arrowArray = arrow.array(matlabArray); + % Verify the class of `arrowArray` is `arrow.array.StringArray`. + testCase.verifyEqual(string(class(arrowArray)), "arrow.array.StringArray"); + % Verify `arrowArray` can be converted back into a MATLAB `string` array. + testCase.verifyEqual(arrowArray.toMATLAB, ["A"; "B"; "C"]); + end + end +end +``` + +More test examples can be found in the `matlab/test` directory. + +### Testing Best Practices + +- Use descriptive names for your test cases. +- Focus on testing one software "behavior" in each test case. +- Test with both "expected" and "unexpected" inputs. +- Add a comment at the beginning of each test case which describes what the test case is verifying. +- Treat test code like any other code (i.e. use clear variable names, write helper functions, make use of abstraction, etc.) +- Follow existing patterns when adding new test cases to an existing test class. + +## Test Case Design Guidelines + +When adding new tests, it is recommended to, at a minimum, ensure that real-world workflows work as expected. + +If a change cannot be easily tested at the MATLAB interface level (e.g. 
you would like to test the behavior of a C++ `Proxy` method), consider creating a `Proxy` instance manually from a MATLAB test case and calling relevant methods on the `Proxy`. + +An example of this approach to test C++ `Proxy` code can be found in [`matlab/test/arrow/tabular/tTabularInternal.m`](https://github.com/apache/arrow/blob/main/matlab/test/arrow/tabular/tTabularInternal.m). + +## Test Organization + +All tests for the MATLAB interface are located under the `matlab/test` directory. + +To make it easy to find the test files which correspond to specific source files, the MATLAB interface tests are organized using the following rules: + +- Source and test directories follow an (approximately) "parallel" structure. For example, the test directory [`test/arrow/array`](https://github.com/apache/arrow/tree/main/matlab/test/arrow/array) contains tests for the source directory [`src/matlab/+arrow/+array`](https://github.com/apache/arrow/tree/main/matlab/src/matlab/%2Barrow/%2Barray). +- One test file maps to one source file. For example, [`test/arrow/array/tArray.m`](https://github.com/apache/arrow/blob/main/matlab/test/arrow/array/tArray.m) is the test file for [`src/matlab/+arrow/+array/Array.m`](https://github.com/apache/arrow/blob/main/matlab/src/matlab/%2Barrow/%2Barray/Array.m). +- **Note**: In certain scenarios, it can make sense to diverge from these rules. For example, if a particular class is very complex and contains a lot of divergent functionality (which we generally try to avoid), we might choose to split the testing into several "focused" test files (e.g. one for testing the class display, one for testing the properties, and one for testing the methods). + +## Continuous Integration (CI) Workflows + +The Apache Arrow project uses [GitHub Actions](https://github.com/features/actions) as its primary [Continuous Integration (CI)](https://en.wikipedia.org/wiki/Continuous_integration) platform. 
+ +Creating a pull request that changes code in the MATLAB interface will automatically trigger [MATLAB CI Workflows](https://github.com/apache/arrow/actions/workflows/matlab.yml) to be run. These CI workflows will run all tests located under the `matlab/test` directory. + +Reviewers will generally expect the MATLAB CI Workflows to be passing successfully before they will consider merging a pull request. + +If you are having trouble understanding CI failures, you can always ask a reviewer or another community member for help. + +## Code Coverage Goals + +When making changes to the MATLAB interface, please do your best to add tests to cover all changed lines, conditions, and decisions. + +Before making a pull request, please check the code coverage for any changed code. If possible, it can be helpful to explicitly comment on the code coverage in your pull request description. + +Although we strive for high code coverage, it is understood that some code cannot be reasonably tested (e.g. an "un-reachable" branch in a `switch` condition on an enumeration value). + +### How to Check Code Coverage + +***Requirement:** MATLAB R2023b or later.* + +To generate a MATLAB code coverage report, the [`ReportCoverageFor`](https://www.mathworks.com/help/matlab/ref/runtests.html#mw_764c9db7-6823-439f-a77d-7fd25a03d20e) name-value pair argument can be supplied to the [`runtests`](https://www.mathworks.com/help/matlab/ref/runtests.html) command. Before generating the code coverage report, remember to add your source file directory to the [MATLAB Search Path](https://www.mathworks.com/help/matlab/matlab_env/what-is-the-matlab-search-path.html). + +```matlab +>> addpath( genpath() ) % `genpath` is needed to include all subdirectories and add them to MATLAB search path. 
+>> runtests(testFilePath/testFolderPath, 'ReportCoverageFor', sourceFilePath/sourceFolderPath, 'IncludeSubfolders', true/false); +``` + +Below is an example of running all tests under `matlab/test` and getting the MATLAB code coverage report for all files under `matlab/src/matlab`. + +```matlab +>> addpath(genpath("C:\TryCodeCoverage\arrow\matlab")) +>> runtests('C:\TryCodeCoverage\arrow\matlab\test', 'ReportCoverageFor', 'C:\TryCodeCoverage\arrow\matlab\src\matlab\', 'IncludeSubfolders', true); +``` + +## Tips + +### Debugging Code Coverage Results + +If the `runtests` command with `RepoCoverageFor` reports confusing or incorrect code coverage results, this could be due to caching or other issues. As a workaround, you can try setting a breakpoint in your source file, and then re-run the tests. This step can be used to verify that your source file is being executed by the tests. diff --git a/matlab/src/cpp/arrow/matlab/array/proxy/array.cc b/matlab/src/cpp/arrow/matlab/array/proxy/array.cc index 4e52c990d3eae..bc5ab093b4534 100644 --- a/matlab/src/cpp/arrow/matlab/array/proxy/array.cc +++ b/matlab/src/cpp/arrow/matlab/array/proxy/array.cc @@ -18,8 +18,10 @@ #include "arrow/util/utf8.h" #include "arrow/matlab/array/proxy/array.h" +#include "arrow/matlab/array/proxy/wrap.h" #include "arrow/matlab/bit/unpack.h" #include "arrow/matlab/error/error.h" +#include "arrow/matlab/index/validate.h" #include "arrow/matlab/type/proxy/wrap.h" #include "arrow/pretty_print.h" #include "arrow/type_traits.h" @@ -38,7 +40,7 @@ namespace arrow::matlab::array::proxy { REGISTER_METHOD(Array, getValid); REGISTER_METHOD(Array, getType); REGISTER_METHOD(Array, isEqual); - + REGISTER_METHOD(Array, slice); } std::shared_ptr Array::unwrap() { @@ -100,7 +102,7 @@ namespace arrow::matlab::array::proxy { } auto validity_bitmap = array->null_bitmap(); - auto valid_elements_mda = bit::unpack(validity_bitmap, array_length); + auto valid_elements_mda = bit::unpack(validity_bitmap, array_length, 
array->offset()); context.outputs[0] = valid_elements_mda; } @@ -144,4 +146,36 @@ namespace arrow::matlab::array::proxy { mda::ArrayFactory factory; context.outputs[0] = factory.createScalar(is_equal); } + + void Array::slice(libmexclass::proxy::method::Context& context) { + namespace mda = ::matlab::data; + + mda::StructArray opts = context.inputs[0]; + const mda::TypedArray offset_mda = opts[0]["Offset"]; + const mda::TypedArray length_mda = opts[0]["Length"]; + + const auto matlab_offset = int64_t(offset_mda[0]); + MATLAB_ERROR_IF_NOT_OK_WITH_CONTEXT(arrow::matlab::index::validateSliceOffset(matlab_offset), + context, error::ARRAY_SLICE_NON_POSITIVE_OFFSET); + + // Note: MATLAB uses 1-based indexing, so subtract 1. + const int64_t offset = matlab_offset - 1; + const int64_t length = int64_t(length_mda[0]); + MATLAB_ERROR_IF_NOT_OK_WITH_CONTEXT(arrow::matlab::index::validateSliceLength(length), + context, error::ARRAY_SLICE_NEGATIVE_LENGTH); + + auto sliced_array = array->Slice(offset, length); + const auto type_id = static_cast(sliced_array->type_id()); + MATLAB_ASSIGN_OR_ERROR_WITH_CONTEXT(auto sliced_array_proxy, + array::proxy::wrap(sliced_array), + context, error::ARRAY_SLICE_FAILED_TO_CREATE_ARRAY_PROXY); + + const auto proxy_id = libmexclass::proxy::ProxyManager::manageProxy(sliced_array_proxy); + + mda::ArrayFactory factory; + mda::StructArray output = factory.createStructArray({1, 1}, {"ProxyID", "TypeID"}); + output[0]["ProxyID"] = factory.createScalar(proxy_id); + output[0]["TypeID"] = factory.createScalar(type_id); + context.outputs[0] = output; + } } diff --git a/matlab/src/cpp/arrow/matlab/array/proxy/array.h b/matlab/src/cpp/arrow/matlab/array/proxy/array.h index 0ab7b279bc92e..1e3164ed01a72 100644 --- a/matlab/src/cpp/arrow/matlab/array/proxy/array.h +++ b/matlab/src/cpp/arrow/matlab/array/proxy/array.h @@ -44,6 +44,8 @@ class Array : public libmexclass::proxy::Proxy { void isEqual(libmexclass::proxy::method::Context& context); + void 
slice(libmexclass::proxy::method::Context& context); + std::shared_ptr array; }; diff --git a/matlab/src/cpp/arrow/matlab/array/proxy/boolean_array.cc b/matlab/src/cpp/arrow/matlab/array/proxy/boolean_array.cc index 6a6e478274823..da3560ce522f3 100644 --- a/matlab/src/cpp/arrow/matlab/array/proxy/boolean_array.cc +++ b/matlab/src/cpp/arrow/matlab/array/proxy/boolean_array.cc @@ -53,7 +53,7 @@ namespace arrow::matlab::array::proxy { void BooleanArray::toMATLAB(libmexclass::proxy::method::Context& context) { auto array_length = array->length(); auto packed_logical_data_buffer = std::static_pointer_cast(array)->values(); - auto logical_array_mda = bit::unpack(packed_logical_data_buffer, array_length); + auto logical_array_mda = bit::unpack(packed_logical_data_buffer, array_length, array->offset()); context.outputs[0] = logical_array_mda; } } diff --git a/matlab/src/cpp/arrow/matlab/array/proxy/list_array.cc b/matlab/src/cpp/arrow/matlab/array/proxy/list_array.cc index fc75e55dd6012..941e658c25127 100644 --- a/matlab/src/cpp/arrow/matlab/array/proxy/list_array.cc +++ b/matlab/src/cpp/arrow/matlab/array/proxy/list_array.cc @@ -15,6 +15,7 @@ // specific language governing permissions and limitations // under the License. 
+#include "arrow/matlab/array/validation_mode.h" #include "arrow/matlab/array/proxy/list_array.h" #include "arrow/matlab/array/proxy/numeric_array.h" #include "arrow/matlab/array/proxy/wrap.h" @@ -26,6 +27,7 @@ namespace arrow::matlab::array::proxy { ListArray::ListArray(std::shared_ptr list_array) : proxy::Array{std::move(list_array)} { REGISTER_METHOD(ListArray, getValues); REGISTER_METHOD(ListArray, getOffsets); + REGISTER_METHOD(ListArray, validate); } libmexclass::proxy::MakeResult ListArray::make(const libmexclass::proxy::FunctionArguments& constructor_arguments) { @@ -100,4 +102,38 @@ namespace arrow::matlab::array::proxy { mda::ArrayFactory factory; context.outputs[0] = factory.createScalar(offsets_int32_array_proxy_id); } + + void ListArray::validate(libmexclass::proxy::method::Context& context) { + namespace mda = ::matlab::data; + mda::StructArray args = context.inputs[0]; + const mda::TypedArray validation_mode_mda = args[0]["ValidationMode"]; + const auto validation_mode_integer = uint8_t(validation_mode_mda[0]); + // Convert integer representation to ValidationMode enum. + const auto validation_mode = static_cast(validation_mode_integer); + switch (validation_mode) { + case ValidationMode::None: { + // Do nothing. + break; + } + case ValidationMode::Minimal: { + MATLAB_ERROR_IF_NOT_OK_WITH_CONTEXT(array->Validate(), + context, + error::ARRAY_VALIDATE_MINIMAL_FAILED); + break; + } + case ValidationMode::Full: { + MATLAB_ERROR_IF_NOT_OK_WITH_CONTEXT(array->ValidateFull(), + context, + error::ARRAY_VALIDATE_FULL_FAILED); + break; + } + default: { + // Throw an error if an unsupported enumeration value is provided. 
+ const auto msg = "Unsupported ValidationMode enumeration value: " + std::to_string(validation_mode_integer); + context.error = libmexclass::error::Error{error::ARRAY_VALIDATE_UNSUPPORTED_ENUM, msg}; + return; + } + } + } + } diff --git a/matlab/src/cpp/arrow/matlab/array/proxy/list_array.h b/matlab/src/cpp/arrow/matlab/array/proxy/list_array.h index 8db6b6bf1d632..1f34b11406594 100644 --- a/matlab/src/cpp/arrow/matlab/array/proxy/list_array.h +++ b/matlab/src/cpp/arrow/matlab/array/proxy/list_array.h @@ -32,6 +32,7 @@ class ListArray : public arrow::matlab::array::proxy::Array { protected: void getValues(libmexclass::proxy::method::Context& context); void getOffsets(libmexclass::proxy::method::Context& context); + void validate(libmexclass::proxy::method::Context& context); }; diff --git a/matlab/src/cpp/arrow/matlab/array/validation_mode.h b/matlab/src/cpp/arrow/matlab/array/validation_mode.h new file mode 100644 index 0000000000000..92e10f47aa4e7 --- /dev/null +++ b/matlab/src/cpp/arrow/matlab/array/validation_mode.h @@ -0,0 +1,30 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +#pragma once + +#include + +namespace arrow::matlab::array { + + enum class ValidationMode : uint8_t { + None = 0, + Minimal = 1, + Full = 2 + }; + +} diff --git a/matlab/src/cpp/arrow/matlab/bit/unpack.cc b/matlab/src/cpp/arrow/matlab/bit/unpack.cc index 7135d593cf752..6cc88d48ede43 100644 --- a/matlab/src/cpp/arrow/matlab/bit/unpack.cc +++ b/matlab/src/cpp/arrow/matlab/bit/unpack.cc @@ -20,7 +20,7 @@ #include "arrow/util/bitmap_visit.h" namespace arrow::matlab::bit { - ::matlab::data::TypedArray unpack(const std::shared_ptr& packed_buffer, int64_t length) { + ::matlab::data::TypedArray unpack(const std::shared_ptr& packed_buffer, int64_t length, int64_t start_offset) { const auto packed_buffer_ptr = packed_buffer->data(); ::matlab::data::ArrayFactory factory; @@ -31,7 +31,6 @@ namespace arrow::matlab::bit { auto unpacked_buffer_ptr = unpacked_buffer.get(); auto visitFcn = [&](const bool is_valid) { *unpacked_buffer_ptr++ = is_valid; }; - const int64_t start_offset = 0; arrow::internal::VisitBitsUnrolled(packed_buffer_ptr, start_offset, length, visitFcn); ::matlab::data::TypedArray unpacked_matlab_logical_Array = factory.createArrayFromBuffer({array_length, 1}, std::move(unpacked_buffer)); diff --git a/matlab/src/cpp/arrow/matlab/bit/unpack.h b/matlab/src/cpp/arrow/matlab/bit/unpack.h index b6debb85f837b..6cd633e76fa56 100644 --- a/matlab/src/cpp/arrow/matlab/bit/unpack.h +++ b/matlab/src/cpp/arrow/matlab/bit/unpack.h @@ -22,6 +22,6 @@ #include "MatlabDataArray.hpp" namespace arrow::matlab::bit { - ::matlab::data::TypedArray unpack(const std::shared_ptr& packed_buffer, int64_t length); + ::matlab::data::TypedArray unpack(const std::shared_ptr& packed_buffer, int64_t length, int64_t start_offset); const uint8_t* extract_ptr(const ::matlab::data::TypedArray& unpacked_validity_bitmap); } diff --git a/matlab/src/cpp/arrow/matlab/error/error.h b/matlab/src/cpp/arrow/matlab/error/error.h index 2d8f5c432c96e..e6be411b62a05 100644 --- 
a/matlab/src/cpp/arrow/matlab/error/error.h +++ b/matlab/src/cpp/arrow/matlab/error/error.h @@ -202,4 +202,12 @@ namespace arrow::matlab::error { static const char* INDEX_OUT_OF_RANGE = "arrow:index:OutOfRange"; static const char* BUFFER_VIEW_OR_COPY_FAILED = "arrow:buffer:ViewOrCopyFailed"; static const char* ARRAY_PRETTY_PRINT_FAILED = "arrow:array:PrettyPrintFailed"; + static const char* TABULAR_GET_ROW_AS_STRING_FAILED = "arrow:tabular:GetRowAsStringFailed"; + static const char* ARRAY_VALIDATE_MINIMAL_FAILED = "arrow:array:ValidateMinimalFailed"; + static const char* ARRAY_VALIDATE_FULL_FAILED = "arrow:array:ValidateFullFailed"; + static const char* ARRAY_VALIDATE_UNSUPPORTED_ENUM = "arrow:array:ValidateUnsupportedEnum"; + static const char* ARRAY_SLICE_NON_POSITIVE_OFFSET = "arrow:array:slice:NonPositiveOffset"; + static const char* ARRAY_SLICE_NEGATIVE_LENGTH = "arrow:array:slice:NegativeLength"; + static const char* ARRAY_SLICE_FAILED_TO_CREATE_ARRAY_PROXY = "arrow:array:slice:FailedToCreateArrayProxy"; + } diff --git a/matlab/src/cpp/arrow/matlab/index/validate.cc b/matlab/src/cpp/arrow/matlab/index/validate.cc index b24653f1b814c..84e8e424e171f 100644 --- a/matlab/src/cpp/arrow/matlab/index/validate.cc +++ b/matlab/src/cpp/arrow/matlab/index/validate.cc @@ -53,4 +53,20 @@ namespace arrow::matlab::index { } return arrow::Status::OK(); } + + arrow::Status validateSliceOffset(const int64_t matlab_offset) { + if (matlab_offset < 1) { + const std::string msg = "Slice offset must be positive"; + return arrow::Status::Invalid(std::move(msg)); + } + return arrow::Status::OK(); + } + + arrow::Status validateSliceLength(const int64_t length) { + if (length < 0) { + const std::string msg = "Slice length must be nonnegative"; + return arrow::Status::Invalid(std::move(msg)); + } + return arrow::Status::OK(); + } } \ No newline at end of file diff --git a/matlab/src/cpp/arrow/matlab/index/validate.h b/matlab/src/cpp/arrow/matlab/index/validate.h index 
40e109c19e9ef..2fa88ef8f1b5a 100644 --- a/matlab/src/cpp/arrow/matlab/index/validate.h +++ b/matlab/src/cpp/arrow/matlab/index/validate.h @@ -23,4 +23,7 @@ namespace arrow::matlab::index { arrow::Status validateNonEmptyContainer(const int32_t num_fields); arrow::Status validateInRange(const int32_t matlab_index, const int32_t num_fields); + arrow::Status validateSliceOffset(const int64_t matlab_offset); + arrow::Status validateSliceLength(const int64_t length); + } \ No newline at end of file diff --git a/matlab/src/cpp/arrow/matlab/tabular/get_row_as_string.h b/matlab/src/cpp/arrow/matlab/tabular/get_row_as_string.h new file mode 100644 index 0000000000000..824b6c19a7109 --- /dev/null +++ b/matlab/src/cpp/arrow/matlab/tabular/get_row_as_string.h @@ -0,0 +1,77 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +#pragma once + +#include "arrow/pretty_print.h" + +#include + +namespace arrow::matlab::tabular { + + namespace { + arrow::PrettyPrintOptions make_pretty_print_options() { + auto opts = arrow::PrettyPrintOptions::Defaults(); + opts.skip_new_lines = true; + opts.array_delimiters.open = ""; + opts.array_delimiters.close = ""; + opts.chunked_array_delimiters.open = ""; + opts.chunked_array_delimiters.close = ""; + return opts; + } + } + + template + arrow::Result get_row_as_string(const std::shared_ptr& tabular_object, const int64_t matlab_row_index) { + std::stringstream ss; + const int64_t row_index = matlab_row_index - 1; + if (row_index >= tabular_object->num_rows() || row_index < 0) { + ss << "Invalid Row Index: " << matlab_row_index; + return arrow::Status::Invalid(ss.str()); + } + + const auto opts = make_pretty_print_options(); + const auto num_columns = tabular_object->num_columns(); + const auto& columns = tabular_object->columns(); + + for (int32_t i = 0; i < num_columns; ++i) { + const auto& column = columns[i]; + const auto type_id = column->type()->id(); + if (arrow::is_primitive(type_id) || arrow::is_string(type_id)) { + auto slice = column->Slice(row_index, 1); + ARROW_RETURN_NOT_OK(arrow::PrettyPrint(*slice, opts, &ss)); + } else if (type_id == arrow::Type::type::STRUCT) { + // Use as a placeholder since we don't have a good + // way to display StructArray elements horiztonally on screen. + ss << ""; + } else if (type_id == arrow::Type::type::LIST) { + // Use as a placeholder since we don't have a good + // way to display ListArray elements horiztonally on screen. + ss << ""; + } else { + return arrow::Status::NotImplemented("Datatype " + column->type()->ToString() + "is not currently supported for display."); + } + + if (i + 1 < num_columns) { + // Only add the delimiter if there is at least + // one more element to print. 
+ ss << " | "; + } + } + return ss.str(); + } +} \ No newline at end of file diff --git a/matlab/src/cpp/arrow/matlab/tabular/proxy/record_batch.cc b/matlab/src/cpp/arrow/matlab/tabular/proxy/record_batch.cc index 679c7382f6532..7d24ad01d7e73 100644 --- a/matlab/src/cpp/arrow/matlab/tabular/proxy/record_batch.cc +++ b/matlab/src/cpp/arrow/matlab/tabular/proxy/record_batch.cc @@ -23,6 +23,7 @@ #include "arrow/matlab/error/error.h" #include "arrow/matlab/tabular/proxy/record_batch.h" #include "arrow/matlab/tabular/proxy/schema.h" +#include "arrow/matlab/tabular/get_row_as_string.h" #include "arrow/type.h" #include "arrow/util/utf8.h" @@ -58,6 +59,7 @@ namespace arrow::matlab::tabular::proxy { REGISTER_METHOD(RecordBatch, getColumnByIndex); REGISTER_METHOD(RecordBatch, getColumnByName); REGISTER_METHOD(RecordBatch, getSchema); + REGISTER_METHOD(RecordBatch, getRowAsString); } std::shared_ptr RecordBatch::unwrap() { @@ -218,4 +220,20 @@ namespace arrow::matlab::tabular::proxy { context.outputs[0] = schema_proxy_id_mda; } + void RecordBatch::getRowAsString(libmexclass::proxy::method::Context& context) { + namespace mda = ::matlab::data; + using namespace libmexclass::proxy; + mda::ArrayFactory factory; + + mda::StructArray args = context.inputs[0]; + const mda::TypedArray index_mda = args[0]["Index"]; + const auto matlab_row_index = int64_t(index_mda[0]); + + MATLAB_ASSIGN_OR_ERROR_WITH_CONTEXT(auto row_str_utf8, arrow::matlab::tabular::get_row_as_string(record_batch, matlab_row_index), + context, error::TABULAR_GET_ROW_AS_STRING_FAILED); + MATLAB_ASSIGN_OR_ERROR_WITH_CONTEXT(auto row_str_utf16, arrow::util::UTF8StringToUTF16(row_str_utf8), + context, error::UNICODE_CONVERSION_ERROR_ID); + context.outputs[0] = factory.createScalar(row_str_utf16); + } + } diff --git a/matlab/src/cpp/arrow/matlab/tabular/proxy/record_batch.h b/matlab/src/cpp/arrow/matlab/tabular/proxy/record_batch.h index b136ad1ea5db1..c417d8198f9ad 100644 --- 
a/matlab/src/cpp/arrow/matlab/tabular/proxy/record_batch.h +++ b/matlab/src/cpp/arrow/matlab/tabular/proxy/record_batch.h @@ -41,6 +41,7 @@ namespace arrow::matlab::tabular::proxy { void getColumnByIndex(libmexclass::proxy::method::Context& context); void getColumnByName(libmexclass::proxy::method::Context& context); void getSchema(libmexclass::proxy::method::Context& context); + void getRowAsString(libmexclass::proxy::method::Context& context); std::shared_ptr record_batch; }; diff --git a/matlab/src/cpp/arrow/matlab/tabular/proxy/table.cc b/matlab/src/cpp/arrow/matlab/tabular/proxy/table.cc index 228e28dad9e9c..cf628407b1742 100644 --- a/matlab/src/cpp/arrow/matlab/tabular/proxy/table.cc +++ b/matlab/src/cpp/arrow/matlab/tabular/proxy/table.cc @@ -24,6 +24,8 @@ #include "arrow/matlab/error/error.h" #include "arrow/matlab/tabular/proxy/table.h" #include "arrow/matlab/tabular/proxy/schema.h" +#include "arrow/matlab/tabular/get_row_as_string.h" + #include "arrow/type.h" #include "arrow/util/utf8.h" @@ -57,6 +59,7 @@ namespace arrow::matlab::tabular::proxy { REGISTER_METHOD(Table, getSchema); REGISTER_METHOD(Table, getColumnByIndex); REGISTER_METHOD(Table, getColumnByName); + REGISTER_METHOD(Table, getRowAsString); } std::shared_ptr Table::unwrap() { @@ -212,4 +215,20 @@ namespace arrow::matlab::tabular::proxy { context.outputs[0] = chunked_array_proxy_id_mda; } + void Table::getRowAsString(libmexclass::proxy::method::Context& context) { + namespace mda = ::matlab::data; + using namespace libmexclass::proxy; + mda::ArrayFactory factory; + + mda::StructArray args = context.inputs[0]; + const mda::TypedArray index_mda = args[0]["Index"]; + const auto matlab_row_index = int64_t(index_mda[0]); + + MATLAB_ASSIGN_OR_ERROR_WITH_CONTEXT(auto row_str_utf8, arrow::matlab::tabular::get_row_as_string(table, matlab_row_index), + context, error::TABULAR_GET_ROW_AS_STRING_FAILED); + MATLAB_ASSIGN_OR_ERROR_WITH_CONTEXT(auto row_str_utf16, 
arrow::util::UTF8StringToUTF16(row_str_utf8), + context, error::UNICODE_CONVERSION_ERROR_ID); + context.outputs[0] = factory.createScalar(row_str_utf16); + } + } diff --git a/matlab/src/cpp/arrow/matlab/tabular/proxy/table.h b/matlab/src/cpp/arrow/matlab/tabular/proxy/table.h index dae86a180b7a6..bfcea15bbd1c3 100644 --- a/matlab/src/cpp/arrow/matlab/tabular/proxy/table.h +++ b/matlab/src/cpp/arrow/matlab/tabular/proxy/table.h @@ -41,6 +41,7 @@ namespace arrow::matlab::tabular::proxy { void getSchema(libmexclass::proxy::method::Context& context); void getColumnByIndex(libmexclass::proxy::method::Context& context); void getColumnByName(libmexclass::proxy::method::Context& context); + void getRowAsString(libmexclass::proxy::method::Context& context); std::shared_ptr table; }; diff --git a/matlab/src/matlab/+arrow/+array/+internal/+display/getHeader.m b/matlab/src/matlab/+arrow/+array/+internal/+display/getHeader.m index 5c8704d5bf2a4..85301ddefaada 100644 --- a/matlab/src/matlab/+arrow/+array/+internal/+display/getHeader.m +++ b/matlab/src/matlab/+arrow/+array/+internal/+display/getHeader.m @@ -16,7 +16,7 @@ % permissions and limitations under the License. function header = getHeader(className, numElements, numNulls) - import arrow.array.internal.display.pluralizeStringIfNeeded + import arrow.internal.display.pluralizeStringIfNeeded elementString = pluralizeStringIfNeeded(numElements, "element"); nullString = pluralizeStringIfNeeded(numNulls, "null value"); diff --git a/matlab/src/matlab/+arrow/+array/+internal/+list/ClassTypeValidator.m b/matlab/src/matlab/+arrow/+array/+internal/+list/ClassTypeValidator.m new file mode 100644 index 0000000000000..419560b8d566b --- /dev/null +++ b/matlab/src/matlab/+arrow/+array/+internal/+list/ClassTypeValidator.m @@ -0,0 +1,47 @@ +% Licensed to the Apache Software Foundation (ASF) under one or more +% contributor license agreements. 
See the NOTICE file distributed with +% this work for additional information regarding copyright ownership. +% The ASF licenses this file to you under the Apache License, Version +% 2.0 (the "License"); you may not use this file except in compliance +% with the License. You may obtain a copy of the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, +% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +% implied. See the License for the specific language governing +% permissions and limitations under the License. + +classdef ClassTypeValidator < arrow.array.internal.list.Validator + + properties (GetAccess=public, SetAccess=private) + ClassName(1, 1) string + end + + methods + function obj = ClassTypeValidator(data) + obj.ClassName = class(data); + end + + function validateElement(obj, element) + if ~isa(element, obj.ClassName) + id = "arrow:array:list:ClassTypeMismatch"; + fmt = "Expected all cell array elements to have class type " + ... + """%s"", but encountered an element whose class type is" + ... + " ""%s""."; + msg = compose(fmt, obj.ClassName, class(element)); + error(id, msg); + end + end + + function length = getElementLength(~, element) + length = numel(element); + end + + function C = reshapeCellElements(~, C) + C = cellfun(@(elem) reshape(elem, [], 1), C, UniformOutput=false); + end + end +end + diff --git a/matlab/src/matlab/+arrow/+array/+internal/+list/DatetimeValidator.m b/matlab/src/matlab/+arrow/+array/+internal/+list/DatetimeValidator.m new file mode 100644 index 0000000000000..f8665d822795e --- /dev/null +++ b/matlab/src/matlab/+arrow/+array/+internal/+list/DatetimeValidator.m @@ -0,0 +1,49 @@ +% Licensed to the Apache Software Foundation (ASF) under one or more +% contributor license agreements. 
See the NOTICE file distributed with +% this work for additional information regarding copyright ownership. +% The ASF licenses this file to you under the Apache License, Version +% 2.0 (the "License"); you may not use this file except in compliance +% with the License. You may obtain a copy of the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, +% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +% implied. See the License for the specific language governing +% permissions and limitations under the License. + +classdef DatetimeValidator < arrow.array.internal.list.ClassTypeValidator + + properties (GetAccess=public, SetAccess=private) + Zoned (1, 1) logical = false + end + + methods + function obj = DatetimeValidator(date) + arguments + date(:, :) datetime + end + obj@arrow.array.internal.list.ClassTypeValidator(date); + obj.Zoned = ~isempty(date.TimeZone); + end + + function validateElement(obj, element) + validateElement@arrow.array.internal.list.ClassTypeValidator(obj, element); + % zoned and obj.Zoned must be equal because zoned + % and unzoned datetimes cannot be concatenated together. + zoned = ~isempty(element.TimeZone); + if obj.Zoned && ~zoned + errorID = "arrow:array:list:ExpectedZonedDatetime"; + msg = "Expected all datetime elements in the cell array to " + ... + "have a time zone but encountered a datetime array without a time zone"; + error(errorID, msg); + elseif ~obj.Zoned && zoned + errorID = "arrow:array:list:ExpectedUnzonedDatetime"; + msg = "Expected all datetime elements in the cell array to " + ... 
+ "not have a time zone but encountered a datetime array with a time zone"; + error(errorID, msg); + end + end + end +end \ No newline at end of file diff --git a/matlab/src/matlab/+arrow/+array/+internal/+list/TableValidator.m b/matlab/src/matlab/+arrow/+array/+internal/+list/TableValidator.m new file mode 100644 index 0000000000000..d62be5fb31fdb --- /dev/null +++ b/matlab/src/matlab/+arrow/+array/+internal/+list/TableValidator.m @@ -0,0 +1,90 @@ +% Licensed to the Apache Software Foundation (ASF) under one or more +% contributor license agreements. See the NOTICE file distributed with +% this work for additional information regarding copyright ownership. +% The ASF licenses this file to you under the Apache License, Version +% 2.0 (the "License"); you may not use this file except in compliance +% with the License. You may obtain a copy of the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, +% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +% implied. See the License for the specific language governing +% permissions and limitations under the License. + +classdef TableValidator < arrow.array.internal.list.ClassTypeValidator + + properties (GetAccess=public, SetAccess=private) + VariableNames string = string.empty(1, 0) + VariableValidators arrow.array.internal.list.Validator = arrow.array.internal.list.Validator.empty(1, 0) + end + + methods + function obj = TableValidator(T) + arguments + T table + end + + numVars = width(T); + + if (numVars == 0) + error("arrow:array:list:TableWithZeroVariables", ... 
+ "Expected table to have at least one variable."); + end + + obj@arrow.array.internal.list.ClassTypeValidator(table); + obj.VariableNames = string(T.Properties.VariableNames); + validators = cell([1 numVars]); + for ii = 1:numVars + validators{ii} = arrow.array.internal.list.createValidator(T.(ii)); + end + + obj.VariableValidators = [validators{:}]; + end + + function validateElement(obj, element) + % Verify element is a table + validateElement@arrow.array.internal.list.ClassTypeValidator(obj, element); + + % Validate element has the expected number of variables + numVars = numel(obj.VariableNames); + if width(element) ~= numVars + id = "arrow:array:list:NumVariablesMismatch"; + msg = "Expected all tables in the cell array to have " + ... + string(numVars) + " variables."; + error(id, msg); + end + + % Validate element has the expected variable names + if ~all(obj.VariableNames == string(element.Properties.VariableNames)) + id = "arrow:array:list:VariableNamesMismatch"; + msg = "Expected all tables in the cell array to have the " + ... + "same variable names."; + error(id, msg); + end + + for ii=1:numVars + var = element.(ii); + + % In order to concatenate tables together later, require + % all non-tabular variables to be columnar or empty. 
+ if ~istable(var) && (~iscolumn(var) && ~isempty(var)) + id = "arrow:array:list:NonTabularVariablesMustBeColumnar"; + msg = "Expected all variables except for nested tables to be columnar."; + error(id, msg); + end + + obj.VariableValidators(ii).validateElement(var); + end + end + + function length = getElementLength(~, element) + length = height(element); + end + + function C = reshapeCellElements(~, C) + % NO-OP for cell array of tables + end + end +end \ No newline at end of file diff --git a/matlab/src/matlab/+arrow/+array/+internal/+list/Validator.m b/matlab/src/matlab/+arrow/+array/+internal/+list/Validator.m new file mode 100644 index 0000000000000..a8217f0afae67 --- /dev/null +++ b/matlab/src/matlab/+arrow/+array/+internal/+list/Validator.m @@ -0,0 +1,28 @@ +%VALIDATOR Defines interface used to validate MATLAB cell arrays +%can be converted into Arrow List arrays. + +% Licensed to the Apache Software Foundation (ASF) under one or more +% contributor license agreements. See the NOTICE file distributed with +% this work for additional information regarding copyright ownership. +% The ASF licenses this file to you under the Apache License, Version +% 2.0 (the "License"); you may not use this file except in compliance +% with the License. You may obtain a copy of the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, +% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +% implied. See the License for the specific language governing +% permissions and limitations under the License. 
+ +classdef Validator < matlab.mixin.Heterogeneous + + methods (Abstract) + tf = validateElement(obj, element) + + length = getElementLength(obj, element) + + C = reshapeCellElements(obj, element) + end +end \ No newline at end of file diff --git a/matlab/src/matlab/+arrow/+array/+internal/+list/createValidator.m b/matlab/src/matlab/+arrow/+array/+internal/+list/createValidator.m new file mode 100644 index 0000000000000..856143f8e574f --- /dev/null +++ b/matlab/src/matlab/+arrow/+array/+internal/+list/createValidator.m @@ -0,0 +1,42 @@ +% Licensed to the Apache Software Foundation (ASF) under one or more +% contributor license agreements. See the NOTICE file distributed with +% this work for additional information regarding copyright ownership. +% The ASF licenses this file to you under the Apache License, Version +% 2.0 (the "License"); you may not use this file except in compliance +% with the License. You may obtain a copy of the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, +% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +% implied. See the License for the specific language governing +% permissions and limitations under the License. 
+ +function validator = createValidator(data) + import arrow.array.internal.list.ClassTypeValidator + import arrow.array.internal.list.DatetimeValidator + import arrow.array.internal.list.TableValidator + + if isnumeric(data) + validator = ClassTypeValidator(data); + elseif islogical(data) + validator = ClassTypeValidator(data); + elseif isduration(data) + validator = ClassTypeValidator(data); + elseif isstring(data) + validator = ClassTypeValidator(data); + elseif iscell(data) + validator = ClassTypeValidator(data); + elseif isdatetime(data) + validator = DatetimeValidator(data); + elseif istable(data) + validator = TableValidator(data); + else + errorID = "arrow:array:list:UnsupportedDataType"; + msg = "Unable to create a ListArray from a cell array containing " + class(data) + " values."; + error(errorID, msg); + end + +end + diff --git a/matlab/src/matlab/+arrow/+array/+internal/+list/findFirstNonMissingElement.m b/matlab/src/matlab/+arrow/+array/+internal/+list/findFirstNonMissingElement.m new file mode 100644 index 0000000000000..5b29443568e6e --- /dev/null +++ b/matlab/src/matlab/+arrow/+array/+internal/+list/findFirstNonMissingElement.m @@ -0,0 +1,24 @@ +% Licensed to the Apache Software Foundation (ASF) under one or more +% contributor license agreements. See the NOTICE file distributed with +% this work for additional information regarding copyright ownership. +% The ASF licenses this file to you under the Apache License, Version +% 2.0 (the "License"); you may not use this file except in compliance +% with the License. You may obtain a copy of the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, +% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +% implied. See the License for the specific language governing +% permissions and limitations under the License. 
+ +function idx = findFirstNonMissingElement(C) + idx = -1; + for ii=1:numel(C) + if ~isa(C{ii}, "missing") + idx = ii; + return; + end + end +end \ No newline at end of file diff --git a/matlab/src/matlab/+arrow/+array/Array.m b/matlab/src/matlab/+arrow/+array/Array.m index 293ad87ad4316..4402055932b60 100644 --- a/matlab/src/matlab/+arrow/+array/Array.m +++ b/matlab/src/matlab/+arrow/+array/Array.m @@ -98,4 +98,14 @@ function displayScalarObject(obj) tf = obj.Proxy.isEqual(proxyIDs); end end + + methods (Hidden) + function array = slice(obj, offset, length) + sliceStruct = struct(Offset=offset, Length=length); + arrayStruct = obj.Proxy.slice(sliceStruct); + traits = arrow.type.traits.traits(arrow.type.ID(arrayStruct.TypeID)); + proxy = libmexclass.proxy.Proxy(Name=traits.ArrayProxyClassName, ID=arrayStruct.ProxyID); + array = traits.ArrayConstructor(proxy); + end + end end diff --git a/matlab/src/matlab/+arrow/+array/ListArray.m b/matlab/src/matlab/+arrow/+array/ListArray.m index f8fd934b7c448..0febdd3831c07 100644 --- a/matlab/src/matlab/+arrow/+array/ListArray.m +++ b/matlab/src/matlab/+arrow/+array/ListArray.m @@ -79,8 +79,9 @@ offsets (1, 1) arrow.array.Int32Array values (1, 1) arrow.array.Array opts.Valid + opts.ValidationMode (1, 1) arrow.array.ValidationMode = arrow.array.ValidationMode.Minimal end - + import arrow.internal.validate.parseValid if nargin < 2 @@ -100,12 +101,68 @@ ValuesProxyID=valuesProxyID, ... Valid=validElements ... ); - + proxyName = "arrow.array.proxy.ListArray"; proxy = arrow.internal.proxy.create(proxyName, args); + % Validate the provided offsets and values. 
+ proxy.validate(struct(ValidationMode=uint8(opts.ValidationMode))); array = arrow.array.ListArray(proxy); end + function array = fromMATLAB(C) + arguments + C(:, 1) cell {mustBeNonempty} + end + import arrow.array.internal.list.findFirstNonMissingElement + import arrow.array.internal.list.createValidator + + idx = findFirstNonMissingElement(C); + + if idx == -1 + id = "arrow:array:list:CellArrayAllMissing"; + msg = "The input cell array must contain at least one non-missing" + ... + " value to be converted to an Arrow array."; + error(id, msg); + end + + validator = createValidator(C{idx}); + + numElements = numel(C); + valid = true([numElements 1]); + % All elements before the first non-missing value should be + % treated as null values. + valid(1:idx-1) = false; + offsets = zeros([numElements + 1, 1], "int32"); + + for ii = idx:numElements + element = C{ii}; + if isa(element, "missing") + % Treat missing values as null values. + valid(ii) = false; + offsets(ii + 1) = offsets(ii); + else + validator.validateElement(element); + length = validator.getElementLength(element); + offsets(ii + 1) = offsets(ii) + length; + end + end + + offsetArray = arrow.array(offsets); + + validValueCellArray = validator.reshapeCellElements(C(valid)); + values = vertcat(validValueCellArray{:}); + valueArray = arrow.array(values); + + args = struct(... + OffsetsProxyID=offsetArray.Proxy.ID, ... + ValuesProxyID=valueArray.Proxy.ID, ... + Valid=valid ... + ); + + proxyName = "arrow.array.proxy.ListArray"; + proxy = arrow.internal.proxy.create(proxyName, args); + array = arrow.array.ListArray(proxy); + end end end diff --git a/matlab/src/matlab/+arrow/+array/ValidationMode.m b/matlab/src/matlab/+arrow/+array/ValidationMode.m new file mode 100644 index 0000000000000..3442bcccf725b --- /dev/null +++ b/matlab/src/matlab/+arrow/+array/ValidationMode.m @@ -0,0 +1,24 @@ +% Mode to use for Array validation. 
+ +% Licensed to the Apache Software Foundation (ASF) under one or more +% contributor license agreements. See the NOTICE file distributed with +% this work for additional information regarding copyright ownership. +% The ASF licenses this file to you under the Apache License, Version +% 2.0 (the "License"); you may not use this file except in compliance +% with the License. You may obtain a copy of the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, +% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +% implied. See the License for the specific language governing +% permissions and limitations under the License. + +classdef ValidationMode < uint8 + enumeration + None (0) + Minimal (1) + Full (2) + end +end \ No newline at end of file diff --git a/matlab/src/matlab/+arrow/+internal/+display/boldFontIfPossible.m b/matlab/src/matlab/+arrow/+internal/+display/boldFontIfPossible.m new file mode 100644 index 0000000000000..cb980cbff99e8 --- /dev/null +++ b/matlab/src/matlab/+arrow/+internal/+display/boldFontIfPossible.m @@ -0,0 +1,26 @@ +%BOLDFONTIFPOSSIBLE Bolds the input string if possible + +% Licensed to the Apache Software Foundation (ASF) under one or more +% contributor license agreements. See the NOTICE file distributed with +% this work for additional information regarding copyright ownership. +% The ASF licenses this file to you under the Apache License, Version +% 2.0 (the "License"); you may not use this file except in compliance +% with the License. You may obtain a copy of the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, +% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +% implied. 
See the License for the specific language governing +% permissions and limitations under the License. + +function str = boldFontIfPossible(str) + + arguments + str(1, 1) string {mustBeNonzeroLengthText} + end + if usejava("desktop") + str = compose("%s", str); + end +end \ No newline at end of file diff --git a/matlab/src/matlab/+arrow/+array/+internal/+display/pluralizeStringIfNeeded.m b/matlab/src/matlab/+arrow/+internal/+display/pluralizeStringIfNeeded.m similarity index 100% rename from matlab/src/matlab/+arrow/+array/+internal/+display/pluralizeStringIfNeeded.m rename to matlab/src/matlab/+arrow/+internal/+display/pluralizeStringIfNeeded.m diff --git a/matlab/src/matlab/+arrow/+internal/+test/+tabular/createAllSupportedArrayTypes.m b/matlab/src/matlab/+arrow/+internal/+test/+tabular/createAllSupportedArrayTypes.m index ad2f026d64e20..a9682d317354b 100644 --- a/matlab/src/matlab/+arrow/+internal/+test/+tabular/createAllSupportedArrayTypes.m +++ b/matlab/src/matlab/+arrow/+internal/+test/+tabular/createAllSupportedArrayTypes.m @@ -24,8 +24,8 @@ end % Seed the random number generator to ensure - % reproducible results in tests. - rng(1); + % reproducible results in tests across MATLAB sessions. 
+ rng(1, "twister"); import arrow.type.ID import arrow.array.* @@ -101,6 +101,7 @@ % Return the class names as a string array classes = string({metaClass.Name}); + classes = sort(classes); end function dict = getNumericArrayToMatlabDictionary() diff --git a/matlab/src/matlab/+arrow/+tabular/+internal/displaySchema.m b/matlab/src/matlab/+arrow/+tabular/+internal/+display/getSchemaString.m similarity index 92% rename from matlab/src/matlab/+arrow/+tabular/+internal/displaySchema.m rename to matlab/src/matlab/+arrow/+tabular/+internal/+display/getSchemaString.m index 8d6740b195abc..7da945ca993ef 100644 --- a/matlab/src/matlab/+arrow/+tabular/+internal/displaySchema.m +++ b/matlab/src/matlab/+arrow/+tabular/+internal/+display/getSchemaString.m @@ -1,4 +1,5 @@ -%DISPLAYSCHEMA Generates arrow.tabular.Schema display text. +%GETSCHEMASTRING Generates a string representation of an +% arrow.tabular.Schema object. % Licensed to the Apache Software Foundation (ASF) under one or more % contributor license agreements. See the NOTICE file distributed with @@ -15,7 +16,7 @@ % implied. See the License for the specific language governing % permissions and limitations under the License. -function text = displaySchema(schema) +function text = getSchemaString(schema) fields = schema.Fields; names = [fields.Name]; types = [fields.Type]; @@ -46,5 +47,5 @@ end text = names + ": " + typeIDs; - text = " " + strjoin(text, " | "); + text = strjoin(text, " | "); end \ No newline at end of file diff --git a/matlab/src/matlab/+arrow/+tabular/+internal/+display/getTabularDisplay.m b/matlab/src/matlab/+arrow/+tabular/+internal/+display/getTabularDisplay.m new file mode 100644 index 0000000000000..054922fa03c75 --- /dev/null +++ b/matlab/src/matlab/+arrow/+tabular/+internal/+display/getTabularDisplay.m @@ -0,0 +1,44 @@ +%GETTABULARDISPLAY Generates the display for arrow.tabular.Table and +% arrow.tabular.RecordBatch. 
+ +% Licensed to the Apache Software Foundation (ASF) under one or more +% contributor license agreements. See the NOTICE file distributed with +% this work for additional information regarding copyright ownership. +% The ASF licenses this file to you under the Apache License, Version +% 2.0 (the "License"); you may not use this file except in compliance +% with the License. You may obtain a copy of the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, +% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +% implied. See the License for the specific language governing +% permissions and limitations under the License. + +function tabularDisplay = getTabularDisplay(tabularObj, className) + import arrow.tabular.internal.display.getSchemaString + import arrow.tabular.internal.display.getTabularHeader + + numRows = tabularObj.NumRows; + numColumns = tabularObj.NumColumns; + tabularDisplay = getTabularHeader(className, numRows, numColumns); + + if numColumns > 0 + twoNewLines = string([newline newline]); + fourSpaces = string(repmat(' ', 1, 4)); + eightSpaces = string(repmat(' ', 1, 8)); + + schemaHeader = fourSpaces + "Schema:"; + schemaBody = eightSpaces + getSchemaString(tabularObj.Schema); + schemaDisplay = schemaHeader + twoNewLines + schemaBody; + tabularDisplay = tabularDisplay + twoNewLines + schemaDisplay; + + if numRows > 0 + rowHeader = fourSpaces + "First Row:"; + rowBody = eightSpaces + tabularObj.Proxy.getRowAsString(struct(Index=int64(1))); + rowDisplay = rowHeader + twoNewLines + rowBody; + tabularDisplay = tabularDisplay + twoNewLines + rowDisplay; + end + end +end diff --git a/matlab/src/matlab/+arrow/+tabular/+internal/+display/getTabularHeader.m b/matlab/src/matlab/+arrow/+tabular/+internal/+display/getTabularHeader.m new file mode 100644 index 0000000000000..4c647986ce055 --- /dev/null 
+++ b/matlab/src/matlab/+arrow/+tabular/+internal/+display/getTabularHeader.m @@ -0,0 +1,32 @@ +%GETTABULARHEADER Generates the display header for arrow.tabular.Table and +% arrow.tabular.RecordBatch. + +% Licensed to the Apache Software Foundation (ASF) under one or more +% contributor license agreements. See the NOTICE file distributed with +% this work for additional information regarding copyright ownership. +% The ASF licenses this file to you under the Apache License, Version +% 2.0 (the "License"); you may not use this file except in compliance +% with the License. You may obtain a copy of the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, +% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +% implied. See the License for the specific language governing +% permissions and limitations under the License. + +function header = getTabularHeader(className, numRows, numColumns) + import arrow.internal.display.boldFontIfPossible + import arrow.internal.display.pluralizeStringIfNeeded + + numRowsString = boldFontIfPossible(numRows); + numColsString = boldFontIfPossible(numColumns); + rowWordString = pluralizeStringIfNeeded(numRows, "row"); + colWordString = pluralizeStringIfNeeded(numColumns, "column"); + formatSpec = " Arrow %s with %s %s and %s %s"; + if numColumns > 0 + formatSpec = formatSpec + ":"; + end + header = compose(formatSpec,className, numRowsString, rowWordString, numColsString, colWordString); +end \ No newline at end of file diff --git a/matlab/src/matlab/+arrow/+tabular/RecordBatch.m b/matlab/src/matlab/+arrow/+tabular/RecordBatch.m index fdedaecb5eb3a..0225f3d771181 100644 --- a/matlab/src/matlab/+arrow/+tabular/RecordBatch.m +++ b/matlab/src/matlab/+arrow/+tabular/RecordBatch.m @@ -112,7 +112,9 @@ methods (Access=protected) function displayScalarObject(obj) - 
disp(obj.toString()); + className = matlab.mixin.CustomDisplay.getClassNameForHeader(obj); + tabularDisplay = arrow.tabular.internal.display.getTabularDisplay(obj, className); + disp(tabularDisplay + newline); end end diff --git a/matlab/src/matlab/+arrow/+tabular/Schema.m b/matlab/src/matlab/+arrow/+tabular/Schema.m index 3ee40f0e14293..a50522c6b5283 100644 --- a/matlab/src/matlab/+arrow/+tabular/Schema.m +++ b/matlab/src/matlab/+arrow/+tabular/Schema.m @@ -116,7 +116,7 @@ function displayScalarObject(obj) numFields = obj.NumFields; if numFields > 0 - text = arrow.tabular.internal.displaySchema(obj); + text = " " + arrow.tabular.internal.display.getSchemaString(obj); disp(text + newline); end diff --git a/matlab/src/matlab/+arrow/+tabular/Table.m b/matlab/src/matlab/+arrow/+tabular/Table.m index c2f73450408ef..1ed205d639747 100644 --- a/matlab/src/matlab/+arrow/+tabular/Table.m +++ b/matlab/src/matlab/+arrow/+tabular/Table.m @@ -112,9 +112,10 @@ end methods (Access=protected) - function displayScalarObject(obj) - disp(obj.toString()); + className = matlab.mixin.CustomDisplay.getClassNameForHeader(obj); + tabularDisplay = arrow.tabular.internal.display.getTabularDisplay(obj, className); + disp(tabularDisplay + newline); end end diff --git a/matlab/src/matlab/+arrow/+type/+traits/ListTraits.m b/matlab/src/matlab/+arrow/+type/+traits/ListTraits.m index 26a0c6d340603..5404e0716c84a 100644 --- a/matlab/src/matlab/+arrow/+type/+traits/ListTraits.m +++ b/matlab/src/matlab/+arrow/+type/+traits/ListTraits.m @@ -19,7 +19,7 @@ ArrayConstructor = @arrow.array.ListArray ArrayClassName = "arrow.array.ListArray" ArrayProxyClassName = "arrow.array.proxy.ListArray" - ArrayStaticConstructor = missing + ArrayStaticConstructor = @arrow.array.ListArray.fromMATLAB TypeConstructor = @arrow.type.ListType TypeClassName = "arrow.type.ListType" TypeProxyClassName = "arrow.type.proxy.ListType" diff --git a/matlab/src/matlab/+arrow/array.m b/matlab/src/matlab/+arrow/array.m index 
50221b4b955df..073715ece7713 100644 --- a/matlab/src/matlab/+arrow/array.m +++ b/matlab/src/matlab/+arrow/array.m @@ -49,6 +49,8 @@ arrowArray = arrow.array.Time64Array.fromMATLAB(data, varargin{:}); case "table" arrowArray = arrow.array.StructArray.fromMATLAB(data, varargin{:}); + case "cell" + arrowArray = arrow.array.ListArray.fromMATLAB(data, varargin{:}); otherwise errid = "arrow:array:UnsupportedMATLABType"; msg = join(["Unable to convert MATLAB type" classname "to arrow array."]); diff --git a/matlab/test/arrow/array/list/tClassTypeValidator.m b/matlab/test/arrow/array/list/tClassTypeValidator.m new file mode 100644 index 0000000000000..ae6fe7c855754 --- /dev/null +++ b/matlab/test/arrow/array/list/tClassTypeValidator.m @@ -0,0 +1,99 @@ +%TCLASSTYPEVALIDATOR Unit tests for arrow.array.internal.list.ClassTypeValidator + +% Licensed to the Apache Software Foundation (ASF) under one or more +% contributor license agreements. See the NOTICE file distributed with +% this work for additional information regarding copyright ownership. +% The ASF licenses this file to you under the Apache License, Version +% 2.0 (the "License"); you may not use this file except in compliance +% with the License. You may obtain a copy of the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, +% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +% implied. See the License for the specific language governing +% permissions and limitations under the License. 
+ +classdef tClassTypeValidator < matlab.unittest.TestCase + + methods (Test) + function Smoke(testCase) + import arrow.array.internal.list.ClassTypeValidator + validator = ClassTypeValidator("Sample Data"); + testCase.verifyInstanceOf(validator, "arrow.array.internal.list.ClassTypeValidator"); + end + + function ClassNameGetter(testCase) + % Verify the ClassName getter returns the expected scalar + % string. + import arrow.array.internal.list.ClassTypeValidator + + validator = ClassTypeValidator("Sample Data"); + testCase.verifyEqual(validator.ClassName, "string"); + end + + function ClassNameNoSetter(testCase) + % Verify ClassName property is not settable. + import arrow.array.internal.list.ClassTypeValidator + + validator = ClassTypeValidator(1); + fcn = @() setfield(validator, "ClassName", "duration"); + testCase.verifyError(fcn, "MATLAB:class:SetProhibited"); + end + + function ValidateElementNoThrow(testCase) %#ok + % Verify validateElement does not throw an exception + % if class type of the input element matches the ClassName + % property value. + import arrow.array.internal.list.ClassTypeValidator + + validator = ClassTypeValidator(1); + validator.validateElement(2); + validator.validateElement([1 2 3]); + validator.validateElement([1; 2; 3; 3]); + validator.validateElement([5 6; 7 8]); + validator.validateElement(double.empty(0, 1)); + end + + function ValidateElementClassTypeMismatchError(testCase) + % Verify validateElement throws an exception whose identifier + % is "arrow:array:list:ClassTypeMismatch" if the input + % element's class type does not match the ClassName property + % value. + import arrow.array.internal.list.ClassTypeValidator + + % validator will expect all elements to be of type double, since "1" is a double. 
+ validator = ClassTypeValidator(1); + errorID = "arrow:array:list:ClassTypeMismatch"; + testCase.verifyError(@() validator.validateElement("A"), errorID); + testCase.verifyError(@() validator.validateElement(uint8([1 2])), errorID); + testCase.verifyError(@() validator.validateElement(datetime(2023, 1, 1)), errorID); + end + + function GetElementLength(testCase) + % Verify getElementLength returns the expected length values + % for the given input arrays. + import arrow.array.internal.list.ClassTypeValidator + + validator = ClassTypeValidator(1); + testCase.verifyEqual(validator.getElementLength(2), 1); + testCase.verifyEqual(validator.getElementLength([1 2; 3 4]), 4); + testCase.verifyEqual(validator.getElementLength(double.empty(1, 0)), 0); + end + + function ReshapeCellElements(testCase) + % Verify reshapeCellElements reshapes all elements in the input + % cell array into column vectors. + import arrow.array.internal.list.ClassTypeValidator + + validator = ClassTypeValidator(1); + C = {[1 2 3], [4; 5], [6 7; 8 9], double.empty(1, 0), 10}; + act = validator.reshapeCellElements(C); + exp = {[1; 2; 3], [4; 5], [6; 8; 7; 9], double.empty(0, 1), 10}; + testCase.verifyEqual(act, exp); + end + + end + +end \ No newline at end of file diff --git a/matlab/test/arrow/array/list/tCreateValidator.m b/matlab/test/arrow/array/list/tCreateValidator.m new file mode 100644 index 0000000000000..d95af334ae76a --- /dev/null +++ b/matlab/test/arrow/array/list/tCreateValidator.m @@ -0,0 +1,131 @@ +%TCREATEVALIDATOR Unit tests for arrow.array.internal.list.createValidator. + +% Licensed to the Apache Software Foundation (ASF) under one or more +% contributor license agreements. See the NOTICE file distributed with +% this work for additional information regarding copyright ownership. +% The ASF licenses this file to you under the Apache License, Version +% 2.0 (the "License"); you may not use this file except in compliance +% with the License. 
You may obtain a copy of the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, +% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +% implied. See the License for the specific language governing +% permissions and limitations under the License. + +classdef tCreateValidator < matlab.unittest.TestCase + + properties (TestParameter) + NumericTypes + end + + methods (TestParameterDefinition, Static) + function NumericTypes = initializeNumericTypes() + NumericTypes = {"uint8", ... + "uint16", ... + "uint32", ... + "uint64", ... + "int8", ... + "int16", ... + "int32", ... + "int64", ... + "single", ... + "double"}; + end + end + + methods (Test) + function TestNumericTypes(testCase, NumericTypes) + % Verify createValidator returns a ClassTypeValidator with the + % expected ClassName value when given a numeric array as input. + import arrow.array.internal.list.createValidator + data = cast(1, NumericTypes); + validator = createValidator(data); + testCase.verifyInstanceOf(validator, "arrow.array.internal.list.ClassTypeValidator"); + testCase.verifyEqual(validator.ClassName, NumericTypes); + end + + function TestLogical(testCase) + % Verify createValidator returns a ClassTypeValidator whose + % ClassName property is set to "logical" when given a logical + % array as input. + import arrow.array.internal.list.createValidator + data = true; + validator = createValidator(data); + testCase.verifyInstanceOf(validator, "arrow.array.internal.list.ClassTypeValidator"); + testCase.verifyEqual(validator.ClassName, "logical"); + end + + function TestDuration(testCase) + % Verify createValidator returns a ClassTypeValidator whose + % ClassName property is set to "duration" when given a duration + % array as input. 
+ import arrow.array.internal.list.createValidator + data = seconds(1); + validator = createValidator(data); + testCase.verifyInstanceOf(validator, "arrow.array.internal.list.ClassTypeValidator"); + testCase.verifyEqual(validator.ClassName, "duration"); + end + + function TestString(testCase) + % Verify createValidator returns a ClassTypeValidator whose + % ClassName property is set to "string" when given a string + % array as input. + import arrow.array.internal.list.createValidator + data = "Hello World"; + validator = createValidator(data); + testCase.verifyInstanceOf(validator, "arrow.array.internal.list.ClassTypeValidator"); + testCase.verifyEqual(validator.ClassName, "string"); + end + + function TestCell(testCase) + % Verify createValidator returns a ClassTypeValidator whose + % ClassName property is set to "cell" when given a cell + % array as input. + import arrow.array.internal.list.createValidator + data = {"Hello World"}; + validator = createValidator(data); + testCase.verifyInstanceOf(validator, "arrow.array.internal.list.ClassTypeValidator"); + testCase.verifyEqual(validator.ClassName, "cell"); + end + + function TestDatetime(testCase) + % Verify createValidator returns a DatetimeValidator when given + % a datetime array as input. + import arrow.array.internal.list.createValidator + data = datetime(2023, 10, 31); + validator = createValidator(data); + testCase.verifyInstanceOf(validator, "arrow.array.internal.list.DatetimeValidator"); + testCase.verifyEqual(validator.ClassName, "datetime"); + testCase.verifyEqual(validator.Zoned, false); + end + + function TestTable(testCase) + % Verify createValidator returns a TableValidator when given + % a table as input. 
+ import arrow.array.internal.list.createValidator + data = table(1, "A", VariableNames=["Number", "Letter"]); + validator = createValidator(data); + testCase.verifyInstanceOf(validator, "arrow.array.internal.list.TableValidator"); + testCase.verifyEqual(validator.VariableNames, ["Number", "Letter"]); + testCase.verifyEqual(numel(validator.VariableValidators), 2); + testCase.verifyInstanceOf(validator.VariableValidators(1), "arrow.array.internal.list.ClassTypeValidator"); + testCase.verifyEqual(validator.VariableValidators(1).ClassName, "double"); + testCase.verifyInstanceOf(validator.VariableValidators(2), "arrow.array.internal.list.ClassTypeValidator"); + testCase.verifyEqual(validator.VariableValidators(2).ClassName, "string"); + + end + + function UnsupportedDataTypeError(testCase) + % Verify createValidator throws an exception whose identifier + % is "arrow:array:list:UnsupportedDataType" when given an + % unsupported datatype as input. + import arrow.array.internal.list.createValidator + data = calyears(1); + fcn = @() createValidator(data); + testCase.verifyError(fcn, "arrow:array:list:UnsupportedDataType"); + end + end +end \ No newline at end of file diff --git a/matlab/test/arrow/array/list/tDatetimeValidator.m b/matlab/test/arrow/array/list/tDatetimeValidator.m new file mode 100644 index 0000000000000..06d18bd92f747 --- /dev/null +++ b/matlab/test/arrow/array/list/tDatetimeValidator.m @@ -0,0 +1,181 @@ +%TDATETIMEVALIDATOR Unit tests for +%arrow.array.internal.list.DatetimeValidator + +% Licensed to the Apache Software Foundation (ASF) under one or more +% contributor license agreements. See the NOTICE file distributed with +% this work for additional information regarding copyright ownership. +% The ASF licenses this file to you under the Apache License, Version +% 2.0 (the "License"); you may not use this file except in compliance +% with the License. 
You may obtain a copy of the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, +% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +% implied. See the License for the specific language governing +% permissions and limitations under the License. + +classdef tDatetimeValidator < matlab.unittest.TestCase + + methods (Test) + function Smoke(testCase) + import arrow.array.internal.list.DatetimeValidator + validator = DatetimeValidator(datetime(2023, 10, 31)); + testCase.verifyInstanceOf(validator, "arrow.array.internal.list.DatetimeValidator"); + end + + function ClassNameGetter(testCase) + % Verify the ClassName getter returns the expected scalar + % string. + import arrow.array.internal.list.DatetimeValidator + + validator = DatetimeValidator(datetime(2023, 10, 31)); + testCase.verifyEqual(validator.ClassName, "datetime"); + end + + function ClassNameNoSetter(testCase) + % Verify ClassName property is not settable. + import arrow.array.internal.list.DatetimeValidator + + validator = DatetimeValidator(datetime(2023, 10, 31)); + fcn = @() setfield(validator, "ClassName", "duration"); + testCase.verifyError(fcn, "MATLAB:class:SetProhibited"); + end + + function ZonedGetter(testCase) + % Verify the Zoned getter returns the expected scalar + % logical. + + import arrow.array.internal.list.DatetimeValidator + validator = DatetimeValidator(datetime(2023, 10, 31)); + testCase.verifyEqual(validator.Zoned, false); + + validator = DatetimeValidator(datetime(2023, 10, 31, TimeZone="UTC")); + testCase.verifyEqual(validator.Zoned, true); + end + + function ZonedNoSetter(testCase) + % Verify Zoned property is not settable. 
+ import arrow.array.internal.list.DatetimeValidator + + validator = DatetimeValidator(datetime(2023, 10, 31)); + fcn = @() setfield(validator, "Zoned", true); + testCase.verifyError(fcn, "MATLAB:class:SetProhibited"); + + validator = DatetimeValidator(datetime(2023, 10, 31, TimeZone="UTC")); + fcn = @() setfield(validator, "Zoned", false); + testCase.verifyError(fcn, "MATLAB:class:SetProhibited"); + end + + function ValidateElementNoThrow(testCase) %#ok + % Verify validateElement does not throw an exception if: + % 1. the input element is a datetime + % 2. its TimeZone property is '' and Zoned = false + % 3. its TimeZone property is not empty and Zoned = true + + import arrow.array.internal.list.DatetimeValidator + + validator = DatetimeValidator(datetime(2023, 10, 31)); + validator.validateElement(datetime(2023, 11, 1)); + validator.validateElement(datetime(2023, 11, 1) + days(0:2)); + validator.validateElement(datetime(2023, 11, 1) + days(0:2)'); + validator.validateElement(datetime.empty(0, 1)); + + validator = DatetimeValidator(datetime(2023, 10, 31, TimeZone="UTC")); + validator.validateElement(datetime(2023, 11, 1, TimeZone="UTC")); + validator.validateElement(datetime(2023, 11, 1, TimeZone="America/New_York") + days(0:2)); + validator.validateElement(datetime(2023, 11, 1, TimeZone="Pacific/Fiji") + days(0:2)'); + emptyDatetime = datetime.empty(0, 1); + emptyDatetime.TimeZone = "Asia/Dubai"; + validator.validateElement(emptyDatetime); + end + + function ValidateElementExpectedZonedDatetimeError(testCase) + % Verify validateElement throws an exception whose identifier + % is "arrow:array:list:ExpectedZonedDatetime" if the input + % datetime is unzoned, but the validator expected all + % datetimes to zoned. + import arrow.array.internal.list.DatetimeValidator + + % validator will expect all elements to be zoned datetimes + % because the input datetime is zoned. 
+ validator = DatetimeValidator(datetime(2023, 10, 31, TimeZone="UTC")); + errorID = "arrow:array:list:ExpectedZonedDatetime"; + fcn = @() validator.validateElement(datetime(2023, 11, 1)); + testCase.verifyError(fcn, errorID); + end + + function ValidateElementExpectedUnzonedDatetimeError(testCase) + % Verify validateElement throws an exception whose identifier + % is "arrow:array:list:ExpectedUnzonedDatetime" if the input + % datetime has a time zone, but the validator expected all + % datetimes to be unzoned. + import arrow.array.internal.list.DatetimeValidator + + % validator will expect all elements to be unzoned datetimes + % because the input datetime is not zoned. + validator = DatetimeValidator(datetime(2023, 10, 31)); + errorID = "arrow:array:list:ExpectedUnzonedDatetime"; + fcn = @() validator.validateElement(datetime(2023, 11, 1, TimeZone="America/New_York")); + testCase.verifyError(fcn, errorID); + end + + function ValidateElementClassTypeMismatchError(testCase) + % Verify validateElement throws an exception whose identifier + % is "arrow:array:list:ClassTypeMismatch" if the input + % element is not a datetime. + import arrow.array.internal.list.DatetimeValidator + + validator = DatetimeValidator(datetime(2023, 10, 31)); + errorID = "arrow:array:list:ClassTypeMismatch"; + fcn = @() validator.validateElement(1); + testCase.verifyError(fcn, errorID); + fcn = @() validator.validateElement("A"); + testCase.verifyError(fcn, errorID); + fcn = @() validator.validateElement(seconds(1)); + testCase.verifyError(fcn, errorID); + end + + function GetElementLength(testCase) + % Verify getElementLength returns the expected length values + % for the given input arrays. 
+ import arrow.array.internal.list.DatetimeValidator + + validator = DatetimeValidator(datetime(2023, 10, 31)); + length = validator.getElementLength(datetime.empty(0, 1)); + testCase.verifyEqual(length, 0); + length = validator.getElementLength(datetime(2023, 11, 1)); + testCase.verifyEqual(length, 1); + length = validator.getElementLength(datetime(2023, 11, 1) + days(0:2)); + testCase.verifyEqual(length, 3); + length = validator.getElementLength(datetime(2023, 11, 1) + days([0 1; 2 3])); + testCase.verifyEqual(length, 4); + end + + function ReshapeCellElements(testCase) + % Verify reshapeCellElements reshapes all elements in the input + % cell array into column vectors. + import arrow.array.internal.list.DatetimeValidator + + validator = DatetimeValidator(datetime(2023, 10, 31)); + date = datetime(2023, 10, 31); + + C = {date + days(0:2), ... + date + days(3:4)', ... + date + days([5 6; 7 8]), ... + datetime.empty(1, 0)}; + + act = validator.reshapeCellElements(C); + + exp = {date + days(0:2)', ... + date + days(3:4)', ... + date + days([5; 7; 6; 8]), ... + datetime.empty(0, 1)}; + + testCase.verifyEqual(act, exp); + end + + end + +end \ No newline at end of file diff --git a/matlab/test/arrow/array/list/tFromMATLAB.m b/matlab/test/arrow/array/list/tFromMATLAB.m new file mode 100644 index 0000000000000..ebc79ec0fd98b --- /dev/null +++ b/matlab/test/arrow/array/list/tFromMATLAB.m @@ -0,0 +1,206 @@ +%TFROMMATLAB Unit tests for arrow.array.ListArray's fromMATLAB method. + +% Licensed to the Apache Software Foundation (ASF) under one or more +% contributor license agreements. See the NOTICE file distributed with +% this work for additional information regarding copyright ownership. +% The ASF licenses this file to you under the Apache License, Version +% 2.0 (the "License"); you may not use this file except in compliance +% with the License.
You may obtain a copy of the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, +% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +% implied. See the License for the specific language governing +% permissions and limitations under the License. + +classdef tFromMATLAB < matlab.unittest.TestCase + + methods (Test) + function EmptyCellArrayError(testCase) + % Verify fromMATLAB throws an error whose identifier is + % "MATLAB:validators:mustBeNonempty" if given an empty cell + % array as input. + import arrow.array.ListArray + + fcn = @() ListArray.fromMATLAB({}); + testCase.verifyError(fcn, "MATLAB:validators:mustBeNonempty"); + end + + function MustBeCellArrayError(testCase) + % Verify fromMATLAB throws an error whose identifier is + % "MATLAB:validation:UnableToConvert" if the input provided is + % not a cell array. + import arrow.array.ListArray + + fcn = @() ListArray.fromMATLAB('a'); + testCase.verifyError(fcn, "MATLAB:validation:UnableToConvert"); + end + + function AllMissingCellArrayError(testCase) + % Verify fromMATLAB throws an error whose identifier is + % "arrow:array:list:CellArrayAllMissing" if given a cell array + % containing only missing values. + import arrow.array.ListArray + + C = {missing missing missing}; + fcn = @() ListArray.fromMATLAB(C); + testCase.verifyError(fcn, "arrow:array:list:CellArrayAllMissing"); + end + + function ListOfFloat64(testCase) + % Verify fromMATLAB creates the expected ListArray whose + % Values property is a Float64Array. 
+ import arrow.array.ListArray + + C = {[1 2 3], [4 5], missing, [6 7 8], [], [9 10]}; + actual = ListArray.fromMATLAB(C); + + values = arrow.array(1:10); + offsets = arrow.array(int32([0 3 5 5 8 8 10])); + expected = ListArray.fromArrays(offsets, values, Valid=[1 2 4 5 6]); + + testCase.verifyEqual(actual, expected); + end + + function ListOfStruct(testCase) + % Verify fromMATLAB creates the expected ListArray whose + % Values property is a StructArray. + import arrow.array.ListArray + + Number = (1:10)'; + Text = compose("Test%d", (1:10)'); + Date = datetime(2023, 11, 2) + days(0:9)'; + T = table(Number, Text, Date); + C = {missing, T(1:3, :), T(4, :), T(1:0, :), T(5:10, :), missing}; + actual = ListArray.fromMATLAB(C); + + values = arrow.array(T); + offsets = arrow.array(int32([0 0 3 4 4 10 10])); + expected = ListArray.fromArrays(offsets, values, Valid=[2 3 4 5]); + + testCase.verifyEqual(actual, expected); + end + + function ListOfListOfString(testCase) + % Verify fromMATLAB creates the expected ListArray whose + % Values property is a ListArray. + import arrow.array.ListArray + + rowOne = {["A" "B"], ["C" "D" "E"] missing}; + rowTwo = missing; + rowThree = {"F" ["G" "H" "I"]}; + C = {rowOne, rowTwo rowThree}; + actual = ListArray.fromMATLAB(C); + + stringValues = arrow.array(["A" "B" "C" "D" "E" "F" "G" "H" "I"]); + innerOffsets = arrow.array(int32([0 2 5 5 6 9])); + valuesList = ListArray.fromArrays(innerOffsets, stringValues, Valid=[1 2 4 5]); + + outerOffsets = arrow.array(int32([0 3 3 5])); + expected = ListArray.fromArrays(outerOffsets, valuesList, Valid=[1 3]); + + testCase.verifyEqual(actual, expected); + end + + function OnlyEmptyElement(testCase) + % Create a ListArray containing only empty elements. 
+ import arrow.array.ListArray + + emptyDuration = duration.empty(0, 0); + + C = {emptyDuration, emptyDuration, emptyDuration, emptyDuration}; + actual = ListArray.fromMATLAB(C); + + values = arrow.array(duration.empty); + offsets = arrow.array(int32([0 0 0 0 0])); + expected = ListArray.fromArrays(offsets, values); + + testCase.verifyEqual(actual, expected); + end + + function CellOfEmptyCell(testCase) + % Verify fromMATLAB creates a ListArray whose Values property + % is a StringArray when given a cell array containing just an + % empty cell array. + import arrow.array.ListArray + + C = {{}}; + actual = ListArray.fromMATLAB(C); + + values = arrow.array(string.empty); + offsets = arrow.array(int32([0 0])); + expected = ListArray.fromArrays(offsets, values); + + testCase.verifyEqual(actual, expected); + end + + function CellOfMatrices(testCase) + % Verify fromMATLAB can handle cell arrays that contain + % matrices instead of just vectors - i.e. the matrices are + % reshaped as column vectors before they are concatenated + % together. + import arrow.array.ListArray + + C = {[1 2 3; 4 5 6], [7 8; 9 10], 11}; + actual = ListArray.fromMATLAB(C); + + values = arrow.array([1 4 2 5 3 6 7 9 8 10 11]); + offsets = arrow.array(int32([0 6 10 11])); + expected = ListArray.fromArrays(offsets, values); + + testCase.verifyEqual(actual, expected); + end + + function ClassTypeMismatchError(testCase) + % Verify fromMATLAB throws an error whose identifier is + % "arrow:array:list:ClassTypeMismatch" if given a cell array + % containing arrays with different class types. + import arrow.array.ListArray + + C = {1, [2 3 4], "A", 5}; + fcn = @() ListArray.fromMATLAB(C); + testCase.verifyError(fcn, "arrow:array:list:ClassTypeMismatch"); + end + + function VariableNamesMismatchError(testCase) + % Verify fromMATLAB throws an error whose identifier is + % "arrow:array:list:VariableNamesMismatch" if given a cell + % array containing tables whose variable names don't match. 
+ import arrow.array.ListArray + + C = {table(1, "A"), table(2, "B", VariableNames=["X", "Y"])}; + fcn = @() ListArray.fromMATLAB(C); + testCase.verifyError(fcn, "arrow:array:list:VariableNamesMismatch"); + end + + function ExpectedZonedDatetimeError(testCase) + % Verify fromMATLAB throws an error whose identifier is + % "arrow:array:list:ExpectedZonedDatetime" if given a cell + % array containing zoned and unzoned datetimes - in that order. + + import arrow.array.ListArray + + C = {datetime(2023, 11, 1, TimeZone="UTC"), datetime(2023, 11, 2)}; + fcn = @() ListArray.fromMATLAB(C); + testCase.verifyError(fcn, "arrow:array:list:ExpectedZonedDatetime"); + end + + function ExpectedUnzonedDatetimeError(testCase) + % Verify fromMATLAB throws an error whose identifier is + % "arrow:array:list:ExpectedUnzonedDatetime" if given a cell + % array containing unzoned and zoned datetimes - in that order. + + import arrow.array.ListArray + + C = {datetime(2023, 11, 1), datetime(2023, 11, 2, TimeZone="UTC")}; + fcn = @() ListArray.fromMATLAB(C); + testCase.verifyError(fcn, "arrow:array:list:ExpectedUnzonedDatetime"); + end + + + + end + +end \ No newline at end of file diff --git a/matlab/test/arrow/array/list/tTableValidator.m b/matlab/test/arrow/array/list/tTableValidator.m new file mode 100644 index 0000000000000..b3aeac9b6728c --- /dev/null +++ b/matlab/test/arrow/array/list/tTableValidator.m @@ -0,0 +1,286 @@ +%TTABLEVALIDATOR Unit tests for arrow.array.internal.list.TableValidator + +% Licensed to the Apache Software Foundation (ASF) under one or more +% contributor license agreements. See the NOTICE file distributed with +% this work for additional information regarding copyright ownership. +% The ASF licenses this file to you under the Apache License, Version +% 2.0 (the "License"); you may not use this file except in compliance +% with the License. 
You may obtain a copy of the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, +% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +% implied. See the License for the specific language governing +% permissions and limitations under the License. + +classdef tTableValidator < matlab.unittest.TestCase + + properties (Constant) + BaseTable = table(1, "A", datetime(2023, 11, 1, TimeZone="UTC"), ... + VariableNames=["Number", "Letter", "Date"]); + end + + methods(Test) + function Smoke(testCase) + import arrow.array.internal.list.TableValidator + + validator = TableValidator(testCase.BaseTable); + testCase.verifyInstanceOf(validator, "arrow.array.internal.list.TableValidator"); + end + + function TableWithZeroVariablesError(testCase) + % Verify the TableValidator constructor throws an exception + % whose identifier is "arrow:array:list:TableWithZeroVariables" + % if provided a table with zero variables. + import arrow.array.internal.list.TableValidator + + fcn = @() TableValidator(table); + testCase.verifyError(fcn, "arrow:array:list:TableWithZeroVariables"); + end + + function VariableNamesGetter(testCase) + % Verify the VariableNames property getter returns the + % expected string array. + import arrow.array.internal.list.TableValidator + + validator = TableValidator(testCase.BaseTable); + testCase.verifyEqual(validator.VariableNames, ["Number", "Letter", "Date"]); + end + + function VariableNamesNoSetter(testCase) + % Verify the VariableNames property is not settable. 
+ import arrow.array.internal.list.TableValidator + + validator = TableValidator(testCase.BaseTable); + fcn = @() setfield(validator, "VariableNames", ["A", "B", "C"]); + testCase.verifyError(fcn, "MATLAB:class:SetProhibited"); + end + + function VariableValidatorsGetter(testCase) + % Verify the VariableValidators getter returns the expected + % arrow.array.internal.list.Validator array. + + import arrow.array.internal.list.TableValidator + import arrow.array.internal.list.DatetimeValidator + import arrow.array.internal.list.ClassTypeValidator + + validator = TableValidator(testCase.BaseTable); + + numberVariableValidator = ClassTypeValidator(1); + letterVariableValidator = ClassTypeValidator("A"); + datetimeVariableValidator = DatetimeValidator(datetime(2023, 10, 31, TimeZone="UTC")); + expectedValidators = [numberVariableValidator letterVariableValidator datetimeVariableValidator]; + testCase.verifyEqual(validator.VariableValidators, expectedValidators); + end + + function VariableValidatorsNoSetter(testCase) + % Verify the VariableValidators property is not settable. + import arrow.array.internal.list.TableValidator + import arrow.array.internal.list.ClassTypeValidator + + validator = TableValidator(testCase.BaseTable); + numberVariableValidator = ClassTypeValidator(1); + fcn = @() setfield(validator, "VariableValidators", numberVariableValidator); + testCase.verifyError(fcn, "MATLAB:class:SetProhibited"); + end + + function ClassNameGetter(testCase) + % Verify the ClassName getter returns the expected scalar + % string. + import arrow.array.internal.list.TableValidator + + validator = TableValidator(testCase.BaseTable); + testCase.verifyEqual(validator.ClassName, "table"); + end + + function ClassNameNoSetter(testCase) + % Verify the ClassName property is not settable. 
+ import arrow.array.internal.list.TableValidator + + validator = TableValidator(testCase.BaseTable); + fcn = @() setfield(validator, "ClassName", "string"); + testCase.verifyError(fcn, "MATLAB:class:SetProhibited"); + end + + function ValidateElementClassTypeMismatchError(testCase) + % Verify validateElement throws an exception whose identifier + % is "arrow:array:list:ClassTypeMismatch" if the input is + % not a table. + import arrow.array.internal.list.TableValidator + + validator = TableValidator(testCase.BaseTable); + errorID = "arrow:array:list:ClassTypeMismatch"; + fcn = @() validator.validateElement(1); + testCase.verifyError(fcn, errorID); + fcn = @() validator.validateElement(seconds(1)); + testCase.verifyError(fcn, errorID); + end + + function ValidateElementNumVariablesMismatchError(testCase) + % Verify validateElement throws an exception whose identifier + % is "arrow:array:list:NumVariablesMismatch" if the input table + % does not have the expected number of variables. + import arrow.array.internal.list.TableValidator + + validator = TableValidator(testCase.BaseTable); + errorID = "arrow:array:list:NumVariablesMismatch"; + + inputTable = table(1, "A", VariableNames=["Number", "Letter"]); + fcn = @() validator.validateElement(inputTable); + testCase.verifyError(fcn, errorID); + + inputTable = table(1, "A", datetime(2023, 10, 30, TimeZone="UTC"), seconds(1), ... + VariableNames=["Number", "Letter", "Date", "Time"]); + fcn = @() validator.validateElement(inputTable); + testCase.verifyError(fcn, errorID); + end + + function ValidateElementVariableNamesMismatchError(testCase) + % Verify validateElement throws an exception whose identifier + % is "arrow:array:list:VariableNamesMismatch" if the input table + % does not have the expected variable names.
+ import arrow.array.internal.list.TableValidator + + validator = TableValidator(testCase.BaseTable); + errorID = "arrow:array:list:VariableNamesMismatch"; + + inputTable = table(1, "A", datetime(2023, 10, 31, TimeZone="UTC"), ... + VariableNames=["A", "B", "C"]); + fcn = @() validator.validateElement(inputTable); + testCase.verifyError(fcn, errorID); + end + + function ValidateElementErrorFromFirstVariable(testCase) + % Verify validateElement throws an exception if there is an + % issue with the first variable in the input table. + import arrow.array.internal.list.TableValidator + + validator = TableValidator(testCase.BaseTable); + + % validator expects the first variable to be a double + inputTable = table(true, "A", datetime(2023, 10, 31, TimeZone="UTC"), ... + VariableNames=["Number", "Letter", "Date"]); + fcn = @() validator.validateElement(inputTable); + testCase.verifyError(fcn, "arrow:array:list:ClassTypeMismatch"); + + % validator expects all table variables that are not tables + % themselves to be columnar or empty + nonColumnar = [1 2 3 4]; + inputTable = table(nonColumnar, "B", datetime(2023, 10, 31, TimeZone="UTC"), ... + VariableNames=["Number", "Letter", "Date"]); + fcn = @() validator.validateElement(inputTable); + testCase.verifyError(fcn, "arrow:array:list:NonTabularVariablesMustBeColumnar"); + end + + function ValidateElementErrorFromSecondVariable(testCase) + % Verify validateElement throws an exception if there is an + % issue with the second variable in the input table. + import arrow.array.internal.list.TableValidator + + validator = TableValidator(testCase.BaseTable); + + % validator expects the second variable to be a string + inputTable = table(2, seconds(1), datetime(2023, 10, 31, TimeZone="UTC"), ... 
+ VariableNames=["Number", "Letter", "Date"]); + fcn = @() validator.validateElement(inputTable); + testCase.verifyError(fcn, "arrow:array:list:ClassTypeMismatch"); + + % validator expects all table variables that are not tables + % themselves to be columnar or empty + nonColumnar = ["A" "B"]; + inputTable = table(2, nonColumnar, datetime(2023, 10, 31, TimeZone="UTC"), ... + VariableNames=["Number", "Letter", "Date"]); + fcn = @() validator.validateElement(inputTable); + testCase.verifyError(fcn, "arrow:array:list:NonTabularVariablesMustBeColumnar"); + end + + function ValidateElementErrorFromThirdVariable(testCase) + % Verify validateElement throws an exception if there is an + % issue with the third variable in the input table. + import arrow.array.internal.list.TableValidator + + validator = TableValidator(testCase.BaseTable); + + % validator expects the third variable to be a zoned datetime + inputTable = table(2, "B", datetime(2023, 10, 31), ... + VariableNames=["Number", "Letter", "Date"]); + fcn = @() validator.validateElement(inputTable); + testCase.verifyError(fcn, "arrow:array:list:ExpectedZonedDatetime"); + + % validator expects the third variable to be datetime + inputTable = table(2, "B", uint8(1), ... + VariableNames=["Number", "Letter", "Date"]); + fcn = @() validator.validateElement(inputTable); + testCase.verifyError(fcn, "arrow:array:list:ClassTypeMismatch"); + + % validator expects all table variables that are not tables + % themselves to be columnar or empty + nonColumnar = datetime(2023, 10, 31, TimeZone="UTC") + days(0:4); + inputTable = table(2, "B", nonColumnar, VariableNames=["Number", "Letter", "Date"]); + fcn = @() validator.validateElement(inputTable); + testCase.verifyError(fcn, "arrow:array:list:NonTabularVariablesMustBeColumnar"); + end + + function validateElementNoThrow(testCase) + % Verify validateElement does not throw an exception if the + % input provided matches the expected schema. 
+ + import arrow.array.internal.list.TableValidator + validator = TableValidator(testCase.BaseTable); + + inputTable = table(2, "B", datetime(2023, 10, 31, TimeZone="UTC"), ... + VariableNames=["Number", "Letter", "Date"]); + validator.validateElement(inputTable); + + inputTable = repmat(inputTable, [10 1]); + validator.validateElement(inputTable); + + % Create a 0x3 table + inputTable = inputTable(1:0, :); + validator.validateElement(inputTable); + end + + function validateElementNestedTableVariable(testCase) %#ok + % Verify table variables that are tables themselves do not have + % to be columnar, i.e. can have more than one variable. + import arrow.array.internal.list.TableValidator + + baseTable = table(1, seconds(1), table("A", false)); + validator = TableValidator(baseTable); + + inputTable = table([1; 2], seconds([3;4]), table(["C"; "D"], [false; false])); + validator.validateElement(inputTable); + end + + function GetElementLength(testCase) + % Verify getElementLength returns the number of rows as the + % length of the element. + import arrow.array.internal.list.TableValidator + + validator = TableValidator(testCase.BaseTable); + + length = validator.getElementLength(testCase.BaseTable); + testCase.verifyEqual(length, 1); + + length = validator.getElementLength(repmat(testCase.BaseTable, [12 1])); + testCase.verifyEqual(length, 12); + + length = validator.getElementLength(testCase.BaseTable(1:0, :)); + testCase.verifyEqual(length, 0); + end + + function ReshapeCellElements(testCase) + % Verify reshapeCellElements is a no-op. It should return the + % original cell array unchanged. 
+ import arrow.array.internal.list.TableValidator + + validator = TableValidator(testCase.BaseTable); + + COriginal = {testCase.BaseTable, repmat(testCase.BaseTable, [10 1]), testCase.BaseTable(1:0, :)}; + CActual = validator.reshapeCellElements(COriginal); + testCase.verifyEqual(COriginal, CActual); + end + end +end \ No newline at end of file diff --git a/matlab/test/arrow/array/tArray.m b/matlab/test/arrow/array/tArray.m index 545d382ddf7f4..0012ad900ecb2 100644 --- a/matlab/test/arrow/array/tArray.m +++ b/matlab/test/arrow/array/tArray.m @@ -33,7 +33,8 @@ {datetime(2022, 1, 1), "arrow.array.TimestampArray"}, ... {seconds([1 2]), "arrow.array.Time64Array"}, ... {["A" "B"], "arrow.array.StringArray"}, ... - {table(["A" "B"]'), "arrow.array.StructArray"}}; + {table(["A" "B"]'), "arrow.array.StructArray"}, ... + {{[1, 2, 3], [4, 5]}, "arrow.array.ListArray"}}; end methods(Test) @@ -51,7 +52,7 @@ function UnsupportedMATLABTypeError(testCase) % Verify arrow.array throws an error with the identifier % "arrow:array:UnsupportedMATLABType" if the input array is not one % we support converting into an Arrow array. - matlabArray = {table}; + matlabArray = calmonths(12); fcn = @() arrow.array(matlabArray); errID = "arrow:array:UnsupportedMATLABType"; testCase.verifyError(fcn, errID); diff --git a/matlab/test/arrow/array/tListArray.m b/matlab/test/arrow/array/tListArray.m index 1ebf66e2f0999..07304eb384299 100644 --- a/matlab/test/arrow/array/tListArray.m +++ b/matlab/test/arrow/array/tListArray.m @@ -23,6 +23,7 @@ properties (TestParameter) TestArrowArray + TestValidationModeArray end methods (TestParameterDefinition, Static) @@ -113,6 +114,29 @@ ); end + function TestValidationModeArray = initializeTestValidationModeArray() + %% Valid ListArray + Offsets = arrow.array(int32([0, 1, 2, 3])); + Values = arrow.array([1, 2, 3]); + + TestValidationModeArray.ValidList = struct( ... + Offsets=Offsets, ... + Values=Values, ... + Valid=true ... 
+ ); + + %% Invalid ListArray + % Incorrect number of offsets (length should be 1 more than the number of Values). + Offsets = arrow.array(int32([0, 1, 2, 3, 4, 5])); + Values = arrow.array([1, 2, 3]); + + TestValidationModeArray.InvalidList = struct( ... + Offsets=Offsets, ... + Values=Values, ... + Valid=false ... + ); + end + end methods (Test) @@ -160,6 +184,87 @@ function TestErrorIfEmptyOffsets(testCase) testCase.verifyError(fcn, "arrow:array:ListArrayFromArraysFailed"); end + function TestValidationModeDefault(testCase, TestValidationModeArray) + % Verify that the default ValidationMode value for the + % arrow.array.ListArray.fromArrays method is + % arrow.array.ValidationMode.Minimal. + offsets = TestValidationModeArray.Offsets; + values = TestValidationModeArray.Values; + valid = TestValidationModeArray.Valid; + fcn = @() arrow.array.ListArray.fromArrays(offsets, values); + if valid + testCase.verifyWarningFree(fcn); + else + testCase.verifyError(fcn, "arrow:array:ValidateMinimalFailed"); + end + end + + function TestValidationModeNone(testCase, TestValidationModeArray) + % Verify that no error is thrown when supplying the + % ValidationMode name-value pair, with a value of + % arrow.array.ValidationMode.None, to the + % arrow.array.ListArray.fromArrays method. + offsets = TestValidationModeArray.Offsets; + values = TestValidationModeArray.Values; + validationMode = arrow.array.ValidationMode.None; + fcn = @() arrow.array.ListArray.fromArrays(offsets, values, ValidationMode=validationMode); + testCase.verifyWarningFree(fcn); + end + + function TestValidationModeMinimal(testCase, TestValidationModeArray) + % Verify that an error of type arrow:array:ValidateMinimalFailed + % is thrown when supplying the ValidationMode name-value pair, + % with a value of arrow.array.ValidationMode.Minimal, to the + % arrow.array.ListArray.fromArrays method, if the provided offsets + % and values arrays are invalid. 
+ offsets = TestValidationModeArray.Offsets; + values = TestValidationModeArray.Values; + valid = TestValidationModeArray.Valid; + validationMode = arrow.array.ValidationMode.Minimal; + fcn = @() arrow.array.ListArray.fromArrays(offsets, values, ValidationMode=validationMode); + if valid + testCase.verifyWarningFree(fcn); + else + testCase.verifyError(fcn, "arrow:array:ValidateMinimalFailed"); + end + end + + function TestValidationModeFull(testCase, TestValidationModeArray) + % Verify that an error of type arrow:array:ValidateFullFailed + % is thrown when supplying the ValidationMode name-value pair, + % with a value of arrow.array.ValidationMode.Full, to the + % arrow.array.ListArray.fromArrays method, if the provided offsets + % and values arrays are invalid. + offsets = TestValidationModeArray.Offsets; + values = TestValidationModeArray.Values; + validationMode = arrow.array.ValidationMode.Full; + valid = TestValidationModeArray.Valid; + fcn = @() arrow.array.ListArray.fromArrays(offsets, values, ValidationMode=validationMode); + if valid + testCase.verifyWarningFree(fcn); + else + testCase.verifyError(fcn, "arrow:array:ValidateFullFailed"); + end + end + + function TestValidationModeUnsupportedEnum(testCase) + % Verify that an error of type arrow:array:ValidateUnsupportedEnum + % is thrown when an unsupported integer enumeration value is + % supplied for the ValidationMode parameter to the internal + % C++ ListArray Proxy validate method. + offsets = arrow.array.Int32Array.fromMATLAB(int32([0, 1, 2])); + values = arrow.array.Float64Array.fromMATLAB([1, 2, 3]); + array = arrow.array.ListArray.fromArrays(offsets, values); + % Get the underlying Proxy instance from the ListArray. + proxy = array.Proxy; + % Call the internal Proxy method "validate" with an unsupported + % integer ValidationMode value. 
+ validationMode = uint8(3); + args = struct(ValidationMode=validationMode); + fcn = @() proxy.validate(args); + testCase.verifyError(fcn, "arrow:array:ValidateUnsupportedEnum"); + end + end end diff --git a/matlab/test/arrow/array/tSlice.m b/matlab/test/arrow/array/tSlice.m new file mode 100644 index 0000000000000..c99503371a41c --- /dev/null +++ b/matlab/test/arrow/array/tSlice.m @@ -0,0 +1,138 @@ +%TSLICE Unit tests verifying the behavior of arrow.array.Array's slice +%method. + +% Licensed to the Apache Software Foundation (ASF) under one or more +% contributor license agreements. See the NOTICE file distributed with +% this work for additional information regarding copyright ownership. +% The ASF licenses this file to you under the Apache License, Version +% 2.0 (the "License"); you may not use this file except in compliance +% with the License. You may obtain a copy of the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, +% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +% implied. See the License for the specific language governing +% permissions and limitations under the License. + +classdef tSlice < matlab.unittest.TestCase + + methods(Test) + function BooleanArray(testCase) + % Verify the slice method returns the expected array when + % called on a Boolean Array. + boolArray = arrow.array([true true false true true false], Valid=[1 2 3 6]); + slice = boolArray.slice(int64(2), int64(4)); + testCase.verifyEqual(slice.NumElements, int64(4)); + testCase.verifyEqual(slice.Valid, [true; true; false; false]); + testCase.verifyEqual(toMATLAB(slice), [true; false; false; false]); + end + + function NumericArray(testCase) + % Verify the slice method returns the expected array when + % called on a Numeric Array. 
+ float64Array = arrow.array(1:10, Valid=[2 3 4 5 8 10]); + slice = float64Array.slice(int64(4), int64(5)); + testCase.verifyEqual(slice.NumElements, int64(5)); + testCase.verifyEqual(slice.Valid, [true; true; false; false; true]); + testCase.verifyEqual(toMATLAB(slice), [4; 5; NaN; NaN; 8]); + end + + function DateArray(testCase) + % Verify the slice method returns the expected array when + % called on a Date Array. + import arrow.array.Date32Array + dates = datetime(2023, 11, 8:16); + date32Array = Date32Array.fromMATLAB(dates, Valid=[4 5 6 9]); + slice = date32Array.slice(int64(3), int64(4)); + testCase.verifyEqual(slice.NumElements, int64(4)); + testCase.verifyEqual(slice.Valid, [false; true; true; true]); + expected = [NaT; dates(4:6)']; + testCase.verifyEqual(toMATLAB(slice), expected); + end + + function TimeArray(testCase) + % Verify the slice method returns the expected array when + % called on a Time Array. + times = seconds(10:20); + time64Array = arrow.array(times, Valid=[2 4 6 7 8 10]); + slice = time64Array.slice(int64(5), int64(6)); + testCase.verifyEqual(slice.NumElements, int64(6)); + testCase.verifyEqual(slice.Valid, [false; true; true; true; false; true]); + expected = [NaN; times(6:8)'; NaN; times(10)]; + testCase.verifyEqual(toMATLAB(slice), expected); + end + + function TimestampArray(testCase) + % Verify the slice method returns the expected array when + % called on a TimestampArray. + dates = datetime(2023, 11, 8:16); + date32Array = arrow.array(dates, Valid=[1 2 4 5 6 8]); + slice = date32Array.slice(int64(5), int64(3)); + testCase.verifyEqual(slice.NumElements, int64(3)); + testCase.verifyEqual(slice.Valid, [true; true; false]); + expected = [dates(5:6)'; NaT]; + testCase.verifyEqual(toMATLAB(slice), expected); + end + + function StringArray(testCase) + % Verify the slice method returns the expected array when + % called on a StringArray. 
+ stringArray = arrow.array(["a" "b" "c" "d" "e" "f" "g"], Valid=[1 3 4 5 6]); + slice = stringArray.slice(int64(2), int64(3)); + testCase.verifyEqual(slice.NumElements, int64(3)); + testCase.verifyEqual(slice.Valid, [false; true; true]); + expected = [missing; "c"; "d"]; + testCase.verifyEqual(toMATLAB(slice), expected); + end + + function ListArray(testCase) + % Verify the slice method returns the expected array when + % called on a ListArray. + cellArray = {missing, [1, 2, 3], missing, [4, NaN], [6, 7, 8], missing}; + listArray = arrow.array(cellArray); + slice = listArray.slice(int64(2), int64(4)); + testCase.verifyEqual(slice.NumElements, int64(4)); + testCase.verifyEqual(slice.Valid, [true; false; true; true]); + expected = {[1; 2; 3]; missing; [4; NaN]; [6; 7; 8]}; + testCase.verifyEqual(toMATLAB(slice), expected); + end + + function StructArray(testCase) + % Verify the slice method returns the expected array when + % called on a StructArray. + numbers = [NaN; 2; 3; 4; 5; 6; 7; NaN; 9; 10]; + text = ["a"; missing; "c"; "d"; "e"; missing; "g"; "h"; "i"; "j"]; + t = table(numbers, text); + structArray = arrow.array(t, Valid=[1 2 3 6 7 8 10]); + slice = structArray.slice(int64(5), int64(4)); + testCase.verifyEqual(slice.NumElements, int64(4)); + testCase.verifyEqual(slice.Valid, [false; true; true; true]); + expected = t(5:8, :); + expected.numbers(1) = NaN; + expected.text(1) = missing; + testCase.verifyEqual(toMATLAB(slice), expected); + end + + function NonPositiveOffsetError(testCase) + % Verify the slice method throws an error whose identifier is + % "arrow:array:slice:NonPositiveOffset" if given a non-positive + % value as the offset. 
+ array = arrow.array(1:10); + fcn = @() array.slice(int64(0), int64(2)); + testCase.verifyError(fcn, "arrow:array:slice:NonPositiveOffset"); + fcn = @() array.slice(int64(-1), int64(2)); + testCase.verifyError(fcn, "arrow:array:slice:NonPositiveOffset"); + end + + function NegativeLengthError(testCase) + % Verify the slice method throws an error whose identifier is + % "arrow:array:slice:NegativeLength" if given a negative value + % as the length. + array = arrow.array(1:10); + fcn = @() array.slice(int64(1), int64(-1)); + testCase.verifyError(fcn, "arrow:array:slice:NegativeLength"); + end + end +end \ No newline at end of file diff --git a/matlab/test/arrow/array/tTime32Array.m b/matlab/test/arrow/array/tTime32Array.m index cc2fad64b2a28..24c3508a86015 100644 --- a/matlab/test/arrow/array/tTime32Array.m +++ b/matlab/test/arrow/array/tTime32Array.m @@ -30,6 +30,15 @@ function Basic(tc) times = seconds(1:4); array = tc.ArrowArrayConstructorFcn(times); tc.verifyInstanceOf(array, "arrow.array.Time32Array"); + tc.verifyEqual(array.toMATLAB, times'); + end + + function TimeUnitDefaultValue(tc) + % Verify that the default value of "TimeUnit" is "second". + times = seconds([1.2 1.3 1.4 1.5 1.7]); + array = tc.ArrowArrayConstructorFcn(times); + tc.verifyEqual(array.Type.TimeUnit, arrow.type.TimeUnit.Second); + tc.verifyEqual(array.toMATLAB, seconds([1;1;1;2;2])); end function TypeIsTime32(tc) @@ -274,6 +283,30 @@ function TestIsEqualFalseTimeUnitMistmatch(tc) % arrays are not equal tc.verifyFalse(isequal(array1, array2)); end + + function RoundTimeBySpecifiedTimeUnit(tc) + % Verify that the input parameter "TimeUnit" is used to specify + % the time resolution. The value is rounded off based on the + % specified "TimeUnit". 
+ + % TimeUnit="Second" + matlabTimes = seconds([1.1, 1.4, 1.5, 1.9, 2.001]); + arrowTimes = tc.ArrowArrayConstructorFcn(matlabTimes, TimeUnit="Second"); + tc.verifyEqual(arrowTimes.toMATLAB(),seconds([1, 1, 2, 2, 2])'); + + % TimeUnit="Millisecond" + matlabTimes = seconds([1.1, 1.99, 1.001, 1.0004, 1.0005, 2.001]); + arrowTimes = tc.ArrowArrayConstructorFcn(matlabTimes, TimeUnit="Millisecond"); + tc.verifyEqual(arrowTimes.toMATLAB(),seconds([1.1, 1.99, 1.001, 1, 1.001, 2.001])','AbsTol',seconds(1e-15)); + end + + function TimeUnitIsReadOnly(tc) + % Verify that arrowArray.Type.TimeUnit cannot be changed. + + matlabTimes = seconds([1.1, 1.4, 1.5, 1.9, 2.001]); + arrowArray = tc.ArrowArrayConstructorFcn(matlabTimes); + tc.verifyError(@()setfield(arrowArray.Type,"TimeUnit", "millisecond"),'MATLAB:class:SetProhibited'); + end end methods diff --git a/matlab/test/arrow/array/tTime64Array.m b/matlab/test/arrow/array/tTime64Array.m index a078c5e2173f3..3f66ebd638c65 100644 --- a/matlab/test/arrow/array/tTime64Array.m +++ b/matlab/test/arrow/array/tTime64Array.m @@ -30,6 +30,26 @@ function Basic(tc) times = seconds(1:4); array = tc.ArrowArrayConstructorFcn(times); tc.verifyInstanceOf(array, "arrow.array.Time64Array"); + tc.verifyEqual(array.toMATLAB, times'); + end + + function TimeUnitDefaultValue(tc) + % Verify that the default value of "TimeUnit" is "Microsecond". + matlabTimes = seconds([1; ... + 0.001; ... + 2.004521; ... + 3.1234564; ... + 4.1234566; ... + 5.000000123]); + arrowArray = tc.ArrowArrayConstructorFcn(matlabTimes); + tc.verifyEqual(arrowArray.Type.TimeUnit, arrow.type.TimeUnit.Microsecond); + tc.verifyEqual(arrowArray.toMATLAB(), ... + seconds([1;... + 0.001; ... + 2.004521; ... + 3.123456; ... + 4.123457; ... 
+ 5])); end function TypeIsTime64(tc) @@ -290,6 +310,62 @@ function TestIsEqualFalseTimeUnitMistmatch(tc) % arrays are not equal tc.verifyFalse(isequal(array1, array2)); end + + function RoundTimeBySpecifiedTimeUnit(tc) + % Verify that the input parameter "TimeUnit" is used to specify + % the time resolution. The value is rounded off based on the + % specified "TimeUnit". + + % TimeUnit="Microsecond" + matlabTimes = seconds([1.000001, ... + 2.999999, ... + 0.0002004, ... + 0.0000035, ... + 10.123456499, ... + 9.999999543]); + arrowTimes = tc.ArrowArrayConstructorFcn(matlabTimes, TimeUnit="Microsecond"); + tc.verifyEqual(arrowTimes.toMATLAB(), ... + seconds([1.000001, ... + 2.999999, ... + 0.0002, ... + 0.000004, ... + 10.123456, ... + 10])', ... + 'AbsTol',seconds(1e-14)); + + % TimeUnit="Nanosecond" + matlabTimes = seconds([1, ... + 1.123, ... + 1.12345, ... + 1.123456, ... + 1.1234567, ... + 1.12345678, ... + 1.123456789, ... + 1.1234567894, ... + 1.1234567895, ... + 1.123456789009]); + arrowTimes = tc.ArrowArrayConstructorFcn(matlabTimes, TimeUnit="Nanosecond"); + tc.verifyEqual(arrowTimes.toMATLAB(),... + seconds([1, ... + 1.123, ... + 1.12345, ... + 1.123456, ... + 1.1234567, ... + 1.12345678, ... + 1.123456789, ... + 1.123456789, ... + 1.123456790, ... + 1.123456789])',... + 'AbsTol',seconds(1e-15)); + end + + function TimeUnitIsReadOnly(tc) + % Verify that arrowArray.Type.TimeUnit cannot be changed. 
+ + matlabTimes = seconds([1.000001, 2.999999, 0.0002004]); + arrowArray = tc.ArrowArrayConstructorFcn(matlabTimes); + tc.verifyError(@()setfield(arrowArray.Type,"TimeUnit", "Nanosecond"),'MATLAB:class:SetProhibited'); + end end methods diff --git a/matlab/test/arrow/tabular/tTabularDisplay.m b/matlab/test/arrow/tabular/tTabularDisplay.m new file mode 100644 index 0000000000000..027517edeb2d6 --- /dev/null +++ b/matlab/test/arrow/tabular/tTabularDisplay.m @@ -0,0 +1,342 @@ +%TTABULARDISPLAY Unit tests verifying the display of arrow.tabular.Table +%and arrow.tabular.RecordBatch objects. + +% Licensed to the Apache Software Foundation (ASF) under one or more +% contributor license agreements. See the NOTICE file distributed with +% this work for additional information regarding copyright ownership. +% The ASF licenses this file to you under the Apache License, Version +% 2.0 (the "License"); you may not use this file except in compliance +% with the License. You may obtain a copy of the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, +% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +% implied. See the License for the specific language governing +% permissions and limitations under the License. + +classdef tTabularDisplay < matlab.unittest.TestCase + + properties (TestParameter) + TabularType + end + + methods (TestParameterDefinition, Static) + function TabularType = initializeTabularType() + + tableStruct = struct(FullClassName="arrow.tabular.Table", ... + ClassName="Table", FromTableFcn = @arrow.table); + + recordBatchStruct = struct(FullClassName="arrow.tabular.RecordBatch", ... 
+ ClassName="RecordBatch", FromTableFcn=@arrow.recordBatch); + + TabularType = struct(Table=tableStruct, RecordBatch=recordBatchStruct); + end + + end + + methods (Test) + + function ZeroRowsZeroColumns(testCase, TabularType) + % Verify tabular object display when the object has zero rows + % and zero columns. + import arrow.internal.test.display.makeLinkString + + tabularObj = TabularType.FromTableFcn(table); %#ok + + classNameString = makeLinkString(FullClassName=TabularType.FullClassName, ... + ClassName=TabularType.ClassName, BoldFont=true); + zeroString = getNumString(0); + header = compose(" Arrow %s with %s rows and %s columns", classNameString, zeroString, zeroString); + expectedDisplay = char(header + newline + newline); + actualDisplay = evalc('disp(tabularObj)'); + + testCase.verifyEqual(actualDisplay, expectedDisplay); + end + + function ZeroRowsOneColumn(testCase, TabularType) + % Verify tabular object display when the object has zero rows + % and one column. + import arrow.internal.test.display.makeLinkString + + t = table(1, VariableNames="Number"); + tabularObj = TabularType.FromTableFcn(t(1:0, :)); %#ok + + classNameString = makeLinkString(FullClassName=TabularType.FullClassName, ... + ClassName=TabularType.ClassName, BoldFont=true); + header = compose(" Arrow %s with %s rows and %s column:", classNameString, getNumString(0), getNumString(1)); + + fieldString = makeFieldString("Number", "Float64", "arrow.type.Float64Type"); + schema = join([" Schema:" " " + fieldString], [newline newline]); + + expectedDisplay = char(join([header schema + newline + newline], [newline newline])); + actualDisplay = evalc('disp(tabularObj)'); + + testCase.verifyEqual(actualDisplay, expectedDisplay); + end + + function ZeroRowsMultipleColumns(testCase, TabularType) + % Verify that the display of a 0xN arrow tabular object displays + % the schema but no rows because there are zero rows. + import arrow.internal.test.display.makeLinkString + t = table(Size=[0,6], ... 
+ VariableTypes=["double" "single" "int8" "logical" "uint64" "string"], ... + VariableNames=["ratio = a / b" "number" "ID" "A very looooooooooooooong name" "Result" "侯磊"]); + tabularObj = TabularType.FromTableFcn(t); + actualDisplay = evalc('disp(tabularObj)'); + + classNameString = makeLinkString(FullClassName=TabularType.FullClassName, ... + ClassName=TabularType.ClassName, BoldFont=true); + header = compose(" Arrow %s with %s rows and %s columns:", classNameString, getNumString(0), getNumString(6)); + var1Field = char(makeFieldString( "ratio = a / b", "Float64", "arrow.type.Float64Type")); + var2Field = char(makeFieldString( "number", "Float32", "arrow.type.Float32Type")); + var3Field = char(makeFieldString( "ID", "Int8" , "arrow.type.Int8Type")); + var4Field = char(makeFieldString("A very looooooooooooooong name", "Boolean", "arrow.type.BooleanType")); + var5Field = char(makeFieldString( "Result", "UInt64" , "arrow.type.UInt64Type")); + var6Field = char(makeFieldString( "侯磊", "String" , "arrow.type.StringType")); + expectedDisplay = [char(header), newline, ... + newline, ... + ' Schema:', newline, ... + newline ... + ' ', var1Field, ' | ', var2Field, ' | ', var3Field, ' | ', var4Field, ' | ', var5Field, ' | ', var6Field, newline, ... + newline]; + + testCase.verifyEqual(actualDisplay, expectedDisplay); + end + + function OneRowOneColumn(testCase, TabularType) + % Verify tabular object display when the object has one row + % and column. + import arrow.internal.test.display.makeLinkString + + t = table(1, VariableNames="Number"); + tabularObj = TabularType.FromTableFcn(t); %#ok + + classNameString = makeLinkString(FullClassName=TabularType.FullClassName, ... 
+ ClassName=TabularType.ClassName, BoldFont=true); + header = compose(" Arrow %s with %s row and %s column:", classNameString, getNumString(1), getNumString(1)); + + fieldString = makeFieldString("Number", "Float64", "arrow.type.Float64Type"); + schema = join([" Schema:" " " + fieldString], [newline newline]); + row = join([" First Row:" " 1"], [newline newline]); + + + expectedDisplay = char(join([header schema row + newline + newline], [newline newline])); + actualDisplay = evalc('disp(tabularObj)'); + + testCase.verifyEqual(actualDisplay, expectedDisplay); + end + + function MultipleRowsAndColumns(testCase, TabularType) + % Verify tabular object display when the object has multiple rows + % and columns. Only the first row is displayed. All columns are + % displayed. + import arrow.internal.test.display.makeLinkString + + t = table((1:2)', ["A"; "B"], true(2, 1), VariableNames=["Number", "Letter", "Logical"]); + tabularObj = TabularType.FromTableFcn(t); %#ok + + classNameString = makeLinkString(FullClassName=TabularType.FullClassName, ... 
+ ClassName=TabularType.ClassName, BoldFont=true); + header = compose(" Arrow %s with %s rows and %s columns:", classNameString, getNumString(2), getNumString(3)); + + fieldOneString = makeFieldString("Number", "Float64", "arrow.type.Float64Type"); + fieldTwoString = makeFieldString("Letter", "String", "arrow.type.StringType"); + fieldThreeString = makeFieldString("Logical", "Boolean", "arrow.type.BooleanType"); + + fields = join([fieldOneString fieldTwoString fieldThreeString], " | "); + schema = join([" Schema:" " " + fields], [newline newline]); + row = join([" First Row:" " 1 | ""A"" | true"], [newline newline]); + + expectedDisplay = char(join([header schema row + newline + newline], [newline newline])); + actualDisplay = evalc('disp(tabularObj)'); + + testCase.verifyEqual(actualDisplay, expectedDisplay); + end + + function VeryWideTabular(testCase, TabularType) + % Verify that all variables are displayed without any truncation + % even when the tabular object is wider than the MATLAB Command + % Window. + import arrow.internal.test.display.makeLinkString + + t = array2table([1:100;101:200],VariableNames="x"+(1:100)); + arrowTabularObj = TabularType.FromTableFcn(t); + actualDisplay = evalc('disp(arrowTabularObj)'); + + classNameString = makeLinkString(FullClassName=TabularType.FullClassName, ... 
+ ClassName=TabularType.ClassName, BoldFont=true); + header = compose(" Arrow %s with %s rows and %s columns:", classNameString, getNumString(2), getNumString(100)); + schemaDisplay = [' Schema:', newline, newline, ' ']; + dataDisplay = [' First Row:', newline, newline, ' ']; + for i = 1:width(t) + if i < width(t) + schemaDisplay = [schemaDisplay, char(makeFieldString("x"+i, "Float64", "arrow.type.Float64Type")), ' | ']; + dataDisplay = [dataDisplay, num2str(i), ' | ']; + else + schemaDisplay = [schemaDisplay, char(makeFieldString("x"+i, "Float64", "arrow.type.Float64Type"))]; + dataDisplay = [dataDisplay, num2str(i)]; + end + end + expectedDisplay = [char(header), newline, ... + newline, ... + schemaDisplay, newline, ... + newline ... + dataDisplay, newline, ... + newline]; + testCase.verifyEqual(actualDisplay, expectedDisplay); + end + + function DataContainsHyperlink(testCase, TabularType) + % Verify that the data text containing hyperlink is always + % displayed as expected no matter whether hotlinks is turned on + % or off. + import arrow.internal.test.display.makeLinkString + + hLink1 = string('foo'); + hLink2 = string('another foo'); + t = table(["a";"bcd";missing;""],[hLink1;hLink2;hLink1;hLink2],[NaN;1;2;3],'VariableNames',["Description", "Link_to_doc", "Result"]); + arrowTabularObj = TabularType.FromTableFcn(t); + + % The display of schema and actual tabular data always contains + % hyperlinks no matter whether hotlinks is turned on or off + var1Field = char(makeFieldString("Description", "String" , "arrow.type.StringType")); + var2Field = char(makeFieldString("Link_to_doc", "String" , "arrow.type.StringType")); + var3Field = char(makeFieldString( "Result", "Float64", "arrow.type.Float64Type")); + schemaDisplay = [' Schema:', newline, newline, ... + ' ', var1Field, ' | ', var2Field, ' | ', var3Field]; + dataDisplay = [' First Row:', newline, newline, ... + ' ', '"a"', ' | ', '"', 'foo', '"', ' | ', 'null']; + expectedDisplayOfData = [newline, ... 
+ ' Schema:', newline, ... + newline ... + ' ', var1Field, ' | ', var2Field, ' | ', var3Field, newline, ... + newline, ... + ' First Row:', newline, ... + newline, ... + ' ', '"a"', ' | ', '"', 'foo', '"', ' | ', 'null', newline, ... + newline]; + + % hotlinks is turned off + actualDisplay = evalc('feature(''hotlinks'',''off'');disp(arrowTabularObj)'); + testCase.verifySubstring(actualDisplay, expectedDisplayOfData); + + % hotlinks is turned on + actualDisplay = evalc('feature(''hotlinks'',''on'');disp(arrowTabularObj)'); + testCase.verifySubstring(actualDisplay, expectedDisplayOfData); + end + + function DisplayClassNameWhenDataIsNotArray(testCase, TabularType) + % Verify that the class name instead of the actual data will be + % displayed when the datatype of a tabular variable is a nested + % array type (e.g. StructArray or ListArray). + import arrow.internal.test.display.makeLinkString + + t = table(datetime(2023,1,[1;2;3]),table([1;2;3],[4;5;6]),seconds([1;2;3])); + arrowTabularObj = TabularType.FromTableFcn(t); + actualDisplay = evalc('disp(arrowTabularObj)'); + + classNameString = makeLinkString(FullClassName=TabularType.FullClassName, ... + ClassName=TabularType.ClassName, BoldFont=true); + header = compose(" Arrow %s with %s rows and %s columns:", classNameString, getNumString(3), getNumString(3)); + var1Field = char(makeFieldString("Var1", "Timestamp" , "arrow.type.TimestampType")); + var2Field = char(makeFieldString("Var2", "Struct" , "arrow.type.StructType")); + var3Field = char(makeFieldString("Var3", "Time64", "arrow.type.Time64Type")); + expectedDisplay = [char(header), newline, ... + newline, ... + ' Schema:', newline, ... + newline, ... + ' ', var1Field, ' | ', var2Field, ' | ', var3Field, newline, ... + newline, ... + ' First Row:', newline, ... + newline, ... + ' ', '2023-01-01 00:00:00.000000 | | 00:00:01.000000', newline,... 
+ newline]; + + testCase.verifyEqual(actualDisplay, expectedDisplay); + end + + function DisplayInvalidData(testCase, TabularType) + % Verify that "null" is displayed for invalid value. + import arrow.internal.test.display.makeLinkString + + t = table(seconds([NaN;1]), string([missing;"a"]), string(["";"b"]), [NaN;1], datetime(2023,1,[NaN;2]),... + VariableNames=["durationVar", "stringVar1", "stringVar2", "doubleVar", "datetimeVar"]); + arrowTabularObj = TabularType.FromTableFcn(t); + actualDisplay = evalc('disp(arrowTabularObj)'); + + classNameString = makeLinkString(FullClassName=TabularType.FullClassName, ... + ClassName=TabularType.ClassName, BoldFont=true); + header = compose(" Arrow %s with %s rows and %s columns:", classNameString, getNumString(2), getNumString(5)); + var1Field = char(makeFieldString("durationVar", "Time64" , "arrow.type.Time64Type")); + var2Field = char(makeFieldString("stringVar1", "String" , "arrow.type.StringType")); + var3Field = char(makeFieldString("stringVar2", "String", "arrow.type.StringType")); + var4Field = char(makeFieldString("doubleVar", "Float64", "arrow.type.Float64Type")); + var5Field = char(makeFieldString("datetimeVar", "Timestamp", "arrow.type.TimestampType")); + expectedDisplay = [char(header), newline, ... + newline, ... + ' Schema:', newline, ... + newline, ... + ' ', var1Field, ' | ', var2Field, ' | ', var3Field, ' | ', var4Field, ' | ', var5Field, newline, ... + newline, ... + ' First Row:', newline, ... + newline, ... + ' ', 'null | null | "" | null | null', newline,... + newline]; + + testCase.verifyEqual(actualDisplay, expectedDisplay); + end + + function Unicode(testCase, TabularType) + % Verify that unicode characters are displayed well. The + % current display doesn't align multiple rows vertically. + % Therefore, there is no alignment concern. + import arrow.internal.test.display.makeLinkString + + t = table([0.1;0.2],string(char([26228 22825; 22810 20113])), ["Kevin"; "Lei"],... 
+ VariableNames=["Number" "Weather" string(char([21517 23383]))]); + arrowTabularObj = TabularType.FromTableFcn(t); + actualDisplay = evalc('disp(arrowTabularObj)'); + + classNameString = makeLinkString(FullClassName=TabularType.FullClassName, ... + ClassName=TabularType.ClassName, BoldFont=true); + header = compose(" Arrow %s with %s rows and %s columns:", classNameString, getNumString(2), getNumString(3)); + var1Field = char(makeFieldString(t.Properties.VariableNames{1}, "Float64" , "arrow.type.Float64Type")); + var2Field = char(makeFieldString(t.Properties.VariableNames{2}, "String" , "arrow.type.StringType")); + var3Field = char(makeFieldString(t.Properties.VariableNames{3}, "String", "arrow.type.StringType")); + expectedDisplay = [char(header), newline, ... + newline, ... + ' Schema:', newline, ... + newline, ... + ' ', var1Field, ' | ', var2Field, ' | ', var3Field, newline, ... + newline, ... + ' First Row:', newline, ... + newline, ... + ' ', '0.1 | "', char([26228 22825]), '" | "Kevin"', newline,... + newline]; + + testCase.verifyEqual(actualDisplay, expectedDisplay); + end + end +end + +function numString = getNumString(num) + if usejava("desktop") + numString = compose("<strong>%d</strong>", num); + else + numString = compose("%d", num); + end +end + +function str = makeFieldString(fieldName, classType, fullClassType) + import arrow.internal.test.display.makeLinkString + + if usejava("desktop") + name = compose("<strong>%s</strong>:", fieldName); + typeStr = makeLinkString(FullClassName=fullClassType, ClassName=classType, BoldFont=true); + str = name + " " + typeStr; + else + str = fieldName + ": " + classType; + end +end \ No newline at end of file diff --git a/matlab/test/arrow/tabular/tTabularInternal.m b/matlab/test/arrow/tabular/tTabularInternal.m new file mode 100644 index 0000000000000..28075d7763dea --- /dev/null +++ b/matlab/test/arrow/tabular/tTabularInternal.m @@ -0,0 +1,110 @@ +%TTABULARINTERNAL Unit tests for internal functionality of tabular types.
+ +% Licensed to the Apache Software Foundation (ASF) under one or more +% contributor license agreements. See the NOTICE file distributed with +% this work for additional information regarding copyright ownership. +% The ASF licenses this file to you under the Apache License, Version +% 2.0 (the "License"); you may not use this file except in compliance +% with the License. You may obtain a copy of the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, +% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +% implied. See the License for the specific language governing +% permissions and limitations under the License. + +classdef tTabularInternal < matlab.unittest.TestCase + + properties(TestParameter) + TabularObjectWithAllTypes + + TabularObjectWithOneColumn + + TabularObjectWithThreeRows + end + + methods (TestParameterDefinition, Static) + function TabularObjectWithAllTypes = initializeTabularObjectWithAllTypes() + arrays = arrow.internal.test.tabular.createAllSupportedArrayTypes(NumRows=1); + arrowTable = arrow.tabular.Table.fromArrays(arrays{:}); + arrowRecordBatch = arrow.tabular.RecordBatch.fromArrays(arrays{:}); + TabularObjectWithAllTypes = struct(Table=arrowTable, ... + RecordBatch=arrowRecordBatch); + end + + function TabularObjectWithOneColumn = initializeTabularObjectWithOneColumn() + t = table((1:3)'); + arrowTable = arrow.table(t); + arrowRecordBatch = arrow.recordBatch(t); + TabularObjectWithOneColumn = struct(Table=arrowTable, ... + RecordBatch=arrowRecordBatch); + end + + function TabularObjectWithThreeRows = initializeTabularObjectWithThreeRows() + t = table((1:3)', ["A"; "B"; "C"]); + arrowTable = arrow.table(t); + arrowRecordBatch = arrow.recordBatch(t); + TabularObjectWithThreeRows = struct(Table=arrowTable, ...
+ RecordBatch=arrowRecordBatch); + end + end + + methods (Test) + function RowWithAllTypes(testCase, TabularObjectWithAllTypes) + % Verify getRowAsString successfully returns the expected string + % when called on a Table/RecordBatch that contains all + % supported array types. + proxy = TabularObjectWithAllTypes.Proxy; + columnStrs = ["false", "2024-02-23", "2023-08-24", "78", "38", ... + "24", "48", "89", "102", "", """107""", "", ... + "00:03:44", "00:00:07.000000", "2024-02-10 00:00:00.000000", ... + "107", "143", "36", "51"]; + expectedString = strjoin(columnStrs, " | "); + actualString = proxy.getRowAsString(struct(Index=int64(1))); + testCase.verifyEqual(actualString, expectedString); + end + + function RowWithOneColumn(testCase, TabularObjectWithOneColumn) + % Verify getRowAsString successfully returns the expected string + % when called on a Table/RecordBatch with one column. + proxy = TabularObjectWithOneColumn.Proxy; + expectedString = "1"; + actualString = proxy.getRowAsString(struct(Index=int64(1))); + testCase.verifyEqual(actualString, expectedString); + end + + function RowIndex(testCase, TabularObjectWithThreeRows) + % Verify getRowAsString returns the expected string for + % the provided row index. + proxy = TabularObjectWithThreeRows.Proxy; + + actualString = proxy.getRowAsString(struct(Index=int64(1))); + expectedString = "1 | ""A"""; + testCase.verifyEqual(actualString, expectedString); + + actualString = proxy.getRowAsString(struct(Index=int64(2))); + expectedString = "2 | ""B"""; + testCase.verifyEqual(actualString, expectedString); + + actualString = proxy.getRowAsString(struct(Index=int64(3))); + expectedString = "3 | ""C"""; + testCase.verifyEqual(actualString, expectedString); + end + + function GetRowAsStringFailed(testCase, TabularObjectWithThreeRows) + % Verify getRowAsString throws an error with the ID + % arrow:tabular:GetRowAsStringFailed if provided invalid index + % values. 
+ proxy = TabularObjectWithThreeRows.Proxy; + fcn = @() proxy.getRowAsString(struct(Index=int64(0))); + testCase.verifyError(fcn, "arrow:tabular:GetRowAsStringFailed"); + + fcn = @() proxy.getRowAsString(struct(Index=int64(4))); + testCase.verifyError(fcn, "arrow:tabular:GetRowAsStringFailed"); + end + + end + +end \ No newline at end of file diff --git a/matlab/test/arrow/type/traits/tListTraits.m b/matlab/test/arrow/type/traits/tListTraits.m index 444c977503123..2559d256d47f8 100644 --- a/matlab/test/arrow/type/traits/tListTraits.m +++ b/matlab/test/arrow/type/traits/tListTraits.m @@ -20,7 +20,7 @@ ArrayConstructor = @arrow.array.ListArray ArrayClassName = "arrow.array.ListArray" ArrayProxyClassName = "arrow.array.proxy.ListArray" - ArrayStaticConstructor = missing + ArrayStaticConstructor = @arrow.array.ListArray.fromMATLAB TypeConstructor = @arrow.type.ListType TypeClassName = "arrow.type.ListType" TypeProxyClassName = "arrow.type.proxy.ListType" diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 2a430055a5a86..529265235c746 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -21,7 +21,7 @@ cmake_minimum_required(VERSION 3.16) project(pyarrow) -set(PYARROW_VERSION "14.0.0-SNAPSHOT") +set(PYARROW_VERSION "15.0.0-SNAPSHOT") string(REGEX MATCH "^[0-9]+\\.[0-9]+\\.[0-9]+" PYARROW_BASE_VERSION "${PYARROW_VERSION}") # Running from a Python sdist tarball @@ -84,7 +84,7 @@ set(CMAKE_MACOSX_RPATH 1) if(DEFINED ENV{MACOSX_DEPLOYMENT_TARGET}) set(CMAKE_OSX_DEPLOYMENT_TARGET $ENV{MACOSX_DEPLOYMENT_TARGET}) else() - set(CMAKE_OSX_DEPLOYMENT_TARGET 10.14) + set(CMAKE_OSX_DEPLOYMENT_TARGET 10.15) endif() # Generate a Clang compile_commands.json "compilation database" file for use diff --git a/python/pyarrow/_compute.pyx b/python/pyarrow/_compute.pyx index 25f77d8160ea8..51dfdbf8ebbbe 100644 --- a/python/pyarrow/_compute.pyx +++ b/python/pyarrow/_compute.pyx @@ -1682,6 +1682,9 @@ class StrptimeOptions(_StrptimeOptions): ---------- format : str 
Pattern for parsing input strings as timestamps, such as "%Y/%m/%d". + Note that the semantics of the format follow the C/C++ strptime, not the Python one. + There are differences in behavior, for example how the "%y" placeholder + handles years with less than four digits. unit : str Timestamp unit of the output. Accepted values are "s", "ms", "us", "ns". diff --git a/python/pyarrow/_csv.pyx b/python/pyarrow/_csv.pyx index e532d8d8ab22a..508488c0c3b3c 100644 --- a/python/pyarrow/_csv.pyx +++ b/python/pyarrow/_csv.pyx @@ -26,8 +26,7 @@ from collections.abc import Mapping from pyarrow.includes.common cimport * from pyarrow.includes.libarrow cimport * -from pyarrow.includes.libarrow_python cimport (MakeInvalidRowHandler, - PyInvalidRowCallback) +from pyarrow.includes.libarrow_python cimport * from pyarrow.lib cimport (check_status, Field, MemoryPool, Schema, RecordBatchReader, ensure_type, maybe_unbox_memory_pool, get_input_stream, @@ -1251,7 +1250,7 @@ def read_csv(input_file, read_options=None, parse_options=None, CCSVParseOptions c_parse_options CCSVConvertOptions c_convert_options CIOContext io_context - shared_ptr[CCSVReader] reader + SharedPtrNoGIL[CCSVReader] reader shared_ptr[CTable] table _get_reader(input_file, read_options, &stream) diff --git a/python/pyarrow/_dataset.pxd b/python/pyarrow/_dataset.pxd index 210e5558009ec..bee9fc1f0987a 100644 --- a/python/pyarrow/_dataset.pxd +++ b/python/pyarrow/_dataset.pxd @@ -31,7 +31,7 @@ cdef CFileSource _make_file_source(object file, FileSystem filesystem=*) cdef class DatasetFactory(_Weakrefable): cdef: - shared_ptr[CDatasetFactory] wrapped + SharedPtrNoGIL[CDatasetFactory] wrapped CDatasetFactory* factory cdef init(self, const shared_ptr[CDatasetFactory]& sp) @@ -45,7 +45,7 @@ cdef class DatasetFactory(_Weakrefable): cdef class Dataset(_Weakrefable): cdef: - shared_ptr[CDataset] wrapped + SharedPtrNoGIL[CDataset] wrapped CDataset* dataset public dict _scan_options @@ -59,7 +59,7 @@ cdef class 
Dataset(_Weakrefable): cdef class Scanner(_Weakrefable): cdef: - shared_ptr[CScanner] wrapped + SharedPtrNoGIL[CScanner] wrapped CScanner* scanner cdef void init(self, const shared_ptr[CScanner]& sp) @@ -122,7 +122,7 @@ cdef class FileWriteOptions(_Weakrefable): cdef class Fragment(_Weakrefable): cdef: - shared_ptr[CFragment] wrapped + SharedPtrNoGIL[CFragment] wrapped CFragment* fragment cdef void init(self, const shared_ptr[CFragment]& sp) diff --git a/python/pyarrow/_dataset.pyx b/python/pyarrow/_dataset.pyx index 48ee676915311..d7d69965d000a 100644 --- a/python/pyarrow/_dataset.pyx +++ b/python/pyarrow/_dataset.pyx @@ -3227,7 +3227,7 @@ cdef class RecordBatchIterator(_Weakrefable): object iterator_owner # Iterator is a non-POD type and Cython uses offsetof, leading # to a compiler warning unless wrapped like so - shared_ptr[CRecordBatchIterator] iterator + SharedPtrNoGIL[CRecordBatchIterator] iterator def __init__(self): _forbid_instantiation(self.__class__, subclasses_instead=False) @@ -3273,7 +3273,7 @@ cdef class TaggedRecordBatchIterator(_Weakrefable): """An iterator over a sequence of record batches with fragments.""" cdef: object iterator_owner - shared_ptr[CTaggedRecordBatchIterator] iterator + SharedPtrNoGIL[CTaggedRecordBatchIterator] iterator def __init__(self): _forbid_instantiation(self.__class__, subclasses_instead=False) diff --git a/python/pyarrow/_flight.pyx b/python/pyarrow/_flight.pyx index 79aa24e4ce8e3..8fe9465a13d9c 100644 --- a/python/pyarrow/_flight.pyx +++ b/python/pyarrow/_flight.pyx @@ -1014,11 +1014,8 @@ cdef class _MetadataRecordBatchReader(_Weakrefable, _ReadPandasMixin): Returns ------- - data : FlightStreamChunk + chunk : FlightStreamChunk The next FlightStreamChunk in the stream. - app_metadata : Buffer or None - Application-specific metadata for the batch as defined by - Flight. 
Raises ------ diff --git a/python/pyarrow/_parquet.pyx b/python/pyarrow/_parquet.pyx index 48091367b2ff8..089ed7c75ce58 100644 --- a/python/pyarrow/_parquet.pyx +++ b/python/pyarrow/_parquet.pyx @@ -24,6 +24,7 @@ import warnings from cython.operator cimport dereference as deref from pyarrow.includes.common cimport * from pyarrow.includes.libarrow cimport * +from pyarrow.includes.libarrow_python cimport * from pyarrow.lib cimport (_Weakrefable, Buffer, Schema, check_status, MemoryPool, maybe_unbox_memory_pool, @@ -1165,7 +1166,7 @@ cdef class ParquetReader(_Weakrefable): cdef: object source CMemoryPool* pool - unique_ptr[FileReader] reader + UniquePtrNoGIL[FileReader] reader FileMetaData _metadata shared_ptr[CRandomAccessFile] rd_handle @@ -1334,7 +1335,7 @@ cdef class ParquetReader(_Weakrefable): vector[int] c_row_groups vector[int] c_column_indices shared_ptr[CRecordBatch] record_batch - unique_ptr[CRecordBatchReader] recordbatchreader + UniquePtrNoGIL[CRecordBatchReader] recordbatchreader self.set_batch_size(batch_size) @@ -1366,7 +1367,6 @@ cdef class ParquetReader(_Weakrefable): check_status( recordbatchreader.get().ReadNext(&record_batch) ) - if record_batch.get() == NULL: break diff --git a/python/pyarrow/_s3fs.pyx b/python/pyarrow/_s3fs.pyx index f7f2a5f80887c..13b8c748cb8ca 100644 --- a/python/pyarrow/_s3fs.pyx +++ b/python/pyarrow/_s3fs.pyx @@ -269,8 +269,6 @@ cdef class S3FileSystem(FileSystem): load_frequency=900, proxy_options=None, allow_bucket_creation=False, allow_bucket_deletion=False, retry_strategy: S3RetryStrategy = AwsStandardS3RetryStrategy(max_attempts=3)): - ensure_s3_initialized() - cdef: optional[CS3Options] options shared_ptr[CS3FileSystem] wrapped diff --git a/python/pyarrow/includes/libarrow_python.pxd b/python/pyarrow/includes/libarrow_python.pxd index 4d109fc660e08..b8a3041796f97 100644 --- a/python/pyarrow/includes/libarrow_python.pxd +++ b/python/pyarrow/includes/libarrow_python.pxd @@ -261,6 +261,14 @@ cdef extern from 
"arrow/python/common.h" namespace "arrow::py": void RestorePyError(const CStatus& status) except * +cdef extern from "arrow/python/common.h" namespace "arrow::py" nogil: + cdef cppclass SharedPtrNoGIL[T](shared_ptr[T]): + # This looks like the only way to satisfy both Cython 2 and Cython 3 + SharedPtrNoGIL& operator=(...) + cdef cppclass UniquePtrNoGIL[T, DELETER=*](unique_ptr[T, DELETER]): + UniquePtrNoGIL& operator=(...) + + cdef extern from "arrow/python/inference.h" namespace "arrow::py": c_bool IsPyBool(object o) c_bool IsPyInt(object o) diff --git a/python/pyarrow/ipc.pxi b/python/pyarrow/ipc.pxi index deb3bb728aea9..5d20a4f8b72cb 100644 --- a/python/pyarrow/ipc.pxi +++ b/python/pyarrow/ipc.pxi @@ -632,7 +632,7 @@ cdef class RecordBatchReader(_Weakrefable): Notes ----- To import and export using the Arrow C stream interface, use the - ``_import_from_c`` and ``_export_to_c`` methods. However, keep in mind this + ``_import_from_c`` and ``_export_to_c`` methods. However, keep in mind this interface is intended for expert users.
Examples @@ -977,7 +977,7 @@ cdef _wrap_record_batch_with_metadata(CRecordBatchWithMetadata c): cdef class _RecordBatchFileReader(_Weakrefable): cdef: - shared_ptr[CRecordBatchFileReader] reader + SharedPtrNoGIL[CRecordBatchFileReader] reader shared_ptr[CRandomAccessFile] file CIpcReadOptions options diff --git a/python/pyarrow/lib.pxd b/python/pyarrow/lib.pxd index 63ebe6aea8233..ae197eca1ca6b 100644 --- a/python/pyarrow/lib.pxd +++ b/python/pyarrow/lib.pxd @@ -552,12 +552,12 @@ cdef class CompressedOutputStream(NativeFile): cdef class _CRecordBatchWriter(_Weakrefable): cdef: - shared_ptr[CRecordBatchWriter] writer + SharedPtrNoGIL[CRecordBatchWriter] writer cdef class RecordBatchReader(_Weakrefable): cdef: - shared_ptr[CRecordBatchReader] reader + SharedPtrNoGIL[CRecordBatchReader] reader cdef class Codec(_Weakrefable): diff --git a/python/pyarrow/pandas_compat.py b/python/pyarrow/pandas_compat.py index e232603ba45ac..be29f68a13d5f 100644 --- a/python/pyarrow/pandas_compat.py +++ b/python/pyarrow/pandas_compat.py @@ -26,7 +26,6 @@ from itertools import zip_longest import json import operator -import pickle import re import warnings @@ -721,9 +720,6 @@ def _reconstruct_block(item, columns=None, extension_columns=None): block = _int.make_block(block_arr, placement=placement, klass=_int.DatetimeTZBlock, dtype=dtype) - elif 'object' in item: - block = _int.make_block(pickle.loads(block_arr), - placement=placement) elif 'py_array' in item: # create ExtensionBlock arr = item['py_array'] diff --git a/python/pyarrow/parquet/core.py b/python/pyarrow/parquet/core.py index 51ad955d19f78..072ab7fa11745 100644 --- a/python/pyarrow/parquet/core.py +++ b/python/pyarrow/parquet/core.py @@ -838,7 +838,7 @@ def _sanitize_table(table, new_schema, flavor): use_compliant_nested_type : bool, default True Whether to write compliant Parquet nested type (lists) as defined `here `_, defaults to ``False``. + LogicalTypes.md#nested-types>`_, defaults to ``True``. 
For ``use_compliant_nested_type=True``, this will write into a list with 3-level structure where the middle level, named ``list``, is a repeated group with a single field named ``element``:: diff --git a/python/pyarrow/src/arrow/python/arrow_to_pandas.cc b/python/pyarrow/src/arrow/python/arrow_to_pandas.cc index 91c7b8a45718e..8ed5d4e216e8e 100644 --- a/python/pyarrow/src/arrow/python/arrow_to_pandas.cc +++ b/python/pyarrow/src/arrow/python/arrow_to_pandas.cc @@ -1353,7 +1353,8 @@ struct ObjectWriterVisitor { std::is_same::value || (std::is_base_of::value && !std::is_same::value) || - std::is_base_of::value, + std::is_base_of::value || + std::is_base_of::value, Status> Visit(const Type& type) { return Status::NotImplemented("No implemented conversion to object dtype: ", diff --git a/python/pyarrow/src/arrow/python/common.h b/python/pyarrow/src/arrow/python/common.h index e36c0834fd424..4a7886695eadb 100644 --- a/python/pyarrow/src/arrow/python/common.h +++ b/python/pyarrow/src/arrow/python/common.h @@ -19,6 +19,7 @@ #include #include +#include #include #include "arrow/buffer.h" @@ -134,13 +135,15 @@ class ARROW_PYTHON_EXPORT PyAcquireGIL { // A RAII-style helper that releases the GIL until the end of a lexical block class ARROW_PYTHON_EXPORT PyReleaseGIL { public: - PyReleaseGIL() { saved_state_ = PyEval_SaveThread(); } - - ~PyReleaseGIL() { PyEval_RestoreThread(saved_state_); } + PyReleaseGIL() : ptr_(PyEval_SaveThread(), &unique_ptr_deleter) {} private: - PyThreadState* saved_state_; - ARROW_DISALLOW_COPY_AND_ASSIGN(PyReleaseGIL); + static void unique_ptr_deleter(PyThreadState* state) { + if (state) { + PyEval_RestoreThread(state); + } + } + std::unique_ptr ptr_; }; // A helper to call safely into the Python interpreter from arbitrary C++ code. @@ -188,7 +191,12 @@ class ARROW_PYTHON_EXPORT OwnedRef { return *this; } - ~OwnedRef() { reset(); } + ~OwnedRef() { + // GH-38626: destructor may be called after the Python interpreter is finalized. 
+ if (Py_IsInitialized()) { + reset(); + } + } void reset(PyObject* obj) { Py_XDECREF(obj_); @@ -225,16 +233,56 @@ class ARROW_PYTHON_EXPORT OwnedRefNoGIL : public OwnedRef { explicit OwnedRefNoGIL(PyObject* obj) : OwnedRef(obj) {} ~OwnedRefNoGIL() { - // This destructor may be called after the Python interpreter is finalized. - // At least avoid spurious attempts to take the GIL when not necessary. - if (obj() == NULLPTR) { - return; + // GH-38626: destructor may be called after the Python interpreter is finalized. + if (Py_IsInitialized() && obj() != NULLPTR) { + PyAcquireGIL lock; + reset(); } - PyAcquireGIL lock; - reset(); } }; +template