This repository has been archived by the owner on Sep 18, 2023. It is now read-only.

[NSE-273] Support spark311 for branch 1.1.1 #319

Merged
Changes from all commits (22 commits):
- 85b5cb7 [NSE-262] fix remainer loss in decimal divide (#263) (rui-mo, Apr 21, 2021)
- 7c382d5 [NSE-261] ArrowDataSource: Add S3 Support (#270) (zhztheplayer, Apr 22, 2021)
- 7e7d8c2 [NSE-196] clean up configs in unit tests (#271) (rui-mo, Apr 22, 2021)
- 94af4ac [NSE-265] Reserve enough memory before UnsafeAppend in builder (#266) (JkSelf, Apr 23, 2021)
- 333affd [NSE-274] Comment to trigger tpc-h RAM test (#275) (zhztheplayer, Apr 23, 2021)
- 821bfa9 bump cmake to 3.16 (#281) (zhouyuan, Apr 25, 2021)
- 7415b7b [NSE-276] Add option to switch Hadoop version (#277) (zhztheplayer, Apr 27, 2021)
- 1bf4d1d [NSE-119] clean up on comments (#288) (zhouyuan, Apr 27, 2021)
- c2c1737 [NSE-206]Update installation guide and configuration guide. (#289) (weiting-chen, Apr 27, 2021)
- 0b43448 [NSE-206]Fix Prerequisite and Arrow Installation Steps. (#290) (weiting-chen, Apr 27, 2021)
- 2591312 [NSE-245]Adding columnar RDD cache support (#246) (xuechendi, Apr 29, 2021)
- 1126320 [NSE-207] fix issues found from aggregate unit tests (#233) (rui-mo, Apr 30, 2021)
- e2eb35d [NSE-206]Update documents and License for 1.1.0 (#292) (Apr 30, 2021)
- 0cc6bb9 [NSE-293] fix unsafemap with key = '0' (#294) (zhouyuan, May 5, 2021)
- eb3f9da [NSE-257] fix multiple slf4j bindings (#291) (rui-mo, May 7, 2021)
- 56bcb73 [NSE-297] Disable incremental compiler in GHA CI (#298) (zhztheplayer, May 7, 2021)
- b6c267a [NSE-285] ColumnarWindow: Support Date input in MAX/MIN (#286) (zhztheplayer, May 8, 2021)
- bca337a [NSE-304] Upgrade to Arrow 4.0.0: Change basic GHA TPC-H test target … (zhztheplayer, May 10, 2021)
- 2f5a532 [NSE-302] remove exception (#303) (rui-mo, May 10, 2021)
- 975fcaa [NSE-273] support spark311 (#272) (zhouyuan, May 11, 2021)
- 8aa6b13 [NSE-311] Build reports errors (#312) (zhztheplayer, May 12, 2021)
- e60389d [NSE-257] fix the dependency issue on v2 (rui-mo, May 12, 2021)
65 changes: 0 additions & 65 deletions .github/workflows/report_ram_log.yml

This file was deleted.

39 changes: 23 additions & 16 deletions .github/workflows/tpch.yml
@@ -18,14 +18,23 @@
name: Native SQL Engine TPC-H Suite

on:
pull_request
issue_comment:
types: [created, edited]

jobs:
ram-usage-test:
if: ${{ contains(github.event.pull_request.labels.*.name, 'RAM Report') }}
if: ${{ github.event.issue.pull_request && startsWith(github.event.comment.body, '@github-actions ram-usage-test') }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Checkout Pull Request
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
PR_URL="${{ github.event.issue.pull_request.url }}"
PR_NUM=${PR_URL##*/}
echo "Checking out from PR #$PR_NUM based on URL: $PR_URL"
hub pr checkout $PR_NUM
- name: Set up JDK 1.8
uses: actions/setup-java@v1
with:
@@ -42,15 +51,15 @@ jobs:
run: |
cd /tmp
git clone https://github.com/oap-project/arrow.git
cd arrow && git checkout arrow-3.0.0-oap && cd cpp
cd arrow && git checkout arrow-4.0.0-oap && cd cpp
mkdir build && cd build
cmake .. -DARROW_JNI=ON -DARROW_GANDIVA_JAVA=ON -DARROW_GANDIVA=ON -DARROW_PARQUET=ON -DARROW_CSV=ON -DARROW_HDFS=ON -DARROW_FILESYSTEM=ON -DARROW_WITH_SNAPPY=ON -DARROW_JSON=ON -DARROW_DATASET=ON -DARROW_WITH_LZ4=ON -DARROW_JEMALLOC=OFF && make -j2
sudo make install
cd ../../java
mvn clean install -B -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn -P arrow-jni -am -Darrow.cpp.build.dir=/tmp/arrow/cpp/build/release/ -DskipTests -Dcheckstyle.skip
- name: Run Maven tests - BHJ
run: |
mvn test -B -pl native-sql-engine/core/ -am -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn -DmembersOnlySuites=com.intel.oap.tpc.h -DtagsToInclude=com.intel.oap.tags.BroadcastHashJoinMode -DargLine="-Xmx1G -XX:MaxDirectMemorySize=500M -Dio.netty.allocator.numDirectArena=1"
mvn test -B -P full-scala-compiler -Dbuild_arrow=OFF -pl native-sql-engine/core/ -am -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn -DmembersOnlySuites=com.intel.oap.tpc.h -DtagsToInclude=com.intel.oap.tags.BroadcastHashJoinMode -DargLine="-Xmx1G -XX:MaxDirectMemorySize=500M -Dio.netty.allocator.numDirectArena=1"
env:
MALLOC_ARENA_MAX: "4"
MAVEN_OPTS: "-Xmx1G"
@@ -59,7 +68,7 @@
ENABLE_TPCH_TESTS: "true"
- name: Run Maven tests - SMJ
run: |
mvn test -B -pl native-sql-engine/core/ -am -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn -DmembersOnlySuites=com.intel.oap.tpc.h -DtagsToInclude=com.intel.oap.tags.SortMergeJoinMode -DargLine="-Xmx1G -XX:MaxDirectMemorySize=500M -Dio.netty.allocator.numDirectArena=1"
mvn test -B -P full-scala-compiler -Dbuild_arrow=OFF -pl native-sql-engine/core/ -am -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn -DmembersOnlySuites=com.intel.oap.tpc.h -DtagsToInclude=com.intel.oap.tags.SortMergeJoinMode -DargLine="-Xmx1G -XX:MaxDirectMemorySize=500M -Dio.netty.allocator.numDirectArena=1"
env:
MALLOC_ARENA_MAX: "4"
MAVEN_OPTS: "-Xmx1G"
@@ -69,14 +78,12 @@
- run: |
cml-publish /tmp/comment_image_1.png --md > /tmp/comment.md
cml-publish /tmp/comment_image_2.png --md >> /tmp/comment.md
- run: echo "::set-output name=event_path::${GITHUB_EVENT_PATH}"
id: output-envs
- uses: actions/upload-artifact@v2
with:
name: comment_content
path: /tmp/comment.md
- uses: actions/upload-artifact@v2
with:
name: pr_event
path: ${{steps.output-envs.outputs.event_path}}

- name: Run Maven tests - Report
run: |
mvn test -B -P full-scala-compiler -Dbuild_arrow=OFF -Dbuild_protobuf=OFF -pl native-sql-engine/core/ -am -DmembersOnlySuites=com.intel.oap.tpc.h -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn -DtagsToInclude=com.intel.oap.tags.CommentOnContextPR -Dexec.skip=true
env:
PR_URL: ${{ github.event.issue.pull_request.url }}
MAVEN_OPTS: "-Xmx1G"
COMMENT_CONTENT_PATH: "/tmp/comment.md"
GITHUB_TOKEN: ${{ github.token }}
ENABLE_TPCH_TESTS: "true"
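
Note: the tpch.yml changes above replace the old label-gated pull_request trigger with a comment-triggered flow: the job now fires on issue_comment events and checks out the pull request branch itself via hub. The "Checkout Pull Request" step reduces to shell logic along these lines (a minimal sketch, assuming GITHUB_TOKEN is exported, the hub CLI is preinstalled on the runner, and PR_URL carries the pull request API URL from the issue_comment payload):

#!/usr/bin/env bash
# Sketch of the "Checkout Pull Request" step added in tpch.yml above.
set -euo pipefail

: "${PR_URL:?PR_URL must be set to the pull request API URL}"

# The PR number is the last path segment of the API URL.
PR_NUM=${PR_URL##*/}
echo "Checking out from PR #$PR_NUM based on URL: $PR_URL"

# hub fetches the PR head and checks it out as a local branch.
hub pr checkout "$PR_NUM"
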
9 changes: 5 additions & 4 deletions .github/workflows/unittests.yml
@@ -60,6 +60,7 @@ jobs:
ctest -R

scala-unit-test:
if: ${{ github.event.issue.pull_request && startsWith(github.event.comment.body, '@github-actions scala-unit-test') }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
@@ -82,8 +83,8 @@
- name: Install Spark
run: |
cd /tmp
wget http://archive.apache.org/dist/spark/spark-3.0.0/spark-3.0.0-bin-hadoop2.7.tgz
tar -xf spark-3.0.0-bin-hadoop2.7.tgz
wget http://archive.apache.org/dist/spark/spark-3.0.2/spark-3.0.2-bin-hadoop2.7.tgz
tar -xf spark-3.0.2-bin-hadoop2.7.tgz
- name: Install OAP optimized Arrow (C++ libs)
run: |
cd /tmp
@@ -100,9 +101,9 @@
cd arrow-data-source
mvn clean install -DskipTests -Dbuild_arrow=OFF
cd ..
mvn clean package -am -pl native-sql-engine/core -DskipTests -Dbuild_arrow=OFF
mvn clean package -P full-scala-compiler -am -pl native-sql-engine/core -DskipTests -Dbuild_arrow=OFF
cd native-sql-engine/core/
mvn test -DmembersOnlySuites=org.apache.spark.sql.travis -am -DfailIfNoTests=false -Dexec.skip=true -DargLine="-Dspark.test.home=/tmp/spark-3.0.0-bin-hadoop2.7" &> log-file.log
mvn test -P full-scala-compiler -DmembersOnlySuites=org.apache.spark.sql.travis -am -DfailIfNoTests=false -Dexec.skip=true -DargLine="-Dspark.test.home=/tmp/spark-3.0.0-bin-hadoop2.7" &> log-file.log
echo '#!/bin/bash' > grep.sh
echo "module_tested=0; module_should_test=1; tests_total=0; while read -r line; do num=\$(echo \"\$line\" | grep -o -E '[0-9]+'); tests_total=\$((tests_total+num)); done <<<\"\$(grep \"Total number of tests run:\" log-file.log)\"; succeed_total=0; while read -r line; do [[ \$line =~ [^0-9]*([0-9]+)\, ]]; num=\${BASH_REMATCH[1]}; succeed_total=\$((succeed_total+num)); let module_tested++; done <<<\"\$(grep \"succeeded\" log-file.log)\"; if test \$tests_total -eq \$succeed_total -a \$module_tested -eq \$module_should_test; then echo \"All unit tests succeed\"; else echo \"Unit tests failed\"; exit 1; fi" >> grep.sh
bash grep.sh
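
The escaped one-liner written into grep.sh above is hard to read in diff form. Expanded, its logic is roughly the following (a readable sketch of the same check, assuming log-file.log holds the Maven/ScalaTest output captured by the preceding mvn test command):

#!/bin/bash
# Readable expansion of the grep.sh one-liner generated in unittests.yml above.
# It verifies that the expected number of modules reported results and that
# every test in log-file.log succeeded.
module_tested=0
module_should_test=1
tests_total=0

# Sum the test counts reported by ScalaTest ("Total number of tests run: N").
while read -r line; do
  num=$(echo "$line" | grep -o -E '[0-9]+')
  tests_total=$((tests_total + num))
done <<<"$(grep "Total number of tests run:" log-file.log)"

# Sum the succeeded counts and count how many modules reported results.
succeed_total=0
while read -r line; do
  [[ $line =~ [^0-9]*([0-9]+)\, ]]
  num=${BASH_REMATCH[1]}
  succeed_total=$((succeed_total + num))
  let module_tested++
done <<<"$(grep "succeeded" log-file.log)"

# Fail the job unless all tests passed and the expected module count ran.
if test "$tests_total" -eq "$succeed_total" -a "$module_tested" -eq "$module_should_test"; then
  echo "All unit tests succeed"
else
  echo "Unit tests failed"
  exit 1
fi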