Skip to content
This repository has been archived by the owner on Sep 18, 2023. It is now read-only.

[NSE-207] fix issues found in scala unit tests #356

Merged
merged 18 commits
Jun 23, 2021
Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Previous commit
Next commit
convert travis tests to native sql tests
rui-mo committed Jun 22, 2021

Verified

This commit was created on GitHub.com and signed with GitHub’s verified signature. The key has expired.
commit a27eb468d620f9184fd26b7b4bc2fa64dea26f10
2 changes: 1 addition & 1 deletion .github/workflows/unittests.yml
Original file line number Diff line number Diff line change
@@ -101,7 +101,7 @@ jobs:
mvn clean install -DskipTests -Dbuild_arrow=OFF
cd ..
mvn clean package -P full-scala-compiler -am -pl native-sql-engine/core -DskipTests -Dbuild_arrow=OFF
mvn test -P full-scala-compiler -DmembersOnlySuites=org.apache.spark.sql.travis -am -DfailIfNoTests=false -Dexec.skip=true -DargLine="-Dspark.test.home=/tmp/spark-3.0.0-bin-hadoop2.7" &> log-file.log
mvn test -P full-scala-compiler -DmembersOnlySuites=org.apache.spark.sql.nativesql -am -DfailIfNoTests=false -Dexec.skip=true -DargLine="-Dspark.test.home=/tmp/spark-3.0.0-bin-hadoop2.7" &> log-file.log
echo '#!/bin/bash' > grep.sh
echo "module_tested=0; module_should_test=8; tests_total=0; while read -r line; do num=\$(echo \"\$line\" | grep -o -E '[0-9]+'); tests_total=\$((tests_total+num)); done <<<\"\$(grep \"Total number of tests run:\" log-file.log)\"; succeed_total=0; while read -r line; do [[ \$line =~ [^0-9]*([0-9]+)\, ]]; num=\${BASH_REMATCH[1]}; succeed_total=\$((succeed_total+num)); let module_tested++; done <<<\"\$(grep \"succeeded\" log-file.log)\"; if test \$tests_total -eq \$succeed_total -a \$module_tested -eq \$module_should_test; then echo \"All unit tests succeed\"; else echo \"Unit tests failed\"; exit 1; fi" >> grep.sh
bash grep.sh
Original file line number Diff line number Diff line change
@@ -15,7 +15,7 @@
* limitations under the License.
*/

package org.apache.spark.sql.travis
package org.apache.spark.sql.nativesql

import java.io.File
import java.net.URI
@@ -43,7 +43,7 @@ import org.apache.spark.sql.types.{IntegerType, StructType}
import org.apache.spark.sql.util.QueryExecutionListener
import org.apache.spark.util.Utils

class TravisColumnarAdaptiveQueryExecSuite
class NativeColumnarAdaptiveQueryExecSuite
extends QueryTest
with SharedSparkSession
with AdaptiveSparkPlanHelper {
Original file line number Diff line number Diff line change
@@ -15,7 +15,7 @@
* limitations under the License.
*/

package org.apache.spark.sql.travis
package org.apache.spark.sql.nativesql

import org.apache.spark.sql.{AnalysisException, Column, DataFrame, QueryTest, Row}

@@ -34,7 +34,7 @@ import org.apache.spark.sql.types._

case class Fact(date: Int, hour: Int, minute: Int, room_name: String, temp: Double)

class TravisDataFrameAggregateSuite extends QueryTest
class NativeDataFrameAggregateSuite extends QueryTest
with SharedSparkSession
with AdaptiveSparkPlanHelper {
import testImplicits._
Original file line number Diff line number Diff line change
@@ -15,7 +15,7 @@
* limitations under the License.
*/

package org.apache.spark.sql.travis
package org.apache.spark.sql.nativesql

import org.apache.spark.sql.{DataFrame, QueryTest, Row}

@@ -35,7 +35,7 @@ import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types._

class TravisDataFrameJoinSuite extends QueryTest
class NativeDataFrameJoinSuite extends QueryTest
with SharedSparkSession
with AdaptiveSparkPlanHelper {
import testImplicits._
Original file line number Diff line number Diff line change
@@ -15,7 +15,7 @@
* limitations under the License.
*/

package org.apache.spark.sql.travis
package org.apache.spark.sql.nativesql

import com.intel.oap.execution.ColumnarHashAggregateExec
import com.intel.oap.datasource.parquet.ParquetReader
@@ -27,7 +27,7 @@ import org.apache.spark.sql.execution.exchange.ShuffleExchangeExec
import org.apache.spark.sql.execution.{ColumnarShuffleExchangeExec, ColumnarToRowExec, RowToColumnarExec}
import org.apache.spark.sql.test.SharedSparkSession

class TravisRepartitionSuite extends QueryTest with SharedSparkSession {
class NativeRepartitionSuite extends QueryTest with SharedSparkSession {
import testImplicits._

override def sparkConf: SparkConf =
@@ -66,7 +66,7 @@ class TravisRepartitionSuite extends QueryTest with SharedSparkSession {
def withRepartition: (DataFrame => DataFrame) => Unit = withInput(input)(None, _)
}

class TravisTPCHTableRepartitionSuite extends TravisRepartitionSuite {
class NativeTPCHTableRepartitionSuite extends NativeRepartitionSuite {
import testImplicits._

val filePath = getTestResourcePath(
@@ -97,7 +97,7 @@ class TravisTPCHTableRepartitionSuite extends TravisRepartitionSuite {
}
}

class TravisDisableColumnarShuffleSuite extends TravisRepartitionSuite {
class NativeDisableColumnarShuffleSuite extends NativeRepartitionSuite {
import testImplicits._

override def sparkConf: SparkConf = {
@@ -128,7 +128,7 @@ class TravisDisableColumnarShuffleSuite extends TravisRepartitionSuite {
}
}

class TravisAdaptiveQueryExecRepartitionSuite extends TravisTPCHTableRepartitionSuite {
class NativeAdaptiveQueryExecRepartitionSuite extends NativeTPCHTableRepartitionSuite {
override def sparkConf: SparkConf = {
super.sparkConf
.set("spark.sql.adaptive.enabled", "true")
@@ -167,7 +167,7 @@ class TravisAdaptiveQueryExecRepartitionSuite extends TravisTPCHTableRepartition

}

class TravisReuseExchangeSuite extends TravisRepartitionSuite {
class NativeReuseExchangeSuite extends NativeRepartitionSuite {
val filePath = getTestResourcePath(
"test-data/part-00000-d648dd34-c9d2-4fe9-87f2-770ef3551442-c000.snappy.parquet")

Original file line number Diff line number Diff line change
@@ -15,15 +15,15 @@
* limitations under the License.
*/

package org.apache.spark.sql.travis
package org.apache.spark.sql.nativesql

import java.sql.{Date, Timestamp}

import org.apache.spark.sql.{QueryTest, Row}
import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
import org.apache.spark.sql.test.SharedSparkSession

class TravisSQLConvertedSuite extends QueryTest
class NativeSQLConvertedSuite extends QueryTest
with SharedSparkSession
with AdaptiveSparkPlanHelper {
import testImplicits._