Branch spark 69 (apache#170)
* Fixing the wrong type casting of TimeStamp to OTimestamp when reading from a Spark DataFrame.

* SPARK-69: Problem with license when we try to read from json and write to maprdb

(cherry picked from commit e39c4b7)
HanumathRao authored and Mikhail Gorbov committed Jan 2, 2018
1 parent cb58cac commit 1d6e26d
Showing 3 changed files with 15 additions and 2 deletions.
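
The first bullet of the commit message describes correcting the TimeStamp-to-OTimestamp conversion when reading from a Spark DataFrame; none of the hunks below touches that conversion directly. A minimal sketch of the kind of conversion involved, assuming org.ojai.types.OTimestamp's epoch-millisecond constructor (TimestampConversionSketch and toOTimestamp are hypothetical names, not code from this commit):

import java.sql.Timestamp
import org.ojai.types.OTimestamp

object TimestampConversionSketch {
  // Build the OJAI timestamp from epoch milliseconds rather than casting
  // the object directly, which is the kind of fix the first bullet describes.
  def toOTimestamp(ts: Timestamp): OTimestamp = new OTimestamp(ts.getTime)
}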

Changed file 1 of 3:
@@ -27,6 +27,7 @@ private[spark] object MapRDBUtils {
     tabDesc.setAutoSplit(true)
     tabDesc.setPath(tableName)
     tabDesc.setBulkLoad(bulkMode)
+    tabDesc.setInsertionOrder(false)
     if (keys.isEmpty)
       DBClient().createTable(tabDesc)
     else

Changed file 2 of 3:
@@ -10,6 +10,8 @@ import org.apache.spark.rdd.RDD
 import org.ojai.exceptions.TypeException
 import org.ojai.store.QueryCondition
 import com.mapr.db.MapRDB
+import com.mapr.db.spark.dbclient.DBClient
+
 
 object PredicateTests {
   val tableName = "/tmp/user_profiles_predicates"
@@ -29,7 +31,11 @@ object PredicateTests {
     if (MapRDB.tableExists(tableName))
       MapRDB.deleteTable(tableName)
     println("table successfully create :" + tableName)
-    MapRDB.createTable(tableName)
+    val tabDesc = DBClient().newTableDescriptor()
+    tabDesc.setAutoSplit(true)
+    tabDesc.setPath(tableName)
+    tabDesc.setInsertionOrder(false)
+    DBClient().createTable(tabDesc)
   }
 
   def runTests(sparkSession: SparkContext): Unit = {

Changed file 3 of 3:
@@ -12,6 +12,7 @@ import org.apache.spark.sql.SparkSession
 import org.ojai.types.{ODate, OTime, OTimestamp}
 import com.mapr.org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator
 import com.mapr.db.MapRDB
+import com.mapr.db.spark.dbclient.DBClient
 
 object SparkSqlAccessTests {
   lazy val conf = new SparkConf()
@@ -31,7 +32,12 @@ def tableInitialization(tableName: String): Unit = {
   def tableInitialization(tableName: String): Unit = {
     if (MapRDB.tableExists(tableName))
       MapRDB.deleteTable(tableName)
-    val table = MapRDB.createTable(tableName)
+    val tabDesc = DBClient().newTableDescriptor()
+    tabDesc.setAutoSplit(true)
+    tabDesc.setPath(tableName)
+    tabDesc.setInsertionOrder(false)
+    DBClient().createTable(tabDesc)
+    val table = DBClient().getTable(tableName)
     table.insertOrReplace(getNullRecord())
     table.insertOrReplace(getBooleanRecord())
     table.insertOrReplace(getStringRecord())
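
All three hunks above repeat the same table-creation sequence. A minimal sketch of that pattern gathered into one helper (TableCreationSketch and createUnorderedTable are hypothetical names, not part of this commit; the DBClient and TableDescriptor calls are the ones used in the hunks above):

import com.mapr.db.spark.dbclient.DBClient

object TableCreationSketch {
  // Mirrors the sequence added in each changed file: build a table descriptor,
  // disable insertion ordering, and create the table through DBClient.
  def createUnorderedTable(tableName: String): Unit = {
    val tabDesc = DBClient().newTableDescriptor()
    tabDesc.setAutoSplit(true)
    tabDesc.setPath(tableName)
    tabDesc.setInsertionOrder(false)
    DBClient().createTable(tabDesc)
  }
}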
