Bump hudi-aws to 1.0.0-beta2 (#71)
Although we remain on Hudi 0.15.0, this bumps Hudi's AWS module (hudi-aws) to
version 1.x. This is safe because the AWS module is backwards compatible with
0.15.0. The change lets us use features available only in 1.x, such as the
assumed-role credentials provider.
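
For illustration, the kind of configuration the 1.x AWS module is meant to unlock, as a minimal Scala sketch. The table path, role ARN, and the exact `hoodie.aws.role.*` option keys are assumptions here (they mirror Hudi's documented hudi-aws settings, but names can differ between releases), not something this commit adds:

import org.apache.spark.sql.DataFrame

// Sketch: write a DataFrame to a Hudi table on S3, asking the hudi-aws
// module to obtain credentials by assuming an IAM role via STS.
def writeWithAssumedRole(df: DataFrame): Unit =
  df.write
    .format("hudi")
    .option("hoodie.table.name", "events")
    // Assumed-role settings exposed by the hudi-aws module (assumed key names)
    .option("hoodie.aws.role.arn", "arn:aws:iam::123456789012:role/lake-writer")
    .option("hoodie.aws.role.session.name", "lake-loader")
    .mode("append")
    .save("s3a://example-bucket/events")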
istreeter authored and oguzhanunlu committed Nov 1, 2024
1 parent e310ab8 commit b74339d
Showing 3 changed files with 7 additions and 4 deletions.
@@ -37,9 +37,7 @@ abstract class AbstractSparkSpec extends Specification with CatsEffect {
 
   override val Timeout = 60.seconds
 
-  // TODO: After Hudi 1.0.0 is released, remove `skipAll` to re-enable these tests
-
-  def is = skipAll ^ sequential ^ s2"""
+  def is = sequential ^ s2"""
   The lake loader should:
     Write a single window of events into a lake table $e1
     Create unstruct_* column for unstructured events with valid schemas $e2
@@ -18,6 +18,9 @@ import fs2.io.file.Path
 
 class HudiSpec extends AbstractSparkSpec {
 
+  // TODO: After Hudi 1.0.0 is released, remove `skipAll` to re-enable these tests
+  override def is = skipAll ^ super.is
+
   override def target: TestConfig.Target = TestConfig.Hudi
 
   /** Reads the table back into memory, so we can make assertions on the app's output */
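
The two hunks above work together through specs2 fragment composition: prepending the `skipAll` argument to a specification's fragments skips every example in it, so the base spec now runs everywhere while only `HudiSpec` stays disabled. A standalone sketch of the pattern (class and example names are illustrative):

import org.specs2.Specification

class BaseSpec extends Specification {
  def is = sequential ^ s2"""
  The loader should:
    Write a window of events $e1
  """
  def e1 = ok
}

// Inherits all of BaseSpec's examples but marks the whole suite as skipped
class SkippedSpec extends BaseSpec {
  override def is = skipAll ^ super.is
}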
4 changes: 3 additions & 1 deletion project/Dependencies.scala
@@ -35,6 +35,7 @@ object Dependencies {
     // Spark
     val delta = "3.2.0"
     val hudi = "0.15.0"
+    val hudiAws = "1.0.0-beta2"
     val iceberg = "1.5.2"
     val hadoop = "3.4.0"
     val gcsConnector = "hadoop3-2.2.17"
@@ -88,14 +89,15 @@
     // spark and hadoop
     val delta = "io.delta" %% "delta-spark" % V.delta
     val hudi = "org.apache.hudi" %% s"hudi-spark${V.Spark.forHudiMinor}-bundle" % V.hudi
-    val hudiAws = "org.apache.hudi" % "hudi-aws" % V.hudi
     val iceberg = "org.apache.iceberg" %% s"iceberg-spark-runtime-${V.Spark.forIcebergDeltaMinor}" % V.iceberg
     val hadoopClient = "org.apache.hadoop" % "hadoop-client-runtime" % V.hadoop
     val hadoopAzure = "org.apache.hadoop" % "hadoop-azure" % V.hadoop
     val hadoopAws = "org.apache.hadoop" % "hadoop-aws" % V.hadoop
     val gcsConnector = "com.google.cloud.bigdataoss" % "gcs-connector" % V.gcsConnector
     val hiveCommon = "org.apache.hive" % "hive-common" % V.hive
 
+    val hudiAws = ("org.apache.hudi" % "hudi-aws" % V.hudiAws).excludeAll(ExclusionRule(organization = "org.apache.hudi"))
+
     // java
     val slf4j = "org.slf4j" % "slf4j-simple" % V.slf4j
     val azureIdentity = "com.azure" % "azure-identity" % V.azureSdk
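
The `excludeAll` clause is what keeps the split-version setup consistent: it stops the 1.x hudi-aws artifact from pulling its own 1.x Hudi dependencies onto the classpath, where they would clash with the 0.15.0 Spark bundle. The same pattern in isolation, as a build.sbt sketch (the Spark-minor suffix in the bundle name is illustrative):

libraryDependencies ++= Seq(
  "org.apache.hudi" %% "hudi-spark3.5-bundle" % "0.15.0",
  // Take only the hudi-aws jar itself; no transitive org.apache.hudi artifacts
  ("org.apache.hudi" % "hudi-aws" % "1.0.0-beta2")
    .excludeAll(ExclusionRule(organization = "org.apache.hudi"))
)

A quick way to check the result is sbt's built-in `evicted` task, or the dependency tree (`addDependencyTreePlugin` in project/plugins.sbt on sbt 1.4+), which should show no 1.x Hudi core jars on the runtime classpath.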
