[SPARK-8781] Fix unresolved variables in published pom.xml
The issue is summarized in the JIRA and is caused by this commit: 984ad60.

This patch reverts that commit and fixes the Maven build in a different way: we limit the dependencies of `KinesisReceiverSuite` so we no longer have to deal with the complexities of how Maven handles transitive test dependencies.
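
As an illustration of the approach (not from the Spark code base), here is a minimal sketch of the test-suite shape this patch moves to: extend ScalaTest's FunSuite with BeforeAndAfter and MockitoSugar directly, instead of Spark's TestSuiteBase, so the module no longer needs the spark-core test-jar on its test classpath. The Checkpointer trait and all names below are hypothetical.

import org.mockito.Mockito._
import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
import org.scalatest.mock.MockitoSugar

// Hypothetical collaborator, used only to demonstrate the before/after + Mockito pattern.
trait Checkpointer { def checkpoint(): Unit }

class ExampleSuite extends FunSuite with Matchers with BeforeAndAfter with MockitoSugar {

  var checkpointerMock: Checkpointer = _

  before {
    checkpointerMock = mock[Checkpointer]
  }

  after {
    // Strict verification, preserving the suite's EasyMock-era semantics (see SPARK-5735).
    verifyNoMoreInteractions(checkpointerMock)
  }

  test("records exactly one checkpoint") {
    checkpointerMock.checkpoint()
    verify(checkpointerMock).checkpoint()
  }
}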

Author: Andrew Or <[email protected]>

Closes #7193 from andrewor14/fix-kinesis-pom and squashes the following commits:

ca3d5d4 [Andrew Or] Limit kinesis test dependencies
f24e09c [Andrew Or] Revert "[BUILD] Fix Maven build for Kinesis"

(cherry picked from commit 82cf331)
Signed-off-by: Andrew Or <[email protected]>

Conflicts:
	extras/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisReceiverSuite.scala
Andrew Or committed Jul 2, 2015
1 parent 3a71cf9 commit 502e1fd
Showing 3 changed files with 18 additions and 28 deletions.
7 changes: 0 additions & 7 deletions extras/kinesis-asl/pom.xml
@@ -40,13 +40,6 @@
<artifactId>spark-streaming_${scala.binary.version}</artifactId>
<version>${project.version}</version>
</dependency>
- <dependency>
-   <groupId>org.apache.spark</groupId>
-   <artifactId>spark-core_${scala.binary.version}</artifactId>
-   <version>${project.version}</version>
-   <type>test-jar</type>
-   <scope>test</scope>
- </dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming_${scala.binary.version}</artifactId>
37 changes: 18 additions & 19 deletions extras/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisReceiverSuite.scala
@@ -20,18 +20,6 @@ import java.nio.ByteBuffer

import scala.collection.JavaConversions.seqAsJavaList

- import org.apache.spark.storage.StorageLevel
- import org.apache.spark.streaming.Milliseconds
- import org.apache.spark.streaming.Seconds
- import org.apache.spark.streaming.StreamingContext
- import org.apache.spark.streaming.TestSuiteBase
- import org.apache.spark.util.{ManualClock, Clock}
-
- import org.mockito.Mockito._
- import org.scalatest.BeforeAndAfter
- import org.scalatest.Matchers
- import org.scalatest.mock.MockitoSugar
-
import com.amazonaws.services.kinesis.clientlibrary.exceptions.InvalidStateException
import com.amazonaws.services.kinesis.clientlibrary.exceptions.KinesisClientLibDependencyException
import com.amazonaws.services.kinesis.clientlibrary.exceptions.ShutdownException
@@ -40,12 +28,24 @@ import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessorCheckpointer
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.InitialPositionInStream
import com.amazonaws.services.kinesis.clientlibrary.types.ShutdownReason
import com.amazonaws.services.kinesis.model.Record
+ import org.mockito.Mockito._
+ // scalastyle:off
+ // To avoid introducing a dependency on Spark core tests, simply use scalatest's FunSuite
+ // here instead of our own SparkFunSuite. Introducing the dependency has caused problems
+ // in the past (SPARK-8781) that are complicated by bugs in the maven shade plugin (MSHADE-148).
+ import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
+ import org.scalatest.mock.MockitoSugar
+
+ import org.apache.spark.storage.StorageLevel
+ import org.apache.spark.streaming.{Milliseconds, Seconds, StreamingContext}
+ import org.apache.spark.util.{Clock, ManualClock}

/**
* Suite of Kinesis streaming receiver tests focusing mostly on the KinesisRecordProcessor
*/
- class KinesisReceiverSuite extends TestSuiteBase with Matchers with BeforeAndAfter
-     with MockitoSugar {
+ class KinesisReceiverSuite extends FunSuite with Matchers with BeforeAndAfter
+     with MockitoSugar {
+ // scalastyle:on

val app = "TestKinesisReceiver"
val stream = "mySparkStream"
@@ -65,24 +65,23 @@ class KinesisReceiverSuite extends TestSuiteBase with Matchers with BeforeAndAfter
var checkpointStateMock: KinesisCheckpointState = _
var currentClockMock: Clock = _

- override def beforeFunction() = {
+ before {
receiverMock = mock[KinesisReceiver]
checkpointerMock = mock[IRecordProcessorCheckpointer]
checkpointClockMock = mock[ManualClock]
checkpointStateMock = mock[KinesisCheckpointState]
currentClockMock = mock[Clock]
}

- override def afterFunction(): Unit = {
-   super.afterFunction()
+ after {
// Since this suite was originally written using EasyMock, add this to preserve the old
// mocking semantics (see SPARK-5735 for more details)
verifyNoMoreInteractions(receiverMock, checkpointerMock, checkpointClockMock,
checkpointStateMock, currentClockMock)
}

test("kinesis utils api") {
val ssc = new StreamingContext(master, framework, batchDuration)
test("KinesisUtils API") {
val ssc = new StreamingContext("local[2]", getClass.getSimpleName, Seconds(1))
// Tests the API, does not actually test data receiving
val kinesisStream = KinesisUtils.createStream(ssc, "mySparkStream",
"https://kinesis.us-west-2.amazonaws.com", Seconds(2),
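
For context on the rewritten "KinesisUtils API" test above: without TestSuiteBase's `master`, `framework`, and `batchDuration` fields, the suite constructs its StreamingContext directly. A minimal sketch along those lines; the app name and batch interval here are illustrative, not taken from the patch.

import org.apache.spark.streaming.{Seconds, StreamingContext}

object KinesisApiCheck {
  def main(args: Array[String]): Unit = {
    // Build the context by hand since TestSuiteBase's helpers are no longer inherited;
    // the master, app name, and batch interval are illustrative values.
    val ssc = new StreamingContext("local[2]", "KinesisApiCheck", Seconds(1))
    try {
      // ... exercise the stream-creation API against ssc here ...
    } finally {
      ssc.stop()
    }
  }
}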
2 changes: 0 additions & 2 deletions pom.xml
@@ -1367,8 +1367,6 @@
<version>2.2</version>
<configuration>
<shadedArtifactAttached>false</shadedArtifactAttached>
- <!-- Work around MSHADE-148 -->
- <createDependencyReducedPom>false</createDependencyReducedPom>
<artifactSet>
<includes>
<!-- At a minimum we must include this to force effective pom generation -->
