SPARK-1352 - Comment style single space before ending */ check.
Author: Prashant Sharma <[email protected]>

Closes apache#261 from ScrapCodes/comment-style-check2 and squashes the following commits:

6cde61e [Prashant Sharma] comment style space before ending */ check.
ScrapCodes authored and pwendell committed Mar 30, 2014
1 parent 95d7d2a commit d666053
Showing 13 changed files with 32 additions and 31 deletions.
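The rule being enforced: a multi-line or Scaladoc comment should have a single space after the opening marker and a single space before the closing */. A short illustration (these example comments are mine, not lines from the diff):

/* bad: no space before the closing marker*/
/* good: a single space on both sides of the body */
/** Scaladoc comments are held to the same rule. */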
12 changes: 6 additions & 6 deletions core/src/main/scala/org/apache/spark/network/Connection.scala
@@ -211,7 +211,7 @@ class SendingConnection(val address: InetSocketAddress, selector_ : Selector,

def addMessage(message: Message) {
messages.synchronized{
-/* messages += message*/
+/* messages += message */
messages.enqueue(message)
logDebug("Added [" + message + "] to outbox for sending to " +
"[" + getRemoteConnectionManagerId() + "]")
@@ -222,7 +222,7 @@ class SendingConnection(val address: InetSocketAddress, selector_ : Selector,
messages.synchronized {
while (!messages.isEmpty) {
/* nextMessageToBeUsed = nextMessageToBeUsed % messages.size */
-/* val message = messages(nextMessageToBeUsed)*/
+/* val message = messages(nextMessageToBeUsed) */
val message = messages.dequeue
val chunk = message.getChunkForSending(defaultChunkSize)
if (chunk.isDefined) {
@@ -262,7 +262,7 @@ class SendingConnection(val address: InetSocketAddress, selector_ : Selector,

val currentBuffers = new ArrayBuffer[ByteBuffer]()

-/* channel.socket.setSendBufferSize(256 * 1024)*/
+/* channel.socket.setSendBufferSize(256 * 1024) */

override def getRemoteAddress() = address

@@ -355,7 +355,7 @@ class SendingConnection(val address: InetSocketAddress, selector_ : Selector,
}
case None => {
// changeConnectionKeyInterest(0)
-/* key.interestOps(0)*/
+/* key.interestOps(0) */
return false
}
}
@@ -540,10 +540,10 @@ private[spark] class ReceivingConnection(
return false
}

-/* logDebug("Read " + bytesRead + " bytes for the buffer")*/
+/* logDebug("Read " + bytesRead + " bytes for the buffer") */

if (currentChunk.buffer.remaining == 0) {
/* println("Filled buffer at " + System.currentTimeMillis)*/
/* println("Filled buffer at " + System.currentTimeMillis) */
val bufferMessage = inbox.getMessageForChunk(currentChunk).get
if (bufferMessage.isCompletelyReceived) {
bufferMessage.flip

@@ -505,7 +505,7 @@ private[spark] class ConnectionManager(port: Int, conf: SparkConf,
}
}
handleMessageExecutor.execute(runnable)
-/* handleMessage(connection, message)*/
+/* handleMessage(connection, message) */
}

private def handleClientAuthentication(
@@ -859,14 +859,14 @@ private[spark] object ConnectionManager {
None
})

-/* testSequentialSending(manager)*/
-/* System.gc()*/
+/* testSequentialSending(manager) */
+/* System.gc() */

-/* testParallelSending(manager)*/
-/* System.gc()*/
+/* testParallelSending(manager) */
+/* System.gc() */

-/* testParallelDecreasingSending(manager)*/
-/* System.gc()*/
+/* testParallelDecreasingSending(manager) */
+/* System.gc() */

testContinuousSending(manager)
System.gc()

@@ -47,8 +47,8 @@ private[spark] object ConnectionManagerTest extends Logging{
val slaves = slavesFile.mkString.split("\n")
slavesFile.close()

/* println("Slaves")*/
/* slaves.foreach(println)*/
/* println("Slaves") */
/* slaves.foreach(println) */
val tasknum = if (args.length > 2) args(2).toInt else slaves.length
val size = ( if (args.length > 3) (args(3).toInt) else 10 ) * 1024 * 1024
val count = if (args.length > 4) args(4).toInt else 3

@@ -27,7 +27,7 @@ private[spark] object ReceiverTest {
println("Started connection manager with id = " + manager.id)

manager.onReceiveMessage((msg: Message, id: ConnectionManagerId) => {
/* println("Received [" + msg + "] from [" + id + "] at " + System.currentTimeMillis)*/
/* println("Received [" + msg + "] from [" + id + "] at " + System.currentTimeMillis) */
val buffer = ByteBuffer.wrap("response".getBytes)
Some(Message.createBufferMessage(buffer, msg.id))
})

@@ -50,7 +50,7 @@ private[spark] object SenderTest {
(0 until count).foreach(i => {
val dataMessage = Message.createBufferMessage(buffer.duplicate)
val startTime = System.currentTimeMillis
/* println("Started timer at " + startTime)*/
/* println("Started timer at " + startTime) */
val responseStr = manager.sendMessageReliablySync(targetConnectionManagerId, dataMessage)
.map { response =>
val buffer = response.asInstanceOf[BufferMessage].buffers(0)

@@ -25,7 +25,7 @@ import org.apache.spark.scheduler.Schedulable
import org.apache.spark.ui.Page._
import org.apache.spark.ui.UIUtils

-/** Page showing list of all ongoing and recently finished stages and pools*/
+/** Page showing list of all ongoing and recently finished stages and pools */
private[ui] class IndexPage(parent: JobProgressUI) {
private val appName = parent.appName
private val basePath = parent.basePath

4 changes: 2 additions & 2 deletions core/src/main/scala/org/apache/spark/util/MutablePair.scala
@@ -24,8 +24,8 @@ package org.apache.spark.util
* @param _1 Element 1 of this MutablePair
* @param _2 Element 2 of this MutablePair
*/
-case class MutablePair[@specialized(Int, Long, Double, Char, Boolean/* , AnyRef*/) T1,
-@specialized(Int, Long, Double, Char, Boolean/* , AnyRef*/) T2]
+case class MutablePair[@specialized(Int, Long, Double, Char, Boolean/* , AnyRef */) T1,
+@specialized(Int, Long, Double, Char, Boolean/* , AnyRef */) T2]
(var _1: T1, var _2: T2)
extends Product2[T1, T2]
{

@@ -21,7 +21,7 @@ import java.net.ServerSocket
import java.io.PrintWriter
import util.Random

-/** Represents a page view on a website with associated dimension data.*/
+/** Represents a page view on a website with associated dimension data. */
class PageView(val url : String, val status : Int, val zipCode : Int, val userID : Int)
extends Serializable {
override def toString() : String = {

@@ -127,7 +127,7 @@ class FlumeEventServer(receiver : FlumeReceiver) extends AvroSourceProtocol {
}

/** A NetworkReceiver which listens for events using the
-  * Flume Avro interface.*/
+  * Flume Avro interface. */
private[streaming]
class FlumeReceiver(
host: String,

@@ -45,7 +45,7 @@ class VertexBroadcastMsg[@specialized(Int, Long, Double, Boolean) T](
* @param data value to send
*/
private[graphx]
-class MessageToPartition[@specialized(Int, Long, Double, Char, Boolean/* , AnyRef*/) T](
+class MessageToPartition[@specialized(Int, Long, Double, Char, Boolean/* , AnyRef */) T](
@transient var partition: PartitionID,
var data: T)
extends Product2[PartitionID, T] with Serializable {

3 changes: 1 addition & 2 deletions project/project/SparkPluginBuild.scala
@@ -34,8 +34,7 @@ object SparkPluginDef extends Build {
version := sparkVersion,
scalaVersion := "2.10.3",
scalacOptions := Seq("-unchecked", "-deprecation"),
-libraryDependencies ++= Dependencies.scalaStyle,
-sbtPlugin := true
+libraryDependencies ++= Dependencies.scalaStyle
)

object Dependencies {

@@ -25,13 +25,15 @@ import scalariform.lexer.{MultiLineComment, ScalaDocComment, SingleLineComment,
import scalariform.parser.CompilationUnit

class SparkSpaceAfterCommentStartChecker extends ScalariformChecker {
-val errorKey: String = "insert.a.single.space.after.comment.start"
+val errorKey: String = "insert.a.single.space.after.comment.start.and.before.end"

private def multiLineCommentRegex(comment: Token) =
-Pattern.compile( """/\*\S+.*""", Pattern.DOTALL).matcher(comment.text.trim).matches()
+Pattern.compile( """/\*\S+.*""", Pattern.DOTALL).matcher(comment.text.trim).matches() ||
+Pattern.compile( """/\*.*\S\*/""", Pattern.DOTALL).matcher(comment.text.trim).matches()

private def scalaDocPatternRegex(comment: Token) =
-Pattern.compile( """/\*\*\S+.*""", Pattern.DOTALL).matcher(comment.text.trim).matches()
+Pattern.compile( """/\*\*\S+.*""", Pattern.DOTALL).matcher(comment.text.trim).matches() ||
+Pattern.compile( """/\*\*.*\S\*/""", Pattern.DOTALL).matcher(comment.text.trim).matches()

private def singleLineCommentRegex(comment: Token): Boolean =
comment.text.trim.matches( """//\S+.*""") && !comment.text.trim.matches( """///+""")
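To see what the two multi-line patterns accept and reject, here is a minimal, self-contained sketch; the object name and test harness are hypothetical, and only the two Pattern.compile calls mirror the checker above:

import java.util.regex.Pattern

object CommentStyleDemo {
  // Flags a multi-line comment with no space after the opening /*
  private val noSpaceAfterStart = Pattern.compile("""/\*\S+.*""", Pattern.DOTALL)
  // Flags a multi-line comment with no space before the closing */
  private val noSpaceBeforeEnd = Pattern.compile("""/\*.*\S\*/""", Pattern.DOTALL)

  def violates(comment: String): Boolean = {
    val t = comment.trim
    noSpaceAfterStart.matcher(t).matches() || noSpaceBeforeEnd.matcher(t).matches()
  }

  def main(args: Array[String]): Unit = {
    println(violates("/* messages += message*/"))  // true: no space before */
    println(violates("/* messages += message */")) // false: compliant
    println(violates("/*messages += message */"))  // true: no space after /*
  }
}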

@@ -57,19 +57,19 @@ import org.apache.spark.sql.catalyst.types._
case class ParquetRelation(tableName: String, path: String)
extends BaseRelation with MultiInstanceRelation {

-/** Schema derived from ParquetFile **/
+/** Schema derived from ParquetFile */
def parquetSchema: MessageType =
ParquetTypesConverter
.readMetaData(new Path(path))
.getFileMetaData
.getSchema

-/** Attributes **/
+/** Attributes */
val attributes =
ParquetTypesConverter
.convertToAttributes(parquetSchema)

-/** Output **/
+/** Output */
override val output = attributes

// Parquet files have no concepts of keys, therefore no Partitioner