[SPARK-32873][BUILD] Fix code which causes error when build with sbt and Scala 2.13

### What changes were proposed in this pull request?

This PR fixes code that causes errors when building with sbt and Scala 2.13, such as the following:
```
[error] [warn] /home/kou/work/oss/spark-scala-2.13/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaRDD.scala:251: method with a single empty parameter list overrides method without any parameter list
[error] [warn]   override def hasNext(): Boolean = requestOffset < part.untilOffset
[error] [warn]
[error] [warn] /home/kou/work/oss/spark-scala-2.13/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaRDD.scala:294: method with a single empty parameter list overrides method without any parameter list
[error] [warn]   override def hasNext(): Boolean = okNext
```

More specifically, this PR fixes:

* Methods that have an empty parameter list but override a method that has no parameter list (a combined sketch of both method cases follows the next bullet).
```
override def hasNext(): Boolean = okNext
```

* Methods that have no parameter list but override a method that has an empty parameter list.
```
      override def next: (Int, Double) = {
```
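
For illustration, here is a minimal standalone sketch (written for this description, not code from the PR) showing both arity mismatches against `scala.collection.Iterator`, whose `hasNext` is declared without a parameter list while `next()` is declared with an empty one:
```
class SingleUse(value: Int) extends Iterator[Int] {
  private var consumed = false

  // Declaring this as `override def hasNext(): Boolean` fails under
  // Scala 2.13: "method with a single empty parameter list overrides
  // method without any parameter list".
  override def hasNext: Boolean = !consumed

  // Declaring this as `override def next: Int` is the mirror error,
  // so the empty parameter list must be kept.
  override def next(): Int = {
    consumed = true
    value
  }
}
```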

* Infix operator expressions wrapped so that the operator begins the continuation line instead of ending the previous one.
```
    3L * math.min(k, numFeatures) * math.min(k, numFeatures)
    + math.max(math.max(k, numFeatures), 4L * math.min(k, numFeatures)
    * math.min(k, numFeatures) + 4L * math.min(k, numFeatures))
```
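
To see why this pattern breaks, consider the small sketch below (names invented for illustration). When a continuation line begins with the operator, Scala parses each line as its own statement: the first line becomes a discarded pure expression and `+ ...` is read as a unary plus, which the 2.13 build (with warnings treated as errors) rejects. Ending each line with the operator keeps the whole thing a single expression:
```
object InfixWrapSketch {
  val a = 4L
  val b = 10L

  // Broken form: parsed as two statements, `3L * a * a` (value discarded)
  // and `+ b` (unary plus), instead of one sum.
  // val broken = {
  //   3L * a * a
  //   + b
  // }

  // Fixed form: the trailing `+` forces continuation onto the next line.
  val fixed: Long = 3L * a * a +
    b  // == 58
}
```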

### Why are the changes needed?

For building Spark with sbt and Scala 2.13.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

With this change and #29742 applied, compilation passed with the following command.
```
build/sbt -Pscala-2.13  -Phive -Phive-thriftserver -Pyarn -Pkubernetes compile test:compile
```

Closes #29745 from sarutak/fix-code-for-sbt-and-spark-2.13.

Authored-by: Kousuke Saruta <sarutak@oss.nttdata.com>
Signed-off-by: HyukjinKwon <gurwls223@apache.org>
sarutak authored and HyukjinKwon committed Sep 14, 2020
Commit b121f0d (1 parent: 742fcff)
Showing 10 changed files with 15 additions and 15 deletions.
```diff
@@ -248,7 +248,7 @@ private class KafkaRDDIterator[K, V](
     }
   }
 
-  override def hasNext(): Boolean = requestOffset < part.untilOffset
+  override def hasNext: Boolean = requestOffset < part.untilOffset
 
   override def next(): ConsumerRecord[K, V] = {
     if (!hasNext) {
@@ -291,7 +291,7 @@ private class CompactedKafkaRDDIterator[K, V](
 
   private var okNext: Boolean = true
 
-  override def hasNext(): Boolean = okNext
+  override def hasNext: Boolean = okNext
 
   override def next(): ConsumerRecord[K, V] = {
     if (!hasNext) {
```
```diff
@@ -786,7 +786,7 @@ class SparseVector @Since("2.0.0") (
 
       override def hasNext: Boolean = i < localSize
 
-      override def next: (Int, Double) = {
+      override def next(): (Int, Double) = {
         val v = if (i == k) {
           j += 1
           k = if (j < localNumActives) localIndices(j) else -1
```
mllib/src/main/scala/org/apache/spark/mllib/feature/PCA.scala (3 additions, 3 deletions)
```diff
@@ -124,9 +124,9 @@ private[feature] object PCAUtil {
   // 6e541be066d547a097f5089165cd7c38c3ca276d/math/src/main/scala/breeze/linalg/
   // functions/svd.scala#L87
   def memoryCost(k: Int, numFeatures: Int): Long = {
-    3L * math.min(k, numFeatures) * math.min(k, numFeatures)
-    + math.max(math.max(k, numFeatures), 4L * math.min(k, numFeatures)
-    * math.min(k, numFeatures) + 4L * math.min(k, numFeatures))
+    3L * math.min(k, numFeatures) * math.min(k, numFeatures) +
+      math.max(math.max(k, numFeatures), 4L * math.min(k, numFeatures) *
+      math.min(k, numFeatures) + 4L * math.min(k, numFeatures))
   }
 
 }
```
```diff
@@ -984,7 +984,7 @@ class SparseVector @Since("1.0.0") (
 
       override def hasNext: Boolean = i < localSize
 
-      override def next: (Int, Double) = {
+      override def next(): (Int, Double) = {
         val v = if (i == k) {
           j += 1
           k = if (j < localNumActives) localIndices(j) else -1
```
```diff
@@ -78,7 +78,7 @@ case class ExecutedCommandExec(cmd: RunnableCommand) extends LeafExecNode {
 
   override def executeCollect(): Array[InternalRow] = sideEffectResult.toArray
 
-  override def executeToIterator: Iterator[InternalRow] = sideEffectResult.toIterator
+  override def executeToIterator(): Iterator[InternalRow] = sideEffectResult.toIterator
 
   override def executeTake(limit: Int): Array[InternalRow] = sideEffectResult.take(limit).toArray
 
@@ -119,7 +119,7 @@ case class DataWritingCommandExec(cmd: DataWritingCommand, child: SparkPlan)
 
   override def executeCollect(): Array[InternalRow] = sideEffectResult.toArray
 
-  override def executeToIterator: Iterator[InternalRow] = sideEffectResult.toIterator
+  override def executeToIterator(): Iterator[InternalRow] = sideEffectResult.toIterator
 
   override def executeTake(limit: Int): Array[InternalRow] = sideEffectResult.take(limit).toArray
 
```
```diff
@@ -44,7 +44,7 @@ abstract class V2CommandExec extends SparkPlan {
    */
   override def executeCollect(): Array[InternalRow] = result.toArray
 
-  override def executeToIterator: Iterator[InternalRow] = result.toIterator
+  override def executeToIterator(): Iterator[InternalRow] = result.toIterator
 
   override def executeTake(limit: Int): Array[InternalRow] = result.take(limit).toArray
 
```
```diff
@@ -61,7 +61,7 @@ class JDBCTableCatalog extends TableCatalog with Logging {
         .getTables(null, schemaPattern, "%", Array("TABLE"));
       new Iterator[Identifier] {
         def hasNext = rs.next()
-        def next = Identifier.of(namespace, rs.getString("TABLE_NAME"))
+        def next() = Identifier.of(namespace, rs.getString("TABLE_NAME"))
       }.toArray
     }
   }
```
```diff
@@ -171,7 +171,7 @@ class SymmetricHashJoinStateManager(
         return null
       }
 
-      override def close: Unit = {}
+      override def close(): Unit = {}
     }
   }
 
@@ -280,7 +280,7 @@ class SymmetricHashJoinStateManager(
         return reusedRet.withNew(currentKey, currentValue.value, currentValue.matched)
       }
 
-      override def close: Unit = {}
+      override def close(): Unit = {}
     }
   }
 
```
```diff
@@ -96,7 +96,7 @@ class WriteAheadLogBackedBlockRDD[T: ClassTag](
   @transient private val hadoopConfig = sc.hadoopConfiguration
   private val broadcastedHadoopConf = new SerializableConfiguration(hadoopConfig)
 
-  override def isValid(): Boolean = true
+  override def isValid: Boolean = true
 
   override def getPartitions: Array[Partition] = {
     assertValid()
```
```diff
@@ -234,7 +234,7 @@ private[streaming] class CountingIterator[T](iterator: Iterator[T]) extends Iter
 
   private def isFullyConsumed: Boolean = !iterator.hasNext
 
-  def hasNext(): Boolean = iterator.hasNext
+  def hasNext: Boolean = iterator.hasNext
 
   def count(): Option[Long] = {
     if (isFullyConsumed) Some(_count) else None
```
