[SPARK-5038] Add explicit return type for implicit functions. #3860

Closed · wants to merge 1 commit into from
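The change is mechanical, but the motivation is worth spelling out: when an `implicit def` omits its result type, the public signature is whatever the compiler infers from the body, so an innocuous body edit can silently change or leak the API, and recursive implicits fail to compile outright. A minimal sketch of the failure mode (illustrative names, not part of this patch):

```scala
import scala.language.implicitConversions

class RichInt(val self: Int) {
  def squared: Int = self * self
}

object Before {
  // Inferred result type: whatever the body happens to produce. If the body
  // later became `new RichInt(i) with Serializable`, the inferred public
  // signature would silently change with it.
  implicit def intToRichInt(i: Int) = new RichInt(i)
}

object After {
  // The explicit annotation pins the public API to RichInt regardless of the
  // body, which is the change this patch applies throughout.
  implicit def intToRichInt(i: Int): RichInt = new RichInt(i)
}
```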
14 changes: 7 additions & 7 deletions core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1708,19 +1708,19 @@ object SparkContext extends Logging {

  // Implicit conversions to common Writable types, for saveAsSequenceFile

-  implicit def intToIntWritable(i: Int) = new IntWritable(i)
+  implicit def intToIntWritable(i: Int): IntWritable = new IntWritable(i)

-  implicit def longToLongWritable(l: Long) = new LongWritable(l)
+  implicit def longToLongWritable(l: Long): LongWritable = new LongWritable(l)

-  implicit def floatToFloatWritable(f: Float) = new FloatWritable(f)
+  implicit def floatToFloatWritable(f: Float): FloatWritable = new FloatWritable(f)

-  implicit def doubleToDoubleWritable(d: Double) = new DoubleWritable(d)
+  implicit def doubleToDoubleWritable(d: Double): DoubleWritable = new DoubleWritable(d)

-  implicit def boolToBoolWritable (b: Boolean) = new BooleanWritable(b)
+  implicit def boolToBoolWritable (b: Boolean): BooleanWritable = new BooleanWritable(b)

-  implicit def bytesToBytesWritable (aob: Array[Byte]) = new BytesWritable(aob)
+  implicit def bytesToBytesWritable (aob: Array[Byte]): BytesWritable = new BytesWritable(aob)

-  implicit def stringToText(s: String) = new Text(s)
+  implicit def stringToText(s: String): Text = new Text(s)

  private implicit def arrayToArrayWritable[T <% Writable: ClassTag](arr: Traversable[T])
    : ArrayWritable = {
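For context, a sketch of how these conversions are exercised. `saveAsSequenceFile` needs the RDD's key and value types to be viewable as Hadoop `Writable`s, and the implicits above supply those views. This is a hypothetical usage, assuming a live `SparkContext` named `sc` and an illustrative output path:

```scala
// Brings the Writable conversions (and saveAsSequenceFile enrichment) into scope.
import org.apache.spark.SparkContext._

val pairs = sc.parallelize(Seq((1, "a"), (2, "b")))
// Int => IntWritable and String => Text are applied implicitly, so the
// pairs can be written as a SequenceFile without manual wrapping.
pairs.saveAsSequenceFile("/tmp/pairs-seqfile")
```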
38 changes: 19 additions & 19 deletions core/src/main/scala/org/apache/spark/util/Vector.scala
@@ -24,9 +24,9 @@ import org.apache.spark.util.random.XORShiftRandom

@deprecated("Use Vectors.dense from Spark's mllib.linalg package instead.", "1.0.0")
class Vector(val elements: Array[Double]) extends Serializable {
-  def length = elements.length
+  def length: Int = elements.length

-  def apply(index: Int) = elements(index)
+  def apply(index: Int): Double = elements(index)

  def + (other: Vector): Vector = {
    if (length != other.length) {
@@ -35,7 +35,7 @@ class Vector(val elements: Array[Double]) extends Serializable {
    Vector(length, i => this(i) + other(i))
  }

-  def add(other: Vector) = this + other
+  def add(other: Vector): Vector = this + other

  def - (other: Vector): Vector = {
    if (length != other.length) {
@@ -44,7 +44,7 @@ class Vector(val elements: Array[Double]) extends Serializable {
    Vector(length, i => this(i) - other(i))
  }

-  def subtract(other: Vector) = this - other
+  def subtract(other: Vector): Vector = this - other

  def dot(other: Vector): Double = {
    if (length != other.length) {
@@ -93,19 +93,19 @@ class Vector(val elements: Array[Double]) extends Serializable {
    this
  }

-  def addInPlace(other: Vector) = this +=other
+  def addInPlace(other: Vector): Vector = this +=other

  def * (scale: Double): Vector = Vector(length, i => this(i) * scale)

-  def multiply (d: Double) = this * d
+  def multiply (d: Double): Vector = this * d

  def / (d: Double): Vector = this * (1 / d)

-  def divide (d: Double) = this / d
+  def divide (d: Double): Vector = this / d

-  def unary_- = this * -1
+  def unary_- : Vector = this * -1

-  def sum = elements.reduceLeft(_ + _)
+  def sum: Double = elements.reduceLeft(_ + _)

  def squaredDist(other: Vector): Double = {
    var ans = 0.0
@@ -119,40 +119,40 @@ class Vector(val elements: Array[Double]) extends Serializable {

  def dist(other: Vector): Double = math.sqrt(squaredDist(other))

-  override def toString = elements.mkString("(", ", ", ")")
+  override def toString: String = elements.mkString("(", ", ", ")")
}

object Vector {
-  def apply(elements: Array[Double]) = new Vector(elements)
+  def apply(elements: Array[Double]): Vector = new Vector(elements)

-  def apply(elements: Double*) = new Vector(elements.toArray)
+  def apply(elements: Double*): Vector = new Vector(elements.toArray)

  def apply(length: Int, initializer: Int => Double): Vector = {
    val elements: Array[Double] = Array.tabulate(length)(initializer)
    new Vector(elements)
  }

-  def zeros(length: Int) = new Vector(new Array[Double](length))
+  def zeros(length: Int): Vector = new Vector(new Array[Double](length))

-  def ones(length: Int) = Vector(length, _ => 1)
+  def ones(length: Int): Vector = Vector(length, _ => 1)

  /**
   * Creates this [[org.apache.spark.util.Vector]] of given length containing random numbers
   * between 0.0 and 1.0. Optional scala.util.Random number generator can be provided.
   */
-  def random(length: Int, random: Random = new XORShiftRandom()) =
+  def random(length: Int, random: Random = new XORShiftRandom()): Vector =
    Vector(length, _ => random.nextDouble())

  class Multiplier(num: Double) {
-    def * (vec: Vector) = vec * num
+    def * (vec: Vector): Vector = vec * num
  }

-  implicit def doubleToMultiplier(num: Double) = new Multiplier(num)
+  implicit def doubleToMultiplier(num: Double): Multiplier = new Multiplier(num)

  implicit object VectorAccumParam extends org.apache.spark.AccumulatorParam[Vector] {
-    def addInPlace(t1: Vector, t2: Vector) = t1 + t2
+    def addInPlace(t1: Vector, t2: Vector): Vector = t1 + t2

-    def zero(initialValue: Vector) = Vector.zeros(initialValue.length)
+    def zero(initialValue: Vector): Vector = Vector.zeros(initialValue.length)
  }

}
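The `Multiplier`/`doubleToMultiplier` pair exists so the scalar can appear on the left of a multiplication. A small usage sketch (the class carries a `@deprecated` annotation, so expect deprecation warnings):

```scala
import org.apache.spark.util.Vector
import org.apache.spark.util.Vector._  // brings doubleToMultiplier into scope

val v = Vector(1.0, 2.0, 3.0)
val a = v * 2.0  // method defined on Vector itself
val b = 2.0 * v  // desugars to doubleToMultiplier(2.0) * v
```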
@@ -129,44 +129,45 @@ private[impl] case class EdgeWithLocalIds[@specialized ED](
    srcId: VertexId, dstId: VertexId, localSrcId: Int, localDstId: Int, attr: ED)

private[impl] object EdgeWithLocalIds {
-  implicit def lexicographicOrdering[ED] = new Ordering[EdgeWithLocalIds[ED]] {
-    override def compare(a: EdgeWithLocalIds[ED], b: EdgeWithLocalIds[ED]): Int = {
-      if (a.srcId == b.srcId) {
-        if (a.dstId == b.dstId) 0
-        else if (a.dstId < b.dstId) -1
-        else 1
-      } else if (a.srcId < b.srcId) -1
-      else 1
-    }
-  }
+  implicit def lexicographicOrdering[ED]: Ordering[EdgeWithLocalIds[ED]] =
+    new Ordering[EdgeWithLocalIds[ED]] {
+      override def compare(a: EdgeWithLocalIds[ED], b: EdgeWithLocalIds[ED]): Int = {
+        if (a.srcId == b.srcId) {
+          if (a.dstId == b.dstId) 0
+          else if (a.dstId < b.dstId) -1
+          else 1
+        } else if (a.srcId < b.srcId) -1
+        else 1
+      }
+    }

-  private[graphx] def edgeArraySortDataFormat[ED]
-    = new SortDataFormat[EdgeWithLocalIds[ED], Array[EdgeWithLocalIds[ED]]] {
-    override def getKey(
-        data: Array[EdgeWithLocalIds[ED]], pos: Int): EdgeWithLocalIds[ED] = {
-      data(pos)
-    }
+  private[graphx] def edgeArraySortDataFormat[ED] = {
+    new SortDataFormat[EdgeWithLocalIds[ED], Array[EdgeWithLocalIds[ED]]] {
+      override def getKey(data: Array[EdgeWithLocalIds[ED]], pos: Int): EdgeWithLocalIds[ED] = {
+        data(pos)
+      }

-    override def swap(data: Array[EdgeWithLocalIds[ED]], pos0: Int, pos1: Int): Unit = {
-      val tmp = data(pos0)
-      data(pos0) = data(pos1)
-      data(pos1) = tmp
-    }
+      override def swap(data: Array[EdgeWithLocalIds[ED]], pos0: Int, pos1: Int): Unit = {
+        val tmp = data(pos0)
+        data(pos0) = data(pos1)
+        data(pos1) = tmp
+      }

-    override def copyElement(
-        src: Array[EdgeWithLocalIds[ED]], srcPos: Int,
-        dst: Array[EdgeWithLocalIds[ED]], dstPos: Int) {
-      dst(dstPos) = src(srcPos)
-    }
+      override def copyElement(
+          src: Array[EdgeWithLocalIds[ED]], srcPos: Int,
+          dst: Array[EdgeWithLocalIds[ED]], dstPos: Int) {
+        dst(dstPos) = src(srcPos)
+      }

-    override def copyRange(
-        src: Array[EdgeWithLocalIds[ED]], srcPos: Int,
-        dst: Array[EdgeWithLocalIds[ED]], dstPos: Int, length: Int) {
-      System.arraycopy(src, srcPos, dst, dstPos, length)
-    }
+      override def copyRange(
+          src: Array[EdgeWithLocalIds[ED]], srcPos: Int,
+          dst: Array[EdgeWithLocalIds[ED]], dstPos: Int, length: Int) {
+        System.arraycopy(src, srcPos, dst, dstPos, length)
+      }

-    override def allocate(length: Int): Array[EdgeWithLocalIds[ED]] = {
-      new Array[EdgeWithLocalIds[ED]](length)
-    }
-  }
+      override def allocate(length: Int): Array[EdgeWithLocalIds[ED]] = {
+        new Array[EdgeWithLocalIds[ED]](length)
+      }
+    }
+  }
}
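`EdgeWithLocalIds` is `private[impl]`, so the `Ordering` above cannot be demonstrated from user code; the pattern itself looks like this sketch with illustrative stand-in names (not GraphX API):

```scala
case class E(srcId: Long, dstId: Long)

object E {
  // Explicit return type, matching the style adopted in this patch.
  implicit def lexicographicOrdering: Ordering[E] = new Ordering[E] {
    override def compare(a: E, b: E): Int = {
      if (a.srcId != b.srcId) java.lang.Long.compare(a.srcId, b.srcId)
      else java.lang.Long.compare(a.dstId, b.dstId)
    }
  }
}

// `sorted` resolves Ordering[E] from E's companion object.
val edges = Array(E(2, 1), E(1, 3), E(1, 2)).sorted
// edges: Array(E(1,2), E(1,3), E(2,1))
```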
@@ -74,8 +74,8 @@ object ShippableVertexPartition {
   * Implicit conversion to allow invoking `VertexPartitionBase` operations directly on a
   * `ShippableVertexPartition`.
   */
-  implicit def shippablePartitionToOps[VD: ClassTag](partition: ShippableVertexPartition[VD]) =
-    new ShippableVertexPartitionOps(partition)
+  implicit def shippablePartitionToOps[VD: ClassTag](partition: ShippableVertexPartition[VD])
+    : ShippableVertexPartitionOps[VD] = new ShippableVertexPartitionOps(partition)

  /**
   * Implicit evidence that `ShippableVertexPartition` is a member of the
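`shippablePartitionToOps` is the standard enrich-my-library conversion, and the explicit return type matters here because the Ops class is the public face of the conversion. A generic sketch of the pattern, with illustrative names rather than the real GraphX classes:

```scala
import scala.language.implicitConversions
import scala.reflect.ClassTag

class Partition[VD: ClassTag](val values: Array[VD])

class PartitionOps[VD: ClassTag](self: Partition[VD]) {
  def count: Int = self.values.length
}

object Partition {
  // Placed in the companion so it is found without an import; the explicit
  // `: PartitionOps[VD]` pins the conversion's public type.
  implicit def partitionToOps[VD: ClassTag](partition: Partition[VD])
    : PartitionOps[VD] = new PartitionOps(partition)
}

// `count` is not defined on Partition; the implicit conversion supplies it.
val n = new Partition(Array(1, 2, 3)).count
```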
@@ -38,8 +38,8 @@ private[graphx] object VertexPartition {
   * Implicit conversion to allow invoking `VertexPartitionBase` operations directly on a
   * `VertexPartition`.
   */
-  implicit def partitionToOps[VD: ClassTag](partition: VertexPartition[VD]) =
-    new VertexPartitionOps(partition)
+  implicit def partitionToOps[VD: ClassTag](partition: VertexPartition[VD])
+    : VertexPartitionOps[VD] = new VertexPartitionOps(partition)

  /**
   * Implicit evidence that `VertexPartition` is a member of the `VertexPartitionBaseOpsConstructor`
@@ -238,8 +238,8 @@ private[graphx] abstract class VertexPartitionBaseOps
   * because these methods return a `Self` and this implicit conversion re-wraps that in a
   * `VertexPartitionBaseOps`. This relies on the context bound on `Self`.
   */
-  private implicit def toOps[VD2: ClassTag](
-      partition: Self[VD2]): VertexPartitionBaseOps[VD2, Self] = {
+  private implicit def toOps[VD2: ClassTag](partition: Self[VD2])
+    : VertexPartitionBaseOps[VD2, Self] = {
    implicitly[VertexPartitionBaseOpsConstructor[Self]].toOps(partition)
  }
}
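The comment in this last hunk describes the trickiest case: map-style methods return the abstract container type `Self`, and the private implicit re-wraps that value so further Ops methods keep chaining. A hedged sketch of the mechanism with illustrative names (the real evidence typeclass is `VertexPartitionBaseOpsConstructor`):

```scala
import scala.language.{higherKinds, implicitConversions}

// Evidence that a container C can be wrapped in its Ops class.
trait OpsConstructor[C[_]] {
  def toOps[V](c: C[V]): Ops[V, C]
}

// The context bound `Self[_]: OpsConstructor` supplies the evidence.
abstract class Ops[V, Self[_]: OpsConstructor] {
  def map[V2](f: V => V2): Self[V2]

  // Re-wraps a bare Self so that Ops methods can be invoked on it again.
  private implicit def toOps[V2](c: Self[V2]): Ops[V2, Self] =
    implicitly[OpsConstructor[Self]].toOps(c)

  // Without the implicit, `map(f)` would be a bare Self[V2] with no `map`.
  def mapTwice[V2](f: V => V2, g: V2 => V2): Self[V2] = map(f).map(g)
}
```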