Skip to content

Commit

Permalink
Remove the `over` function from functions.scala
Browse files Browse the repository at this point in the history
  • Loading branch information
chenghao-intel committed May 21, 2015
1 parent 964c013 commit 53f89f2
Show file tree
Hide file tree
Showing 2 changed files with 68 additions and 4 deletions.
57 changes: 55 additions & 2 deletions sql/core/src/main/scala/org/apache/spark/sql/functions.scala
Original file line number Diff line number Diff line change
Expand Up @@ -446,6 +446,61 @@ object functions {
UnresolvedWindowFunction("lead", e.expr :: Literal(count) :: Literal(defaultValue) :: Nil)
}

/**
 * Creates a new [[WindowFunctionDefinition]] partitioned by the named columns.
 * For example:
 * {{{
 * // The following 2 are equivalent
 * partitionBy("k1", "k2").orderBy("k3")
 * partitionBy($"k1", $"k2").orderBy($"k3")
 * }}}
 * @group window_funcs
 */
@scala.annotation.varargs
def partitionBy(colName: String, colNames: String*): WindowFunctionDefinition = {
  // Start from an empty window spec and apply the partitioning columns to it.
  val emptySpec = new WindowFunctionDefinition()
  emptySpec.partitionBy(colName, colNames: _*)
}

/**
 * Creates a new [[WindowFunctionDefinition]] partitioned by the given [[Column]]s.
 * For example:
 * {{{
 * partitionBy($"col1", $"col2").orderBy("value")
 * }}}
 * @group window_funcs
 */
@scala.annotation.varargs
def partitionBy(cols: Column*): WindowFunctionDefinition = {
  // Delegate to an empty window spec; partitioning is the only clause set here.
  val emptySpec = new WindowFunctionDefinition()
  emptySpec.partitionBy(cols: _*)
}

/**
 * Creates a new [[WindowFunctionDefinition]] sorted by the named columns.
 * For example:
 * {{{
 * // The following 2 are equivalent
 * orderBy("k2", "k3").partitionBy("k1")
 * orderBy($"k2", $"k3").partitionBy("k1")
 * }}}
 * @group window_funcs
 */
@scala.annotation.varargs
def orderBy(colName: String, colNames: String*): WindowFunctionDefinition = {
  // Start from an empty window spec and apply the ordering columns to it.
  val emptySpec = new WindowFunctionDefinition()
  emptySpec.orderBy(colName, colNames: _*)
}

/**
 * Returns a new [[WindowFunctionDefinition]] sorted by the specified columns.
 * For example:
 * {{{
 * val w = orderBy($"k2", $"k3").partitionBy("k1")
 * }}}
 * @group window_funcs
 */
// @varargs added for consistency with the other window-spec overloads above
// (e.g. partitionBy(cols: Column*)) and so Java callers can use varargs too.
@scala.annotation.varargs
def orderBy(cols: Column*): WindowFunctionDefinition = {
  new WindowFunctionDefinition().orderBy(cols: _*)
}

//////////////////////////////////////////////////////////////////////////////////////////////
// Non-aggregate functions
//////////////////////////////////////////////////////////////////////////////////////////////
Expand Down Expand Up @@ -1519,6 +1574,4 @@ object functions {
UnresolvedFunction(udfName, cols.map(_.expr))
}

def over: WindowFunctionDefinition = new WindowFunctionDefinition()

}
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,20 @@ import org.apache.spark.sql.hive.test.TestHive.implicits._

class HiveDataFrameWindowSuite extends QueryTest {

test("reuse window") {
// Verifies that a window spec built with the top-level partitionBy(...) helper
// can be stored in a val and reused across several window-function expressions.
test("reuse window partitionBy") {
// Two partitions ("1" and "2"), each containing two identical rows.
val df = Seq((1, "1"), (2, "2"), (1, "1"), (2, "2")).toDF("key", "value")
// Window partitioned by "key" and ordered by "value"; reused for both leads below.
val w = partitionBy("key").orderBy("value")

checkAnswer(
df.select(
// lead over a 2-row partition: first row sees the second row's value,
// the second (last) row has no following row, hence the null results.
lead("key").over(w).toColumn,
lead("value").over(w).toColumn),
Row(1, "1") :: Row(2, "2") :: Row(null, null) :: Row(null, null) :: Nil)
}

test("reuse window orderBy") {
val df = Seq((1, "1"), (2, "2"), (1, "1"), (2, "2")).toDF("key", "value")
val w = orderBy("value").partitionBy("key")

checkAnswer(
df.select(
Expand Down

0 comments on commit 53f89f2

Please sign in to comment.