[SPARK-5586][Spark Shell][SQL] Make sqlContext available in spark shell

The result looks like this:
```
15/02/05 13:41:22 INFO SparkILoop: Created spark context..
Spark context available as sc.
15/02/05 13:41:22 INFO SparkILoop: Created sql context..
SQLContext available as sqlContext.

scala> sq
sql          sqlContext   sqlParser    sqrt
```
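
Since the change also auto-imports the `sql()` function (commit c7f5203 below), queries can be run either through `sqlContext` or directly. A hypothetical follow-up session, assuming a temporary table named `people` has already been registered:
```
scala> sqlContext.sql("SELECT name FROM people").collect()

scala> sql("SELECT name FROM people").collect()   // same query via the auto-imported sql()
```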

Author: OopsOutOfMemory <victorshengli@126.com>

Closes #4387 from OopsOutOfMemory/sqlContextInShell and squashes the following commits:

c7f5203 [OopsOutOfMemory] auto-import sql() function
e160697 [OopsOutOfMemory] Merge branch 'sqlContextInShell' of https://github.com/OopsOutOfMemory/spark into sqlContextInShell
37c0a16 [OopsOutOfMemory] auto detect hive support
a9c59d9 [OopsOutOfMemory] rename and reduce range of imports
6b9e309 [OopsOutOfMemory] Merge branch 'master' into sqlContextInShell
cae652f [OopsOutOfMemory] make sqlContext available in spark shell
OopsOutOfMemory authored and marmbrus committed Feb 6, 2015
1 parent 4793c84 commit 3d3ecd7
Showing 4 changed files with 58 additions and 1 deletion.
@@ -45,6 +45,7 @@ import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse}
import org.apache.spark.Logging
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
import org.apache.spark.util.Utils

/** The Scala interactive shell. It provides a read-eval-print loop
@@ -130,6 +131,7 @@ class SparkILoop(
// NOTE: Must be public for visibility
@DeveloperApi
var sparkContext: SparkContext = _
var sqlContext: SQLContext = _

override def echoCommandMessage(msg: String) {
intp.reporter printMessage msg
@@ -1016,6 +1018,23 @@ class SparkILoop(
sparkContext
}

@DeveloperApi
def createSQLContext(): SQLContext = {
val name = "org.apache.spark.sql.hive.HiveContext"
val loader = Utils.getContextOrSparkClassLoader
try {
sqlContext = loader.loadClass(name).getConstructor(classOf[SparkContext])
.newInstance(sparkContext).asInstanceOf[SQLContext]
logInfo("Created sql context (with Hive support)..")
}
catch {
case cnf: java.lang.ClassNotFoundException =>
sqlContext = new SQLContext(sparkContext)
logInfo("Created sql context..")
}
sqlContext
}

private def getMaster(): String = {
val master = this.master match {
case Some(m) => m
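
`createSQLContext()` detects Hive support at runtime: it tries to reflectively load `org.apache.spark.sql.hive.HiveContext` and falls back to a plain `SQLContext` when that class is absent, i.e. when Spark was built without Hive. A minimal standalone sketch of the same fallback pattern, assuming an existing `SparkContext` named `sc` (the helper name `makeSqlContext` is illustrative):

```
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext

// Prefer HiveContext when it is on the classpath; otherwise fall back to SQLContext.
def makeSqlContext(sc: SparkContext): SQLContext =
  try {
    Class.forName("org.apache.spark.sql.hive.HiveContext")
      .getConstructor(classOf[SparkContext])
      .newInstance(sc)
      .asInstanceOf[SQLContext]
  } catch {
    case _: ClassNotFoundException =>
      new SQLContext(sc)  // Spark was built without Hive support
  }
```
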
@@ -127,7 +127,17 @@ private[repl] trait SparkILoopInit {
_sc
}
""")
command("""
@transient val sqlContext = {
val _sqlContext = org.apache.spark.repl.Main.interp.createSQLContext()
println("SQL context available as sqlContext.")
_sqlContext
}
""")
command("import org.apache.spark.SparkContext._")
command("import sqlContext.implicits._")
command("import sqlContext.sql")
command("import org.apache.spark.sql.Dsl._")
}
}

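Each `command(...)` string above is executed as if the user had typed it at the prompt: the first block binds `sqlContext` in the session (marked `@transient`, like `sc`, so it is not dragged into closures that get serialized to executors), and the remaining lines pull in the implicit conversions, the `sql()` shortcut, and the DataFrame DSL. Roughly, this saves the user from typing the equivalent by hand at every shell start:

```
scala> val sqlContext = org.apache.spark.repl.Main.interp.createSQLContext()

scala> import sqlContext.implicits._

scala> import sqlContext.sql

scala> import org.apache.spark.sql.Dsl._
```
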
18 changes: 18 additions & 0 deletions repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
@@ -19,6 +19,7 @@ package org.apache.spark.repl

import org.apache.spark.util.Utils
import org.apache.spark._
import org.apache.spark.sql.SQLContext

import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.SparkILoop
@@ -34,6 +35,7 @@ object Main extends Logging {
"-Yrepl-outdir", s"${outputDir.getAbsolutePath}", "-Yrepl-sync"), true)
val classServer = new HttpServer(conf, outputDir, new SecurityManager(conf))
var sparkContext: SparkContext = _
var sqlContext: SQLContext = _
var interp = new SparkILoop // this is a public var because tests reset it.

def main(args: Array[String]) {
@@ -74,6 +76,22 @@ object Main extends Logging {
sparkContext
}

def createSQLContext(): SQLContext = {
val name = "org.apache.spark.sql.hive.HiveContext"
val loader = Utils.getContextOrSparkClassLoader
try {
sqlContext = loader.loadClass(name).getConstructor(classOf[SparkContext])
.newInstance(sparkContext).asInstanceOf[SQLContext]
logInfo("Created sql context (with Hive support)..")
}
catch {
case cnf: java.lang.ClassNotFoundException =>
sqlContext = new SQLContext(sparkContext)
logInfo("Created sql context..")
}
sqlContext
}

private def getMaster: String = {
val master = {
val envMaster = sys.env.get("MASTER")
@@ -66,8 +66,18 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
println("Spark context available as sc.")
_sc
}
""")
""")
command( """
@transient val sqlContext = {
val _sqlContext = org.apache.spark.repl.Main.createSQLContext()
println("SQL context available as sqlContext.")
_sqlContext
}
""")
command("import org.apache.spark.SparkContext._")
command("import sqlContext.implicits._")
command("import sqlContext.sql")
command("import org.apache.spark.sql.Dsl._")
}
}
