diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index cf3820fcb6a35..b5ab12300d0e5 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1884,7 +1884,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
*
* @param f the closure to clean
* @param checkSerializable whether or not to immediately check f for serializability
- * @throws SparkException if checkSerializable is set but f is not
+ * @throws SparkException if checkSerializable is set but f is not
* serializable
*/
private[spark] def clean[F <: AnyRef](f: F, checkSerializable: Boolean = true): F = {
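For context, this serializability check is reachable from any RDD transformation, since each transformation passes its closure through clean() before shipping it to executors. A minimal sketch of the failure mode the @throws clause documents (a hypothetical driver program; class and object names are illustrative):

    import org.apache.spark.{SparkConf, SparkContext, SparkException}

    object CleanClosureDemo {
      // Deliberately not Serializable, so a closure capturing it must be rejected.
      class Handle { def bias: Int = 1 }

      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(new SparkConf().setMaster("local").setAppName("clean-demo"))
        val handle = new Handle
        try {
          // map() runs sc.clean() on its closure; with checkSerializable = true
          // (the default), the unserializable capture fails fast here at the
          // call site rather than at task launch.
          sc.parallelize(1 to 3).map(x => x + handle.bias).collect()
        } catch {
          case e: SparkException => println(s"Rejected: ${e.getMessage}")
        } finally {
          sc.stop()
        }
      }
    }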
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index da332c98b752c..1d1596268f1d2 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -19,12 +19,13 @@ package org.apache.spark.sql.hive
import java.io.{BufferedReader, File, InputStreamReader, PrintStream}
import java.sql.Timestamp
-import java.util.{ArrayList => JArrayList}
+import java.util.{ArrayList => JArrayList, Properties}

import org.apache.hadoop.hive.ql.parse.VariableSubstitution
import org.apache.spark.sql.catalyst.ParserDialect

import scala.collection.JavaConversions._
+import scala.collection.mutable.HashMap
import scala.language.implicitConversions

import org.apache.hadoop.fs.{FileSystem, Path}
@@ -153,7 +154,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
* Hive 13 as this is the version of Hive that is packaged with Spark SQL. This copy of the
* client is used for execution related tasks like registering temporary functions or ensuring
* that the ThreadLocal SessionState is correctly populated. This copy of Hive is *not* used
- * for storing peristent metadata, and only point to a dummy metastore in a temporary directory.
+ * for storing persistent metadata, and only points to a dummy metastore in a temporary directory.
*/
@transient
protected[hive] lazy val executionHive: ClientWrapper = {
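The executionHive client described above is the consumer of the newTemporaryConfiguration() rewrite below: it should only ever see the dummy, local metastore. A minimal sketch of that wiring, under an assumed helper name (applyTempMetastoreConf is illustrative, not the actual ClientWrapper plumbing):

    import org.apache.hadoop.hive.conf.HiveConf
    import org.apache.hadoop.hive.ql.session.SessionState

    // Hypothetical helper: overlay the temporary metastore properties onto a
    // fresh HiveConf, so the execution-side client never picks up metastore
    // settings from hive-site.xml.
    def applyTempMetastoreConf(tempConf: Map[String, String]): HiveConf = {
      val conf = new HiveConf(classOf[SessionState])
      tempConf.foreach { case (key, value) => conf.set(key, value) }
      conf
    }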
@@ -507,14 +508,19 @@ private[hive] object HiveContext {
def newTemporaryConfiguration(): Map[String, String] = {
val tempDir = Utils.createTempDir()
val localMetastore = new File(tempDir, "metastore").getAbsolutePath
- Map(
- "datanucleus.rdbms.datastoreAdapterClassName" ->
- "org.datanucleus.store.rdbms.adapter.DerbyAdapter",
- "javax.jdo.option.ConnectionDriverName" -> "org.apache.derby.jdbc.EmbeddedDriver",
- "javax.jdo.option.ConnectionPassword" -> "mine",
- "javax.jdo.option.ConnectionURL" -> s"jdbc:derby:;databaseName=$localMetastore;create=true",
- "javax.jdo.option.ConnectionUserName" -> "APP"
- )
+ val propMap: HashMap[String, String] = HashMap()
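+ // Mask all datanucleus/jdo properties that hive-site.xml may set, resetting
+ // them to their Hive defaults: this configuration backs a throwaway local metastore.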
+ HiveConf.ConfVars.values().foreach { confvar =>
+ if (confvar.varname.contains("datanucleus") || confvar.varname.contains("jdo")) {
+ propMap.put(confvar.varname, confvar.defaultVal)
+ }
+ }
+ propMap.put("javax.jdo.option.ConnectionURL",
+ s"jdbc:derby:;databaseName=$localMetastore;create=true")
+ propMap.put("datanucleus.rdbms.datastoreAdapterClassName",
+ "org.datanucleus.store.rdbms.adapter.DerbyAdapter")
+ propMap.toMap
}

protected val primitiveTypes =
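As a quick sanity check, the rewritten helper can be exercised from within the org.apache.spark.sql.hive package (a hypothetical test-style snippet; the expected default below assumes Hive 0.13's ConfVars):

    val conf = HiveContext.newTemporaryConfiguration()

    // The two explicit overrides always win over the looped-in defaults:
    assert(conf("javax.jdo.option.ConnectionURL").startsWith("jdbc:derby:"))
    assert(conf("datanucleus.rdbms.datastoreAdapterClassName") ==
      "org.datanucleus.store.rdbms.adapter.DerbyAdapter")

    // Every other datanucleus/jdo key now falls back to its HiveConf default
    // instead of a hand-maintained literal, e.g. the embedded Derby driver:
    assert(conf("javax.jdo.option.ConnectionDriverName") ==
      "org.apache.derby.jdbc.EmbeddedDriver")

Deriving these values from HiveConf.ConfVars keeps the temporary configuration in sync with whatever Hive version Spark is built against, rather than drifting from a hand-written property list.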