block more data source related property
WangTaoTheTonic committed May 22, 2015
1 parent 92a81fa commit e4f0feb
Showing 2 changed files with 15 additions and 11 deletions.
2 changes: 1 addition & 1 deletion core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1884,7 +1884,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
  *
  * @param f the closure to clean
  * @param checkSerializable whether or not to immediately check <tt>f</tt> for serializability
- * @throws <tt>SparkException<tt> if <tt>checkSerializable</tt> is set but <tt>f</tt> is not
+ * @throws SparkException if <tt>checkSerializable</tt> is set but <tt>f</tt> is not
  * serializable
  */
 private[spark] def clean[F <: AnyRef](f: F, checkSerializable: Boolean = true): F = {
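For context on the contract the corrected @throws tag documents: when checkSerializable is set, clean eagerly serializes the closure and fails fast, rather than at task launch. Below is a minimal, self-contained sketch of that check, not Spark's actual implementation; the helper ensureSerializable and the Holder class are hypothetical, and Spark wraps the failure in a SparkException rather than the RuntimeException used here.

import java.io.{ByteArrayOutputStream, NotSerializableException, ObjectOutputStream}

object CleanContractSketch {
  // Stand-in for the check clean() performs when checkSerializable = true.
  // Spark wraps the failure in a SparkException; RuntimeException is used here.
  def ensureSerializable(f: AnyRef): Unit = {
    try {
      new ObjectOutputStream(new ByteArrayOutputStream()).writeObject(f)
    } catch {
      case e: NotSerializableException =>
        throw new RuntimeException(s"Task not serializable: $e", e)
    }
  }

  def main(args: Array[String]): Unit = {
    class Holder { def bump(x: Int): Int = x + 1 } // not Serializable
    val holder = new Holder
    val closure = (x: Int) => holder.bump(x)       // captures holder
    ensureSerializable(closure)                    // fails fast here
  }
}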
24 changes: 14 additions & 10 deletions sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -19,12 +19,13 @@ package org.apache.spark.sql.hive
 
 import java.io.{BufferedReader, File, InputStreamReader, PrintStream}
 import java.sql.Timestamp
-import java.util.{ArrayList => JArrayList}
+import java.util.{ArrayList => JArrayList, Properties}
 
 import org.apache.hadoop.hive.ql.parse.VariableSubstitution
 import org.apache.spark.sql.catalyst.ParserDialect
 
 import scala.collection.JavaConversions._
+import scala.collection.mutable.HashMap
 import scala.language.implicitConversions
 
 import org.apache.hadoop.fs.{FileSystem, Path}
@@ -153,7 +154,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
  * Hive 13 as this is the version of Hive that is packaged with Spark SQL. This copy of the
  * client is used for execution related tasks like registering temporary functions or ensuring
  * that the ThreadLocal SessionState is correctly populated. This copy of Hive is *not* used
- * for storing peristent metadata, and only point to a dummy metastore in a temporary directory.
+ * for storing persistent metadata, and only point to a dummy metastore in a temporary directory.
  */
 @transient
 protected[hive] lazy val executionHive: ClientWrapper = {
@@ -507,14 +508,17 @@ private[hive] object HiveContext {
 def newTemporaryConfiguration(): Map[String, String] = {
   val tempDir = Utils.createTempDir()
   val localMetastore = new File(tempDir, "metastore").getAbsolutePath
-  Map(
-    "datanucleus.rdbms.datastoreAdapterClassName" ->
-      "org.datanucleus.store.rdbms.adapter.DerbyAdapter",
-    "javax.jdo.option.ConnectionDriverName" -> "org.apache.derby.jdbc.EmbeddedDriver",
-    "javax.jdo.option.ConnectionPassword" -> "mine",
-    "javax.jdo.option.ConnectionURL" -> s"jdbc:derby:;databaseName=$localMetastore;create=true",
-    "javax.jdo.option.ConnectionUserName" -> "APP"
-  )
+  val propMap: HashMap[String, String] = HashMap()
+  HiveConf.ConfVars.values().foreach { confvar =>
+    if (confvar.varname.contains("datanucleus") || confvar.varname.contains("jdo")) {
+      propMap.put(confvar.varname, confvar.defaultVal)
+    }
+  }
+  propMap.put("javax.jdo.option.ConnectionURL",
+    s"jdbc:derby:;databaseName=$localMetastore;create=true")
+  propMap.put("datanucleus.rdbms.datastoreAdapterClassName",
+    "org.datanucleus.store.rdbms.adapter.DerbyAdapter")
+  propMap.toMap
 }
 
 protected val primitiveTypes =
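The rewritten newTemporaryConfiguration above is the substance of this commit: instead of overriding a hand-picked set of connection properties, it pins every datanucleus/jdo ConfVar to its Hive default, so metastore data source settings from hive-site.xml cannot leak into the temporary execution metastore, and then forces the Derby-specific values. Below is a minimal, self-contained sketch of that blocking logic; the defaults list is a hypothetical stand-in for HiveConf.ConfVars so the snippet runs without Hive on the classpath.

import scala.collection.mutable

object TempMetastoreConfSketch {
  // Hypothetical stand-in for HiveConf.ConfVars: (varname, defaultVal) pairs.
  val defaults = Seq(
    "javax.jdo.option.ConnectionURL" -> "jdbc:derby:;databaseName=metastore_db;create=true",
    "javax.jdo.option.ConnectionDriverName" -> "org.apache.derby.jdbc.EmbeddedDriver",
    "datanucleus.autoCreateSchema" -> "true",
    "hive.exec.parallel" -> "false" // not a data source property: left out of the map
  )

  def newTemporaryConfiguration(localMetastore: String): Map[String, String] = {
    val propMap = mutable.HashMap[String, String]()
    // Mask every metastore data source property with its default value.
    defaults.foreach { case (name, default) =>
      if (name.contains("datanucleus") || name.contains("jdo")) {
        propMap.put(name, default)
      }
    }
    // Then force the connection to the local, temporary Derby metastore.
    propMap.put("javax.jdo.option.ConnectionURL",
      s"jdbc:derby:;databaseName=$localMetastore;create=true")
    propMap.put("datanucleus.rdbms.datastoreAdapterClassName",
      "org.datanucleus.store.rdbms.adapter.DerbyAdapter")
    propMap.toMap
  }

  def main(args: Array[String]): Unit =
    newTemporaryConfiguration("/tmp/spark-test/metastore").foreach(println)
}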
