eliminate the error log
chenghao-intel committed Sep 11, 2014
1 parent c27718f commit 74fd76b
Showing 1 changed file with 8 additions and 9 deletions.
sql/hive/src/main/scala/org/apache/spark/sql/hive/TestHive.scala: 8 additions & 9 deletions
@@ -376,15 +376,6 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
log.asInstanceOf[org.apache.log4j.Logger].setLevel(org.apache.log4j.Level.WARN)
}

- // It is important that we RESET first as broken hooks that might have been set could break
- // other sql exec here.
- runSqlHive("RESET")
- // For some reason, RESET does not reset the following variables...
- runSqlHive("set datanucleus.cache.collections=true")
- runSqlHive("set datanucleus.cache.collections.lazy=true")
- // Lots of tests fail if we do not change the partition whitelist from the default.
- runSqlHive("set hive.metastore.partition.name.whitelist.pattern=.*")

loadedTables.clear()
catalog.client.getAllTables("default").foreach { t =>
logDebug(s"Deleting table $t")
@@ -410,6 +401,14 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
FunctionRegistry.unregisterTemporaryUDF(udfName)
}

+ // It is important that we RESET first as broken hooks that might have been set could break
+ // other sql exec here.
+ runSqlHive("RESET")
+ // For some reason, RESET does not reset the following variables...
+ runSqlHive("set datanucleus.cache.collections=true")
+ runSqlHive("set datanucleus.cache.collections.lazy=true")
+ // Lots of tests fail if we do not change the partition whitelist from the default.
+ runSqlHive("set hive.metastore.partition.name.whitelist.pattern=.*")
configure()

runSqlHive("USE default")
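For readers skimming the change, here is a minimal, self-contained sketch of the ordering this commit leaves in reset(): clean up test state first, then RESET plus the variables RESET misses, then reconfigure and switch back to the default database. The object and the stub runSqlHive below are hypothetical stand-ins for illustration only; the SQL strings and the ordering come from the diff above.

// Hypothetical, stripped-down model of TestHiveContext.reset() after this
// commit; runSqlHive here is a println stub, the real method talks to Hive.
object ResetOrderingSketch {
  def runSqlHive(sql: String): Unit = println(s"hive> $sql")

  def reset(): Unit = {
    // 1. Clear cached tables, drop test tables, and unregister temporary UDFs
    //    (elided here; see the unchanged context lines in the diff).

    // 2. In the new ordering, RESET runs only after that cleanup
    //    (per the commit title, this eliminates the error log).
    runSqlHive("RESET")
    // 3. Re-apply the variables RESET does not restore.
    runSqlHive("set datanucleus.cache.collections=true")
    runSqlHive("set datanucleus.cache.collections.lazy=true")
    runSqlHive("set hive.metastore.partition.name.whitelist.pattern=.*")
    // 4. Re-run configure() (elided) and switch back to the default database.
    runSqlHive("USE default")
  }

  def main(args: Array[String]): Unit = reset()
}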
