TEST-SPARK-29015

AngersZhuuuu committed Sep 18, 2019
1 parent 91d1031 commit 6dc61e7
Showing 2 changed files with 62 additions and 0 deletions.
@@ -227,6 +227,34 @@ class CliSuite extends SparkFunSuite with BeforeAndAfterAll with Logging {
)
}

test("Commands using SerDe provided jars in conf hive.aux.jars.path") {

val dataFilePath =
Thread.currentThread().getContextClassLoader.getResource("data/files/small_kv.txt")
val hiveContribJar = HiveTestUtils.getHiveContribJar.getCanonicalPath

runCliWithin(
3.minute,
Seq("--conf", s"spark.hadoop.${ConfVars.HIVEAUXJARS}=$hiveContribJar"))(
"""CREATE TABLE addJarWithHiveAux(key string, val string)
|ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe';
""".stripMargin
-> "",
"CREATE TABLE sourceTableForWithHiveAux (key INT, val STRING);"
-> "",
s"LOAD DATA LOCAL INPATH '$dataFilePath' OVERWRITE INTO TABLE sourceTableForWithHiveAux;"
-> "",
"INSERT INTO TABLE addJarWithHiveAux SELECT key, val FROM sourceTableForWithHiveAux;"
-> "",
"SELECT collect_list(array(val)) FROM addJarWithHiveAux;"
-> """[["val_238"],["val_86"],["val_311"],["val_27"],["val_165"]]""",
"DROP TABLE addJarWithHiveAux;"
-> "",
"DROP TABLE sourceTableForWithHiveAux;"
-> ""
)
}

test("SPARK-11188 Analysis error reporting") {
runCliWithin(timeout = 2.minute,
errorResponses = Seq("AnalysisException"))(
@@ -332,4 +360,31 @@ class CliSuite extends SparkFunSuite with BeforeAndAfterAll with Logging {
"SELECT concat_ws(',', 'First', example_max(1234321), 'Third');" -> "First,1234321,Third"
)
}

test("SPARK-29022 Commands using SerDe provided in ADD JAR sql") {
val dataFilePath =
Thread.currentThread().getContextClassLoader.getResource("data/files/small_kv.txt")
val hiveContribJar = HiveTestUtils.getHiveContribJar.getCanonicalPath

runCliWithin(
3.minute)(
s"ADD JAR ${hiveContribJar};" -> "",
"""CREATE TABLE addJarWithSQL(key string, val string)
|ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe';
""".stripMargin
-> "",
"CREATE TABLE sourceTableForWithSQL(key INT, val STRING);"
-> "",
s"LOAD DATA LOCAL INPATH '$dataFilePath' OVERWRITE INTO TABLE sourceTableForWithSQL;"
-> "",
"INSERT INTO TABLE addJarWithSQL SELECT key, val FROM sourceTableForWithSQL;"
-> "",
"SELECT collect_list(array(val)) FROM addJarWithSQL;"
-> """[["val_238"],["val_86"],["val_311"],["val_27"],["val_165"]]""",
"DROP TABLE addJarWithSQL;"
-> "",
"DROP TABLE sourceTableForWithSQL;"
-> ""
)
}
}
@@ -145,6 +145,13 @@ private[hive] class HiveClientImpl(
warehouseDir.foreach { dir =>
ret.getConf.setVar(ConfVars.METASTOREWAREHOUSE, dir)
}
// ret != null means the current thread already has a CliSessionState instance that was
// initialized by SparkSQLCLIDriver. The class loader of that CliSessionState's conf is the
// main thread's class loader, which only knows about the jars passed via --jars. The class
// loader used by AddJarCommand is clientLoader.classLoader, which already contains the jar
// paths passed via --jars in the main thread. By setting the CliSessionState's conf class
// loader to clientLoader.classLoader, we can load jars added both via --jars and via
// AddJarCommand.
ret.getConf.setClassLoader(clientLoader.classLoader)
ret
} else {
newState()
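For context, here is a minimal standalone sketch (not part of the commit) of the class-loader behaviour the one-line change relies on: a Hadoop Configuration resolves classes through whatever loader it was given via setClassLoader, so pointing the CliSessionState's conf at clientLoader.classLoader makes classes from jars added by --jars or ADD JAR visible. The jar path and object name below are hypothetical placeholders.

import java.net.{URL, URLClassLoader}

import org.apache.hadoop.conf.Configuration

// Illustrative only: shows that Configuration#getClassByName resolves classes through the
// loader set via Configuration#setClassLoader, which is the mechanism the fix uses.
object ConfClassLoaderSketch {
  def main(args: Array[String]): Unit = {
    val conf = new Configuration()

    // Stand-in for clientLoader.classLoader after an ADD JAR: a loader that has seen the
    // user jar. The path is a placeholder.
    val addedJar = new URL("file:///tmp/hive-hcatalog-core.jar")
    val jarLoader =
      new URLClassLoader(Array(addedJar), Thread.currentThread().getContextClassLoader)

    // Without this call the conf falls back to a loader that never saw the added jar and the
    // SerDe lookup fails with ClassNotFoundException; with it, the lookup goes through
    // jarLoader and succeeds when the jar really contains the SerDe class.
    conf.setClassLoader(jarLoader)
    val serde = conf.getClassByName("org.apache.hive.hcatalog.data.JsonSerDe")
    println(s"Loaded ${serde.getName} via ${serde.getClassLoader}")
  }
}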
