
Commit

Fix to test all.
dongjoon-hyun committed Feb 2, 2018
1 parent 4512d72 commit 9a2f640
Showing 1 changed file with 5 additions and 7 deletions.
@@ -97,25 +97,23 @@ class FileBasedDataSourceSuite extends QueryTest with SharedSQLContext {
     }
   }
 
-  // Only ORC/Parquet support this.
-  Seq("orc", "parquet").foreach { format =>
+  allFileBasedDataSources.foreach { format =>
     testQuietly(s"Enabling/disabling ignoreMissingFiles using $format") {
       def testIgnoreMissingFiles(): Unit = {
         withTempDir { dir =>
           val basePath = dir.getCanonicalPath
-          spark.range(1).toDF("a").write.format(format).save(new Path(basePath, "first").toString)
-          spark.range(1, 2).toDF("a").write.format(format)
-            .save(new Path(basePath, "second").toString)
+          Seq("0").toDF("a").write.format(format).save(new Path(basePath, "first").toString)
+          Seq("1").toDF("a").write.format(format).save(new Path(basePath, "second").toString)
           val thirdPath = new Path(basePath, "third")
-          spark.range(2, 3).toDF("a").write.format(format).save(thirdPath.toString)
+          Seq("2").toDF("a").write.format(format).save(thirdPath.toString)
           val df = spark.read.format(format).load(
             new Path(basePath, "first").toString,
             new Path(basePath, "second").toString,
             new Path(basePath, "third").toString)
 
           val fs = thirdPath.getFileSystem(spark.sparkContext.hadoopConfiguration)
           assert(fs.delete(thirdPath, true))
-          checkAnswer(df, Seq(Row(0), Row(1)))
+          checkAnswer(df, Seq(Row("0"), Row("1")))
         }
       }
 
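The switch from spark.range (a LongType column) to string literals appears to be what lets the test run against every source in allFileBasedDataSources, since the built-in text source accepts only a single string column. Below is a standalone sketch, not the suite code, of the behavior the test exercises: write three directories, build a DataFrame over all of them, delete one, then read with spark.sql.files.ignoreMissingFiles toggled. It assumes a local SparkSession; names such as IgnoreMissingFilesSketch and readWithMissingDir are illustrative only.

// Standalone sketch (assumed setup, not the suite code): demonstrates the
// spark.sql.files.ignoreMissingFiles behavior that the test above checks.
import java.nio.file.Files

import org.apache.hadoop.fs.Path
import org.apache.spark.sql.SparkSession

object IgnoreMissingFilesSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[2]").appName("sketch").getOrCreate()
    import spark.implicits._

    // Write three directories of string data, build a DataFrame over all of them,
    // then delete one directory before the read actually executes.
    def readWithMissingDir(format: String): Unit = {
      val basePath = Files.createTempDirectory("ignore-missing").toString
      Seq("0").toDF("a").write.format(format).save(new Path(basePath, "first").toString)
      Seq("1").toDF("a").write.format(format).save(new Path(basePath, "second").toString)
      val thirdPath = new Path(basePath, "third")
      Seq("2").toDF("a").write.format(format).save(thirdPath.toString)

      val df = spark.read.format(format).load(
        new Path(basePath, "first").toString,
        new Path(basePath, "second").toString,
        thirdPath.toString)

      val fs = thirdPath.getFileSystem(spark.sparkContext.hadoopConfiguration)
      fs.delete(thirdPath, true)
      df.show()   // only the rows from "first" and "second" remain
    }

    // With the flag on, the deleted directory is skipped and the read succeeds.
    spark.conf.set("spark.sql.files.ignoreMissingFiles", "true")
    readWithMissingDir("parquet")

    // With the flag off, the same read fails at execution time.
    spark.conf.set("spark.sql.files.ignoreMissingFiles", "false")
    try readWithMissingDir("parquet") catch {
      case e: Exception => println(s"Read failed as expected: ${e.getMessage}")
    }

    spark.stop()
  }
}

Given the test name, the truncated remainder of the suite presumably runs the same write/delete/read cycle once with the flag enabled and once with it disabled; the sketch mirrors that shape without the suite's helpers (withSQLConf, checkAnswer).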
