diff --git a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
index 56cc3daecb3a6..c55b29b15051d 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
@@ -314,7 +314,8 @@ class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with Matchers
     all (directSiteRelativeLinks) should not startWith (knoxBaseUrl)
   }
 
-  test("static relative links are prefixed with uiRoot (spark.ui.proxyBase)") {
+  // TODO (SPARK-31723): re-enable it
+  ignore("static relative links are prefixed with uiRoot (spark.ui.proxyBase)") {
     val uiRoot = Option(System.getenv("APPLICATION_WEB_PROXY_BASE")).getOrElse("/testwebproxybase")
     val page = new HistoryPage(server)
     val request = mock[HttpServletRequest]
diff --git a/core/src/test/scala/org/apache/spark/scheduler/BarrierTaskContextSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/BarrierTaskContextSuite.scala
index c4e5e7c700652..17bc3391cf946 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/BarrierTaskContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/BarrierTaskContextSuite.scala
@@ -39,7 +39,8 @@ class BarrierTaskContextSuite extends SparkFunSuite with LocalSparkContext with
     sc = new SparkContext(conf)
   }
 
-  test("global sync by barrier() call") {
+  // TODO (SPARK-31730): re-enable it
+  ignore("global sync by barrier() call") {
     initLocalClusterSparkContext()
     val rdd = sc.makeRDD(1 to 10, 4)
     val rdd2 = rdd.barrier().mapPartitions { it =>
@@ -131,7 +132,8 @@ class BarrierTaskContextSuite extends SparkFunSuite with LocalSparkContext with
     assert(times2.max - times2.min <= 1000)
   }
 
-  test("support multiple barrier() call within a single task") {
+  // TODO (SPARK-31730): re-enable it
+  ignore("support multiple barrier() call within a single task") {
     initLocalClusterSparkContext()
     val rdd = sc.makeRDD(1 to 10, 4)
     val rdd2 = rdd.barrier().mapPartitions { it =>
diff --git a/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaMicroBatchSourceSuite.scala b/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaMicroBatchSourceSuite.scala
index a4601b91af0d6..bdad214a91343 100644
--- a/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaMicroBatchSourceSuite.scala
+++ b/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaMicroBatchSourceSuite.scala
@@ -349,7 +349,8 @@ abstract class KafkaMicroBatchSourceSuiteBase extends KafkaSourceSuiteBase {
     )
   }
 
-  test("subscribing topic by pattern with topic deletions") {
+  // TODO (SPARK-31731): re-enable it
+  ignore("subscribing topic by pattern with topic deletions") {
     val topicPrefix = newTopic()
     val topic = topicPrefix + "-seems"
     val topic2 = topicPrefix + "-bad"
diff --git a/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaRelationSuite.scala b/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaRelationSuite.scala
index 32d056140a0d7..e5f3a229622e1 100644
--- a/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaRelationSuite.scala
+++ b/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaRelationSuite.scala
@@ -179,7 +179,8 @@ abstract class KafkaRelationSuiteBase extends QueryTest with SharedSparkSession
       ("3", Seq(("e", "f".getBytes(UTF_8)), ("e", "g".getBytes(UTF_8))))).toDF)
   }
 
-  test("timestamp provided for starting and ending") {
+  // TODO (SPARK-31729): re-enable it
+  ignore("timestamp provided for starting and ending") {
     val (topic, timestamps) = prepareTimestampRelatedUnitTest
 
     // timestamp both presented: starting "first" ending "finalized"
diff --git a/external/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/DirectKafkaStreamSuite.scala b/external/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/DirectKafkaStreamSuite.scala
index 925327d9d58e6..72cf3e8118228 100644
--- a/external/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/DirectKafkaStreamSuite.scala
+++ b/external/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/DirectKafkaStreamSuite.scala
@@ -332,7 +332,8 @@ class DirectKafkaStreamSuite
   }
 
   // Test to verify the offset ranges can be recovered from the checkpoints
-  test("offset recovery") {
+  // TODO (SPARK-31722): re-enable it
+  ignore("offset recovery") {
     val topic = "recovery"
     kafkaTestUtils.createTopic(topic)
     testDir = Utils.createTempDir()
@@ -418,8 +419,9 @@ class DirectKafkaStreamSuite
     ssc.stop()
   }
 
-  // Test to verify the offsets can be recovered from Kafka
-  test("offset recovery from kafka") {
+  // Test to verify the offsets can be recovered from Kafka
+  // TODO (SPARK-31722): re-enable it
+  ignore("offset recovery from kafka") {
     val topic = "recoveryfromkafka"
     kafkaTestUtils.createTopic(topic)
 
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/StreamingContextSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/StreamingContextSuite.scala
index 1d6637861511f..4eff464dcdafb 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/StreamingContextSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/StreamingContextSuite.scala
@@ -293,7 +293,8 @@ class StreamingContextSuite
     }
   }
 
-  test("stop gracefully") {
+  // TODO (SPARK-31728): re-enable it
+  ignore("stop gracefully") {
     val conf = new SparkConf().setMaster(master).setAppName(appName)
     conf.set("spark.dummyTimeConfig", "3600s")
     val sc = new SparkContext(conf)