From e6f35d2991e6128288072cca457412d4b1605fdd Mon Sep 17 00:00:00 2001
From: Shuai Lin
Date: Fri, 27 Jan 2017 00:18:17 +0000
Subject: [PATCH] Fix k8s integration tests (#44)

* Fixed k8s integration test

- Enable spark ui explicitly for in-process submit
- Fixed some broken assertions in integration tests
- Fixed a scalastyle error in SparkDockerImageBuilder.scala
- Log into target/integration-tests.log like other modules

* Fixed line length.

* CR
---
 .../src/test/resources/log4j.properties      | 31 +++++++++++++++++++
 .../integrationtest/KubernetesSuite.scala    | 19 ++++++++++--
 .../docker/SparkDockerImageBuilder.scala     |  3 +-
 3 files changed, 49 insertions(+), 4 deletions(-)
 create mode 100644 resource-managers/kubernetes/integration-tests/src/test/resources/log4j.properties

diff --git a/resource-managers/kubernetes/integration-tests/src/test/resources/log4j.properties b/resource-managers/kubernetes/integration-tests/src/test/resources/log4j.properties
new file mode 100644
index 0000000000000..866126bc3c1c2
--- /dev/null
+++ b/resource-managers/kubernetes/integration-tests/src/test/resources/log4j.properties
@@ -0,0 +1,31 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/integration-tests.log
+log4j.rootCategory=INFO, file
+log4j.appender.file=org.apache.log4j.FileAppender
+log4j.appender.file.append=true
+log4j.appender.file.file=target/integration-tests.log
+log4j.appender.file.layout=org.apache.log4j.PatternLayout
+log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Ignore messages below warning level from a few verbose libraries.
+log4j.logger.com.sun.jersey=WARN
+log4j.logger.org.apache.hadoop=WARN
+log4j.logger.org.eclipse.jetty=WARN
+log4j.logger.org.mortbay=WARN
+log4j.logger.org.spark_project.jetty=WARN
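As context for the new properties file: log4j 1.x picks up log4j.properties from the root of the test classpath automatically, so the suite needs no explicit logging configuration to write to target/integration-tests.log. A minimal sketch of the effect, assuming log4j 1.x on the classpath (the object name is illustrative, not part of the patch):

    import org.apache.log4j.Logger

    object IntegrationLogCheck { // hypothetical, for illustration only
      def main(args: Array[String]): Unit = {
        // Lands in target/integration-tests.log via the file appender above.
        Logger.getLogger(getClass).info("hello from the integration tests")
        // Suppressed: org.eclipse.jetty is capped at WARN by the overrides above.
        Logger.getLogger("org.eclipse.jetty").info("jetty chatter below WARN")
      }
    }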
diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KubernetesSuite.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KubernetesSuite.scala
index 6a92ae1cba49f..c4bb389f5ada2 100644
--- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KubernetesSuite.scala
+++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KubernetesSuite.scala
@@ -36,6 +36,7 @@ import org.apache.spark.deploy.kubernetes.Client
 import org.apache.spark.deploy.kubernetes.integrationtest.docker.SparkDockerImageBuilder
 import org.apache.spark.deploy.kubernetes.integrationtest.minikube.Minikube
 import org.apache.spark.deploy.kubernetes.integrationtest.restapis.SparkRestApiV1
+import org.apache.spark.internal.Logging
 import org.apache.spark.status.api.v1.{ApplicationStatus, StageStatus}
 import org.apache.spark.util.Utils
 
@@ -82,8 +83,15 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
 
   before {
     Eventually.eventually(TIMEOUT, INTERVAL) {
-      assert(minikubeKubernetesClient.pods().list().getItems.isEmpty)
-      assert(minikubeKubernetesClient.services().list().getItems.isEmpty)
+      val podsList = minikubeKubernetesClient.pods().list()
+      assert(podsList == null
+        || podsList.getItems == null
+        || podsList.getItems.isEmpty
+      )
+      val servicesList = minikubeKubernetesClient.services().list()
+      assert(servicesList == null
+        || servicesList.getItems == null
+        || servicesList.getItems.isEmpty)
     }
   }
 
@@ -139,6 +147,9 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
   }
 
   test("Run a simple example") {
+    // We'll make assertions based on spark rest api, so we need to turn on
+    // spark.ui.enabled explicitly since the scalatest-maven-plugin would set it
+    // to false by default.
     val sparkConf = new SparkConf(true)
       .setMaster(s"k8s://https://${Minikube.getMinikubeIp}:8443")
       .set("spark.kubernetes.submit.caCertFile", clientConfig.getCaCertFile)
@@ -152,6 +163,8 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
       .set("spark.executor.cores", "1")
       .set("spark.executors.instances", "1")
       .set("spark.app.id", "spark-pi")
+      .set("spark.ui.enabled", "true")
+      .set("spark.testing", "false")
     val mainAppResource = s"file://$EXAMPLES_JAR"
 
     new Client(
@@ -174,6 +187,8 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
       "--num-executors", "1",
       "--upload-jars", HELPER_JAR,
       "--class", MAIN_CLASS,
+      "--conf", "spark.ui.enabled=true",
+      "--conf", "spark.testing=false",
       "--conf", s"spark.kubernetes.submit.caCertFile=${clientConfig.getCaCertFile}",
       "--conf", s"spark.kubernetes.submit.clientKeyFile=${clientConfig.getClientKeyFile}",
       "--conf", s"spark.kubernetes.submit.clientCertFile=${clientConfig.getClientCertFile}",
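The null guards in the new before block exist because, against a freshly started minikube, the fabric8 client's list() calls can apparently return a null list, or a list whose items are null; the old one-line assertions would then fail with a NullPointerException rather than retrying under Eventually. A sketch of the same check factored into a helper (the helper is hypothetical, not part of the patch):

    // Hypothetical helper: treats a null collection or zero items as "empty",
    // mirroring the assertions in the before {} block above.
    private def isNullOrEmpty(items: java.util.List[_]): Boolean =
      items == null || items.isEmpty

    // Usage, assuming the suite's minikubeKubernetesClient:
    //   val podsList = minikubeKubernetesClient.pods().list()
    //   assert(podsList == null || isNullOrEmpty(podsList.getItems))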
diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/docker/SparkDockerImageBuilder.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/docker/SparkDockerImageBuilder.scala
index 22d78142508c1..1aa6a7b7e70c2 100644
--- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/docker/SparkDockerImageBuilder.scala
+++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/docker/SparkDockerImageBuilder.scala
@@ -55,5 +55,4 @@ private[spark] class SparkDockerImageBuilder(private val dockerEnv: Map[String,
     dockerClient.build(Paths.get("target", "docker", "driver"), "spark-driver")
     dockerClient.build(Paths.get("target", "docker", "executor"), "spark-executor")
   }
-
-}
\ No newline at end of file
+}
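On the two new conf entries in KubernetesSuite: as the patch's own comment notes, the scalatest-maven-plugin sets spark.ui.enabled to false by default for test JVMs, while the v1 status REST API that the assertions query is served by the UI's web server, so both spark.ui.enabled and spark.testing are overridden for this suite. A minimal sketch of the override in isolation, assuming a plain SparkConf:

    import org.apache.spark.SparkConf

    // Re-enable the UI (and hence the status REST API) inside a test JVM
    // where the build plugin has disabled it via system properties.
    val conf = new SparkConf(true)
      .set("spark.ui.enabled", "true") // the v1 status REST API is served by the UI
      .set("spark.testing", "false")   // undo the test-mode default set by the plugin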