From e7c2c747018a17c3ab3371b6474c639f0007d81f Mon Sep 17 00:00:00 2001
From: Zhi Lin
Date: Mon, 10 Jul 2023 10:50:14 +0800
Subject: [PATCH] [Follow-up] Fix log4j and some dependencies for
 `shims-common` (#362) (#363)

* log4j and some fix in common

* remove redirectLog()

* nit

Co-authored-by: Yizhong Zhang
---
 core/pom.xml                                  |  4 +++
 core/raydp-main/pom.xml                       |  4 +++
 .../apache/spark/executor/RayDPExecutor.scala | 23 ------------
 core/shims/common/pom.xml                     | 35 +++++++++++++++++--
 core/shims/spark322/pom.xml                   |  4 +++
 5 files changed, 45 insertions(+), 25 deletions(-)

diff --git a/core/pom.xml b/core/pom.xml
index baeb3494..9283a3bd 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -63,6 +63,10 @@
         <exclusion>
           <groupId>org.apache.ivy</groupId>
           <artifactId>ivy</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
diff --git a/core/raydp-main/pom.xml b/core/raydp-main/pom.xml
index 69122da6..96bd04ee 100644
--- a/core/raydp-main/pom.xml
+++ b/core/raydp-main/pom.xml
@@ -61,6 +61,10 @@
         <exclusion>
           <groupId>org.apache.ivy</groupId>
           <artifactId>ivy</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
diff --git a/core/raydp-main/src/main/scala/org/apache/spark/executor/RayDPExecutor.scala b/core/raydp-main/src/main/scala/org/apache/spark/executor/RayDPExecutor.scala
index b873f091..0ed699dd 100644
--- a/core/raydp-main/src/main/scala/org/apache/spark/executor/RayDPExecutor.scala
+++ b/core/raydp-main/src/main/scala/org/apache/spark/executor/RayDPExecutor.scala
@@ -30,7 +30,6 @@ import io.ray.runtime.config.RayConfig
 import org.apache.arrow.vector.ipc.{ArrowStreamWriter, WriteChannel}
 import org.apache.arrow.vector.ipc.message.{IpcOption, MessageSerializer}
 import org.apache.arrow.vector.types.pojo.Schema
-import org.apache.log4j.{FileAppender => Log4jFileAppender, _}
 
 import org.apache.spark._
 import org.apache.spark.deploy.SparkHadoopUtil
@@ -111,7 +110,6 @@ class RayDPExecutor(
     }
     createWorkingDir(appId)
     setUserDir()
-    // redirectLog()
 
     val userClassPath = classPathEntries.split(java.io.File.pathSeparator)
       .filter(_.nonEmpty).map(new File(_).toURI.toURL)
@@ -251,27 +249,6 @@
     }
   }
 
-  def redirectLog(): Unit = {
-    val logFile = Paths.get(workingDir.getAbsolutePath, s"executor${executorId}.out")
-    val errorFile = Paths.get(workingDir.getAbsolutePath, s"executor${executorId}.err")
-    logInfo(s"Redirect executor log to ${logFile.toString}")
-    val appenders = LogManager.getRootLogger.getAllAppenders
-    // There should be a console appender. Use its layout.
-    val defaultAppender = appenders.nextElement().asInstanceOf[Appender]
-    val layout = defaultAppender.getLayout
-
-    val out = new Log4jFileAppender(layout, logFile.toString)
-    out.setName("outfile")
-
-    val err = new Log4jFileAppender(layout, errorFile.toString())
-    err.setName("errfile")
-    err.setThreshold(Level.ERROR)
-
-    LogManager.getRootLogger.addAppender(out)
-    LogManager.getRootLogger.addAppender(err)
-    LogManager.getRootLogger.removeAppender(defaultAppender)
-  }
-
   def createTemporaryRpcEnv(
       name: String,
       conf: SparkConf): Unit = {
diff --git a/core/shims/common/pom.xml b/core/shims/common/pom.xml
index 55329693..9e7e32f5 100644
--- a/core/shims/common/pom.xml
+++ b/core/shims/common/pom.xml
@@ -66,11 +66,32 @@
       <scope>provided</scope>
       <exclusions>
         <exclusion>
-          <groupId>org.apache.commons</groupId>
-          <artifactId>commons-text</artifactId>
+          <groupId>org.xerial.snappy</groupId>
+          <artifactId>snappy-java</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.commons</groupId>
+          <artifactId>commons-compress</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.commons</groupId>
+          <artifactId>commons-text</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.ivy</groupId>
+          <artifactId>ivy</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
         </exclusion>
       </exclusions>
     </dependency>
+    <dependency>
+      <groupId>org.xerial.snappy</groupId>
+      <artifactId>snappy-java</artifactId>
+      <version>${snappy.version}</version>
+    </dependency>
     <dependency>
       <groupId>org.apache.commons</groupId>
       <artifactId>commons-text</artifactId>
@@ -81,5 +102,15 @@
       <artifactId>protobuf-java</artifactId>
       <version>${protobuf.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.ivy</groupId>
+      <artifactId>ivy</artifactId>
+      <version>${ivy.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-compress</artifactId>
+      <version>${commons.compress.version}</version>
+    </dependency>
   </dependencies>
 </project>
diff --git a/core/shims/spark322/pom.xml b/core/shims/spark322/pom.xml
index 5a322d55..6e0999ac 100644
--- a/core/shims/spark322/pom.xml
+++ b/core/shims/spark322/pom.xml
@@ -97,6 +97,10 @@
         <exclusion>
           <groupId>org.apache.ivy</groupId>
           <artifactId>ivy</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>