From 9bdffb01d13647fbd8e07eed800a210b9ce28a85 Mon Sep 17 00:00:00 2001
From: Marcelo Vanzin
Date: Mon, 18 Aug 2014 15:42:22 -0700
Subject: [PATCH] Undo sbt build changes.

---
 pom.xml                  |  15 +++++
 project/Relocator.scala  | 133 ---------------------------------------
 project/SparkBuild.scala |  61 ++----------------
 3 files changed, 21 insertions(+), 188 deletions(-)
 delete mode 100644 project/Relocator.scala

diff --git a/pom.xml b/pom.xml
index 7e38c2d1b3d42..cadf7e32cdb3a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1018,6 +1018,21 @@
     </profile>
 
+    <profile>
+      <id>sbt</id>
+      <dependencies>
+        <dependency>
+          <groupId>com.google.guava</groupId>
+          <artifactId>guava</artifactId>
+          <scope>compile</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+
     <profile>
       <id>spark-ganglia-lgpl</id>
diff --git a/project/Relocator.scala b/project/Relocator.scala
deleted file mode 100644
index c8c0a333b77b7..0000000000000
--- a/project/Relocator.scala
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io._
-
-import scala.util.matching.Regex
-
-import org.objectweb.asm._
-import org.objectweb.asm.commons._
-import sbtassembly.Plugin._
-
-/**
- * Relocates classes that match the configuration to a new location. Tries to match the options
- * available in the maven-shade-plugin.
- *
- * @param prefix Prefix that classes to be relocated must match.
- * @param shaded New prefix for classes that match.
- * @param includes Regexes for classes to include inside the matching package (empty = all).
- * @param excludes Regexes for classes to exclude from the matching package (empty = none).
- */
-class Relocator(prefix: String, shaded: String, includes: Seq[Regex], excludes: Seq[Regex]) {
-
-  /**
-   * Renames a Java class name based on the configured rules for this relocator.
-   *
-   * @param name Class name to relocate.
-   * @return Relocated name (may be same as original).
-   */
-  def rename(name: String): String = {
-    val javaName = name.replace('/', '.')
-    if (shouldRename(javaName)) {
-      val renamed = shaded + javaName.substring(prefix.length())
-      renamed.replace('.', '/')
-    } else {
-      name
-    }
-  }
-
-  private def shouldRename(name: String) =
-    name.startsWith(prefix) && isIncluded(name) && !isExcluded(name)
-
-  private def isIncluded(name: String) =
-    includes.isEmpty || !includes.filter { m => m.pattern.matcher(name).matches() }.isEmpty
-
-  private def isExcluded(name: String) =
-    !excludes.isEmpty && !excludes.filter { m => m.pattern.matcher(name).matches() }.isEmpty
-
-}
-
-class RelocatorRemapper(relocators: List[Relocator]) extends Remapper {
-
-  override def mapValue(obj: Object) = {
-    if (obj.isInstanceOf[String]) {
-      rename(obj.asInstanceOf[String])
-    } else {
-      super.mapValue(obj)
-    }
-  }
-
-  override def map(name: String) = {
-    rename(name)
-  }
-
-  def rename(name: String): String = {
-    var result = name
-    relocators.foreach { r => result = r.rename(result) }
-    result
-  }
-
-}
-
-/**
- * Tries to emulate part of the class relocation behavior of maven-shade-plugin. Classes that
- * should be relocated are moved to a new location, and all classes are passed through the
- * remapper so that references to relocated classes are fixed.
- *
- * @param relocators List of relocators to apply to classes being shaded.
- */
-class ShadeStrategy(relocators: List[Relocator]) extends MergeStrategy {
-
-  private val remapper = new RelocatorRemapper(relocators)
-
-  def name = "shade"
-
-  override def apply(tempDir: File, path: String, files: Seq[File]) = {
-    val (file, newPath) =
-      if (relocators.isEmpty || !files.head.getAbsolutePath().endsWith(".class")) {
-        (files.head, path)
-      } else {
-        val className = path.substring(0, path.length() - ".class".length())
-        (remap(files.head, tempDir), remapper.rename(className) + ".class")
-      }
-    Right(Seq(file -> newPath))
-  }
-
-  private def remap(klass: File, tempDir: File): File = {
-    var in: Option[FileInputStream] = None
-    var out: Option[FileOutputStream] = None
-    try {
-      in = Some(new FileInputStream(klass))
-
-      val writer = new ClassWriter(0)
-      val visitor = new RemappingClassAdapter(writer, remapper)
-      val reader = new ClassReader(in.get)
-      reader.accept(visitor, ClassReader.EXPAND_FRAMES)
-
-      val remappedPath = File.createTempFile(klass.getName(), null, tempDir)
-      out = Some(new FileOutputStream(remappedPath))
-      out.get.write(writer.toByteArray())
-      out.get.close()
-
-      remappedPath
-    } finally {
-      in.foreach { _.close() }
-      out.foreach { _.close() }
-    }
-  }
-
-}
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index e21b679a3cb84..4c696d3d385fb 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -30,12 +30,10 @@ object BuildCommons {
 
   private val buildLocation = file(".").getAbsoluteFile.getParentFile
 
-  val coreProject@Seq(core) = Seq("core").map(ProjectRef(buildLocation, _))
-
-  val allProjects@Seq(bagel, catalyst, graphx, hive, hiveThriftServer, mllib, repl,
+  val allProjects@Seq(bagel, catalyst, core, graphx, hive, hiveThriftServer, mllib, repl,
     sql, streaming, streamingFlumeSink, streamingFlume, streamingKafka, streamingMqtt,
     streamingTwitter, streamingZeromq) =
-    Seq("bagel", "catalyst", "graphx", "hive", "hive-thriftserver", "mllib", "repl",
+    Seq("bagel", "catalyst", "core", "graphx", "hive", "hive-thriftserver", "mllib", "repl",
       "sql", "streaming", "streaming-flume-sink", "streaming-flume", "streaming-kafka",
      "streaming-mqtt", "streaming-twitter",
"streaming-zeromq").map(ProjectRef(buildLocation, _)) @@ -63,7 +61,7 @@ object SparkBuild extends PomBuild { def backwardCompatibility = { import scala.collection.mutable var isAlphaYarn = false - var profiles: mutable.Seq[String] = mutable.Seq.empty + var profiles: mutable.Seq[String] = mutable.Seq("sbt") if (Properties.envOrNone("SPARK_GANGLIA_LGPL").isDefined) { println("NOTE: SPARK_GANGLIA_LGPL is deprecated, please use -Pspark-ganglia-lgpl flag.") profiles ++= Seq("spark-ganglia-lgpl") @@ -136,19 +134,15 @@ object SparkBuild extends PomBuild { // Note ordering of these settings matter. /* Enable shared settings on all projects */ - (coreProject ++ allProjects ++ optionallyEnabledProjects ++ assemblyProjects) - .foreach(enable(sharedSettings)) + (allProjects ++ optionallyEnabledProjects ++ assemblyProjects).foreach(enable(sharedSettings)) /* Enable tests settings for all projects except examples, assembly and tools */ - (coreProject ++ allProjects ++ optionallyEnabledProjects).foreach(enable(TestSettings.settings)) + (allProjects ++ optionallyEnabledProjects).foreach(enable(TestSettings.settings)) // TODO: Add Sql to mima checks - (coreProject ++ allProjects).filterNot(x => Seq(spark, sql, hive, hiveThriftServer, catalyst, repl, + allProjects.filterNot(x => Seq(spark, sql, hive, hiveThriftServer, catalyst, repl, streamingFlumeSink).contains(x)).foreach(x => enable(MimaBuild.mimaSettings(sparkHome, x))(x)) - /* Set up assembly settings for the core project. */ - coreProject.foreach(enable(CoreAssembly.settings)) - /* Enable Assembly for all assembly projects */ assemblyProjects.foreach(enable(Assembly.settings)) @@ -234,7 +228,6 @@ object SQL { object Hive { lazy val settings = Seq( - javaOptions += "-XX:MaxPermSize=1g", // Multiple queries rely on the TestHive singleton. See comments there for more details. parallelExecution in Test := false, @@ -265,19 +258,12 @@ object Assembly { import sbtassembly.Plugin._ import AssemblyKeys._ - private val relocators = List( - new Relocator("com.google", "org.spark-project.guava", Seq("com\\.google\\.common\\..*".r), - Seq("com\\.google\\.common\\.base\\.Optional.*".r))) - private val shade = new ShadeStrategy(relocators) - lazy val settings = assemblySettings ++ Seq( test in assembly := {}, jarName in assembly <<= (version, moduleName) map { (v, mName) => mName + "-"+v + "-hadoop" + Option(System.getProperty("hadoop.version")).getOrElse("1.0.4") + ".jar" }, mergeStrategy in assembly := { case PathList("org", "datanucleus", xs @ _*) => MergeStrategy.discard - case PathList("org", "objectweb", "asm", xs @ _*) => MergeStrategy.discard - case m if m.endsWith(".class") => shade case m if m.toLowerCase.endsWith("manifest.mf") => MergeStrategy.discard case m if m.toLowerCase.matches("meta-inf.*\\.sf$") => MergeStrategy.discard case "log4j.properties" => MergeStrategy.discard @@ -289,41 +275,6 @@ object Assembly { } -/** - * Settings for the spark-core artifact. We don't want to expose Guava as a compile-time dependency, - * but at the same time the Java API exposes a Guava type (Optional). So we package it with the - * spark-core jar using the assembly plugin, and use the assembly deliverable as the main artifact - * for that project, disabling the non-assembly jar. 
- */
-object CoreAssembly {
-  import sbtassembly.Plugin._
-  import AssemblyKeys._
-
-  lazy val settings = assemblySettings ++ Seq(
-    publishArtifact in (Compile, packageBin) := false,
-    test in assembly := {},
-    jarName in assembly <<= (name, scalaVersion, version) map {
-      _ + "_" + _ + "-" + _ + "-assembly.jar"
-    },
-    excludedJars in assembly := {
-      val cp = (fullClasspath in assembly).value
-      cp filter {!_.data.getName.startsWith("guava")}
-    },
-    mergeStrategy in assembly := {
-      case PathList("com", "google", "common", "base", xs @ _*) =>
-        if (xs.size == 1 && xs(0).startsWith("Optional")) {
-          MergeStrategy.first
-        } else {
-          MergeStrategy.discard
-        }
-      case PathList("META-INF", "maven", "com.google.guava", xs @ _*) => MergeStrategy.discard
-      case m if m.toLowerCase.endsWith("manifest.mf") => MergeStrategy.discard
-      case _ => MergeStrategy.first
-    }
-  ) ++ addArtifact(Artifact("spark-core"), assembly).settings
-
-}
-
 object Unidoc {
 
   import BuildCommons._
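
Note on the removed Relocator: its rename rules mirror maven-shade-plugin's
relocation semantics. Below is a condensed, self-contained sketch of how the
configuration used by the removed Assembly object behaves; RelocatorDemo and
RelocatorDemoApp are illustrative names, not part of the patch.

    import scala.util.matching.Regex

    // Condensed version of the removed Relocator's matching rules: a class is
    // renamed when it starts with the prefix, matches an include regex (or the
    // include list is empty), and matches no exclude regex.
    class RelocatorDemo(prefix: String, shaded: String,
        includes: Seq[Regex], excludes: Seq[Regex]) {
      def rename(name: String): String = {
        val javaName = name.replace('/', '.')
        val included = includes.isEmpty || includes.exists(_.pattern.matcher(javaName).matches())
        val excluded = excludes.exists(_.pattern.matcher(javaName).matches())
        if (javaName.startsWith(prefix) && included && !excluded) {
          (shaded + javaName.substring(prefix.length)).replace('.', '/')
        } else {
          name
        }
      }
    }

    object RelocatorDemoApp extends App {
      // The same configuration the removed Assembly object passed to Relocator.
      val guava = new RelocatorDemo("com.google", "org.spark-project.guava",
        Seq("com\\.google\\.common\\..*".r),
        Seq("com\\.google\\.common\\.base\\.Optional.*".r))

      // Guava internals are relocated:
      // prints org/spark-project/guava/common/base/Joiner
      println(guava.rename("com/google/common/base/Joiner"))
      // Optional is excluded because the Java API exposes it:
      // prints com/google/common/base/Optional
      println(guava.rename("com/google/common/base/Optional"))
    }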
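
Note on the removed CoreAssembly: per its comment, spark-core must not expose
Guava as a compile-time dependency, yet the Java API exposes
com.google.common.base.Optional, so the core assembly bundled the Guava jar,
kept Optional* at its original coordinates, and discarded the rest of the
com.google.common.base package. A minimal sketch of that merge decision
follows; the decide helper is hypothetical shorthand, with its result strings
standing in for MergeStrategy.first / MergeStrategy.discard.

    object CoreAssemblyMergeDemo {
      // Mirrors the decisions of the removed CoreAssembly mergeStrategy.
      def decide(path: String): String = path.split('/').toList match {
        // Keep Optional (and its inner classes) at the original coordinates...
        case "com" :: "google" :: "common" :: "base" :: rest
            if rest.size == 1 && rest.head.startsWith("Optional") => "first"
        // ...but drop everything else under com.google.common.base.
        case "com" :: "google" :: "common" :: "base" :: _ => "discard"
        // Drop Guava's Maven metadata and jar manifests.
        case "META-INF" :: "maven" :: "com.google.guava" :: _ => "discard"
        case _ if path.toLowerCase.endsWith("manifest.mf") => "discard"
        // Everything else (e.g. com.google.common.collect) is kept as-is.
        case _ => "first"
      }
    }

    // CoreAssemblyMergeDemo.decide("com/google/common/base/Optional.class") == "first"
    // CoreAssemblyMergeDemo.decide("com/google/common/base/Joiner.class")   == "discard"
    // CoreAssemblyMergeDemo.decide("com/google/common/collect/Lists.class") == "first"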