[BUILD] Configure scalafmt. Currently, this applies only to Kernel. #4160

Merged · 2 commits · Feb 19, 2025
48 changes: 48 additions & 0 deletions .scalafmt.conf
@@ -0,0 +1,48 @@
# Copyright (2025) The Delta Lake Project Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

align = none
align.openParenDefnSite = false
align.openParenCallSite = false
align.tokens = []
importSelectors = "singleLine"
optIn.configStyleArguments = false
continuationIndent {
  callSite = 2
  defnSite = 4
}
danglingParentheses {
  defnSite = false
  callSite = false
}
docstrings {
  style = Asterisk
  wrap = no
}
literals.hexDigits = upper
maxColumn = 100
newlines {
  beforeCurlyLambdaParams = false
  source = keep
}
rewrite.rules = [Imports]
rewrite.imports.sort = scalastyle
rewrite.imports.groups = [
  ["java\\..*"],
  ["scala\\..*"],
  ["io\\.delta\\..*"],
  ["org\\.apache\\.spark\\.sql\\.delta.*"]
]
runner.dialect = scala212
version = 3.8.6
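
With rewrite.rules = [Imports] and the groups above, scalafmt sorts imports in scalastyle order and separates each configured group with a blank line. A minimal sketch of the effect on a hypothetical file (not from this PR):

// Before formatting:
import org.apache.spark.sql.delta.DeltaLog
import java.util.Optional
import io.delta.kernel.utils.CloseableIterator
import scala.collection.JavaConverters._

// After scalafmt, regrouped with blank lines between groups:
import java.util.Optional

import scala.collection.JavaConverters._

import io.delta.kernel.utils.CloseableIterator

import org.apache.spark.sql.delta.DeltaLog
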
79 changes: 49 additions & 30 deletions build.sbt
@@ -157,11 +157,26 @@ lazy val commonSettings = Seq(
unidocSourceFilePatterns := Nil,
)

-// enforce java code style
-def javafmtCheckSettings() = Seq(
+////////////////////////////
+// START: Code Formatting //
+////////////////////////////
+
+/** Enforce java code style on compile. */
+def javafmtCheckSettings(): Seq[Def.Setting[Task[CompileAnalysis]]] = Seq(
(Compile / compile) := ((Compile / compile) dependsOn (Compile / javafmtCheckAll)).value
)
+
+/** Enforce scala code style on compile. */
+def scalafmtCheckSettings(): Seq[Def.Setting[Task[CompileAnalysis]]] = Seq(
+(Compile / compile) := ((Compile / compile) dependsOn (Compile / scalafmtCheckAll)).value,
+)
+
+// TODO: define fmtAll and fmtCheckAll tasks that run both scala and java fmts/checks
+
+//////////////////////////
+// END: Code Formatting //
+//////////////////////////

/**
* Note: we cannot access sparkVersion.value here, since that can only be used within a task or
* setting macro.
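
The TODO in the hunk above could be met with sbt command aliases that chain the two plugins' tasks. A minimal sketch, assuming the stock task names shipped by sbt-java-formatter and sbt-scalafmt (javafmtAll, javafmtCheckAll, scalafmtAll, scalafmtCheckAll); the alias names fmtAll and fmtCheckAll are hypothetical, not part of this diff:

// Hypothetical aggregates for the TODO above.
addCommandAlias("fmtAll", ";javafmtAll ;scalafmtAll")
addCommandAlias("fmtCheckAll", ";javafmtCheckAll ;scalafmtCheckAll")
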
@@ -233,7 +248,7 @@ def runTaskOnlyOnSparkMaster[T](
}

lazy val connectCommon = (project in file("spark-connect/common"))
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "delta-connect-common",
commonSettings,
@@ -272,7 +287,7 @@ lazy val connectCommon = (project in file("spark-connect/common"))
)

lazy val connectClient = (project in file("spark-connect/client"))
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.dependsOn(connectCommon % "compile->compile;test->test;provided->provided")
.settings(
name := "delta-connect-client",
@@ -361,7 +376,7 @@ lazy val connectClient = (project in file("spark-connect/client"))
lazy val connectServer = (project in file("spark-connect/server"))
.dependsOn(connectCommon % "compile->compile;test->test;provided->provided")
.dependsOn(spark % "compile->compile;test->test;provided->provided")
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "delta-connect-server",
commonSettings,
@@ -405,7 +420,7 @@ lazy val connectServer = (project in file("spark-connect/server"))
lazy val spark = (project in file("spark"))
.dependsOn(storage)
.enablePlugins(Antlr4Plugin)
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-spark",
commonSettings,
@@ -493,7 +508,7 @@ lazy val spark = (project in file("spark"))

lazy val contribs = (project in file("contribs"))
.dependsOn(spark % "compile->compile;test->test;provided->provided")
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-contribs",
commonSettings,
@@ -532,7 +547,7 @@ lazy val contribs = (project in file("contribs"))

lazy val sharing = (project in file("sharing"))
.dependsOn(spark % "compile->compile;test->test;provided->provided")
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "delta-sharing-spark",
commonSettings,
@@ -558,12 +573,14 @@ lazy val sharing = (project in file("sharing"))
).configureUnidoc()

lazy val kernelApi = (project in file("kernel/kernel-api"))
+.enablePlugins(ScalafmtPlugin)
.settings(
name := "delta-kernel-api",
commonSettings,
scalaStyleSettings,
javaOnlyReleaseSettings,
javafmtCheckSettings,
+scalafmtCheckSettings,
Test / javaOptions ++= Seq("-ea"),
libraryDependencies ++= Seq(
"org.roaringbitmap" % "RoaringBitmap" % "0.9.25",
@@ -638,6 +655,7 @@ lazy val kernelApi = (project in file("kernel/kernel-api"))
).configureUnidoc(docTitle = "Delta Kernel")

lazy val kernelDefaults = (project in file("kernel/kernel-defaults"))
+.enablePlugins(ScalafmtPlugin)
.dependsOn(kernelApi)
.dependsOn(kernelApi % "test->test")
.dependsOn(storage)
@@ -650,6 +668,7 @@ lazy val kernelDefaults = (project in file("kernel/kernel-defaults"))
scalaStyleSettings,
javaOnlyReleaseSettings,
javafmtCheckSettings,
+scalafmtCheckSettings,
Test / javaOptions ++= Seq("-ea"),
libraryDependencies ++= Seq(
"org.apache.hadoop" % "hadoop-client-runtime" % hadoopVersion,
@@ -682,7 +701,7 @@ lazy val kernelDefaults = (project in file("kernel/kernel-defaults"))
// TODO unidoc
// TODO(scott): figure out a better way to include tests in this project
lazy val storage = (project in file("storage"))
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-storage",
commonSettings,
@@ -707,7 +726,7 @@ lazy val storage = (project in file("storage"))
lazy val storageS3DynamoDB = (project in file("storage-s3-dynamodb"))
.dependsOn(storage % "compile->compile;test->test;provided->provided")
.dependsOn(spark % "test->test")
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-storage-s3-dynamodb",
commonSettings,
Expand All @@ -733,7 +752,7 @@ val icebergSparkRuntimeArtifactName = {
lazy val testDeltaIcebergJar = (project in file("testDeltaIcebergJar"))
// delta-iceberg depends on delta-spark! So, we need to include it during our test.
.dependsOn(spark % "test")
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "test-delta-iceberg-jar",
commonSettings,
@@ -763,7 +782,7 @@ val deltaIcebergSparkIncludePrefixes = Seq(
// scalastyle:off println
lazy val iceberg = (project in file("iceberg"))
.dependsOn(spark % "compile->compile;test->test;provided->provided")
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-iceberg",
commonSettings,
@@ -833,7 +852,7 @@ lazy val generateIcebergJarsTask = TaskKey[Unit]("generateIcebergJars", "Generat

lazy val icebergShaded = (project in file("icebergShaded"))
.dependsOn(spark % "provided")
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "iceberg-shaded",
commonSettings,
@@ -864,7 +883,7 @@ lazy val icebergShaded = (project in file("icebergShaded"))

lazy val hudi = (project in file("hudi"))
.dependsOn(spark % "compile->compile;test->test;provided->provided")
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-hudi",
commonSettings,
@@ -916,7 +935,7 @@ lazy val hudi = (project in file("hudi"))

lazy val hive = (project in file("connectors/hive"))
.dependsOn(standaloneCosmetic)
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-hive",
commonSettings,
@@ -933,7 +952,7 @@ lazy val hive = (project in file("connectors/hive"))

lazy val hiveAssembly = (project in file("connectors/hive-assembly"))
.dependsOn(hive)
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "delta-hive-assembly",
Compile / unmanagedJars += (hive / Compile / packageBin / packageBin).value,
@@ -960,7 +979,7 @@ lazy val hiveAssembly = (project in file("connectors/hive-assembly"))

lazy val hiveTest = (project in file("connectors/hive-test"))
.dependsOn(goldenTables % "test")
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "hive-test",
// Make the project use the assembly jar to ensure we are testing the assembly jar that users
@@ -993,7 +1012,7 @@ lazy val hiveTest = (project in file("connectors/hive-test"))

lazy val hiveMR = (project in file("connectors/hive-mr"))
.dependsOn(hiveTest % "test->test")
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "hive-mr",
commonSettings,
@@ -1020,7 +1039,7 @@ lazy val hiveMR = (project in file("connectors/hive-mr"))

lazy val hiveTez = (project in file("connectors/hive-tez"))
.dependsOn(hiveTest % "test->test")
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "hive-tez",
commonSettings,
@@ -1064,7 +1083,7 @@ lazy val hiveTez = (project in file("connectors/hive-tez"))

lazy val hive2MR = (project in file("connectors/hive2-mr"))
.dependsOn(goldenTables % "test")
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "hive2-mr",
commonSettings,
@@ -1095,7 +1114,7 @@ lazy val hive2MR = (project in file("connectors/hive2-mr"))

lazy val hive2Tez = (project in file("connectors/hive2-tez"))
.dependsOn(goldenTables % "test")
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "hive2-tez",
commonSettings,
@@ -1162,7 +1181,7 @@ lazy val hive2Tez = (project in file("connectors/hive2-tez"))
*/
lazy val standaloneCosmetic = project
.dependsOn(storage) // this doesn't impact the output artifact (jar), only the pom.xml dependencies
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "delta-standalone",
commonSettings,
@@ -1182,7 +1201,7 @@ lazy val standaloneCosmetic = project
lazy val testStandaloneCosmetic = (project in file("connectors/testStandaloneCosmetic"))
.dependsOn(standaloneCosmetic)
.dependsOn(goldenTables % "test")
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "test-standalone-cosmetic",
commonSettings,
@@ -1199,7 +1218,7 @@ lazy val testStandaloneCosmetic = (project in file("connectors/testStandaloneCos
* except `ParquetSchemaConverter` are working without `parquet-hadoop` in testStandaloneCosmetic`.
*/
lazy val testParquetUtilsWithStandaloneCosmetic = project.dependsOn(standaloneCosmetic)
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "test-parquet-utils-with-standalone-cosmetic",
commonSettings,
@@ -1223,7 +1242,7 @@ def scalaCollectionPar(version: String) = version match {
* create a separate project to skip the shading.
*/
lazy val standaloneParquet = (project in file("connectors/standalone-parquet"))
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.dependsOn(standaloneWithoutParquetUtils)
.settings(
name := "delta-standalone-parquet",
@@ -1238,7 +1257,7 @@ lazy val standaloneParquet = (project in file("connectors/standalone-parquet"))

/** A dummy project to allow `standaloneParquet` depending on the shaded standalone jar. */
lazy val standaloneWithoutParquetUtils = project
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "delta-standalone-without-parquet-utils",
commonSettings,
@@ -1251,7 +1270,7 @@ lazy val standaloneWithoutParquetUtils = project
lazy val standalone = (project in file("connectors/standalone"))
.dependsOn(storage % "compile->compile;provided->provided")
.dependsOn(goldenTables % "test")
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "delta-standalone-original",
commonSettings,
@@ -1376,7 +1395,7 @@ lazy val compatibility = (project in file("connectors/oss-compatibility-tests"))

lazy val goldenTables = (project in file("connectors/golden-tables"))
.dependsOn(spark % "test") // depends on delta-spark
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings(
name := "golden-tables",
commonSettings,
Expand All @@ -1403,7 +1422,7 @@ def sqlDeltaImportScalaVersion(scalaBinaryVersion: String): String = {

lazy val sqlDeltaImport = (project in file("connectors/sql-delta-import"))
.dependsOn(spark)
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "sql-delta-import",
commonSettings,
@@ -1435,7 +1454,7 @@ lazy val flink = (project in file("connectors/flink"))
.dependsOn(standaloneCosmetic % "provided")
.dependsOn(kernelApi)
.dependsOn(kernelDefaults)
-.disablePlugins(JavaFormatterPlugin)
+.disablePlugins(JavaFormatterPlugin, ScalafmtPlugin)
.settings (
name := "delta-flink",
commonSettings,
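
Note the pattern across build.sbt: ScalafmtPlugin, like JavaFormatterPlugin, is an auto-enabled sbt plugin, so every non-kernel project has to opt out explicitly with disablePlugins; only kernelApi and kernelDefaults keep it, and they additionally gate compile on the format check.
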
CloseableIteratorSuite.scala
@@ -21,6 +21,7 @@ import scala.collection.JavaConverters._
import io.delta.kernel.internal.util.Utils
import io.delta.kernel.utils.CloseableIterator
import io.delta.kernel.utils.CloseableIterator.BreakableFilterResult
+
import org.scalatest.funsuite.AnyFunSuite

class CloseableIteratorSuite extends AnyFunSuite {
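
The one-line change above is the new import grouping in action: org.scalatest.funsuite.AnyFunSuite matches none of the configured groups, so scalafmt now separates it from the io.delta.kernel imports with a blank line.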