Skip to content

Commit

Permalink
chore: Remove an unused config (#430)
Browse files Browse the repository at this point in the history
  • Loading branch information
andygrove authored May 14, 2024
1 parent 32b7318 commit 2bf7d12
Show file tree
Hide file tree
Showing 9 changed files with 1 addition and 22 deletions.
9 changes: 0 additions & 9 deletions common/src/main/scala/org/apache/comet/CometConf.scala
Original file line number Diff line number Diff line change
Expand Up @@ -120,15 +120,6 @@ object CometConf {
.booleanConf
.createWithDefault(false)

val COMET_EXEC_ALL_EXPR_ENABLED: ConfigEntry[Boolean] =
conf(s"$COMET_EXEC_CONFIG_PREFIX.all.expr.enabled")
.doc(
"Whether to enable all Comet exprs. By default, this config is false. Note that " +
"this config precedes all separate config 'spark.comet.exec.<expr_name>.enabled'. " +
"That being said, if this config is enabled, separate configs are ignored.")
.booleanConf
.createWithDefault(false)

val COMET_EXEC_SHUFFLE_ENABLED: ConfigEntry[Boolean] =
conf(s"$COMET_EXEC_CONFIG_PREFIX.shuffle.enabled")
.doc(
Expand Down
1 change: 0 additions & 1 deletion docs/source/user-guide/configs.md
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,6 @@ Comet provides the following configuration settings.
| spark.comet.enabled | Whether to enable Comet extension for Spark. When this is turned on, Spark will use Comet to read Parquet data source. Note that to enable native vectorized execution, both this config and 'spark.comet.exec.enabled' need to be enabled. By default, this config is the value of the env var `ENABLE_COMET` if set, or true otherwise. | true |
| spark.comet.exceptionOnDatetimeRebase | Whether to throw an exception when encountering dates/timestamps from the legacy hybrid (Julian + Gregorian) calendar. Since Spark 3, dates/timestamps have been written according to the Proleptic Gregorian calendar. When this is true, Comet will throw exceptions when encountering dates/timestamps that were written by Spark versions before 3.0. If this is false, these dates/timestamps will be read as if they had been written to the Proleptic Gregorian calendar and will not be rebased. | false |
| spark.comet.exec.all.enabled | Whether to enable all Comet operators. By default, this config is false. Note that this config takes precedence over the individual configs 'spark.comet.exec.&lt;operator_name&gt;.enabled'; if this config is enabled, the individual configs are ignored. | false |
| spark.comet.exec.all.expr.enabled | Whether to enable all Comet exprs. By default, this config is false. Note that this config precedes all separate config 'spark.comet.exec.<expr_name>.enabled'. That being said, if this config is enabled, separate configs are ignored. | false |
| spark.comet.exec.enabled | Whether to enable Comet native vectorized execution for Spark. This controls whether Spark should convert operators into their Comet counterparts and execute them in native space. Note: each operator is associated with a separate config in the format of 'spark.comet.exec.<operator_name>.enabled' at the moment, and both the config and this need to be turned on, in order for the operator to be executed in native. By default, this config is false. | false |
| spark.comet.exec.memoryFraction | The fraction of memory from Comet memory overhead that the native memory manager can use for execution. The purpose of this config is to set aside memory for untracked data structures, as well as imprecise size estimation during memory acquisition. Default value is 0.7. | 0.7 |
| spark.comet.exec.shuffle.codec | The codec of Comet native shuffle used to compress shuffle data. Only zstd is supported. | zstd |
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -954,10 +954,6 @@ object CometSparkSessionExtensions extends Logging {
COMET_EXEC_ALL_OPERATOR_ENABLED.get(conf)
}

private[comet] def isCometAllExprEnabled(conf: SQLConf): Boolean = {
COMET_EXEC_ALL_EXPR_ENABLED.get(conf)
}

private[comet] def isSchemaSupported(schema: StructType): Boolean =
schema.map(_.dataType).forall(isTypeSupported)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1396,7 +1396,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
CometConf.COMET_ENABLED.key -> "true",
CometConf.COMET_EXEC_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_ENFORCE_MODE_ENABLED.key -> "true",
CometConf.COMET_EXEC_ALL_EXPR_ENABLED.key -> "true",
CometConf.COMET_EXEC_ALL_OPERATOR_ENABLED.key -> "true",
"spark.sql.extendedExplainProvider" -> "org.apache.comet.ExtendedExplainInfo") {
val table = "test"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -176,8 +176,7 @@ class CometNativeShuffleSuite extends CometTestBase with AdaptiveSparkPlanHelper
Seq(10, 201).foreach { numPartitions =>
withSQLConf(
CometConf.COMET_BATCH_SIZE.key -> "10",
CometConf.COMET_EXEC_ALL_OPERATOR_ENABLED.key -> "true",
CometConf.COMET_EXEC_ALL_EXPR_ENABLED.key -> "true") {
CometConf.COMET_EXEC_ALL_OPERATOR_ENABLED.key -> "true") {
withParquetTable((0 until 50).map(i => (1.toString, 2.toString, (i + 1).toLong)), "tbl") {
val df = sql("SELECT * FROM tbl")
.filter($"_1" === 1.toString)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -155,7 +155,6 @@ class CometTPCDSQuerySuite
conf.set(CometConf.COMET_ENABLED.key, "true")
conf.set(CometConf.COMET_EXEC_ENABLED.key, "true")
conf.set(CometConf.COMET_EXEC_ALL_OPERATOR_ENABLED.key, "true")
conf.set(CometConf.COMET_EXEC_ALL_EXPR_ENABLED.key, "true")
conf.set(CometConf.COMET_EXEC_SHUFFLE_ENABLED.key, "true")
conf.set(CometConf.COMET_MEMORY_OVERHEAD.key, "20g")
conf.set(MEMORY_OFFHEAP_ENABLED.key, "true")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,6 @@ class CometTPCHQuerySuite extends QueryTest with CometTPCBase with SQLQueryTestH
conf.set(CometConf.COMET_ENABLED.key, "true")
conf.set(CometConf.COMET_EXEC_ENABLED.key, "true")
conf.set(CometConf.COMET_EXEC_ALL_OPERATOR_ENABLED.key, "true")
conf.set(CometConf.COMET_EXEC_ALL_EXPR_ENABLED.key, "true")
conf.set(CometConf.COMET_EXEC_SHUFFLE_ENABLED.key, "true")
conf.set(CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key, "true")
conf.set(MEMORY_OFFHEAP_ENABLED.key, "true")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,6 @@ abstract class CometTestBase
conf.set(CometConf.COMET_ENABLED.key, "true")
conf.set(CometConf.COMET_EXEC_ENABLED.key, "true")
conf.set(CometConf.COMET_EXEC_ALL_OPERATOR_ENABLED.key, "true")
conf.set(CometConf.COMET_EXEC_ALL_EXPR_ENABLED.key, "true")
conf.set(CometConf.COMET_ROW_TO_COLUMNAR_ENABLED.key, "true")
conf.set(CometConf.COMET_MEMORY_OVERHEAD.key, "2g")
conf
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -260,7 +260,6 @@ trait CometPlanStabilitySuite extends DisableAdaptiveExecutionSuite with TPCDSBa
CometConf.COMET_EXEC_ENABLED.key -> "true",
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_EXEC_ALL_OPERATOR_ENABLED.key -> "true",
CometConf.COMET_EXEC_ALL_EXPR_ENABLED.key -> "true",
CometConf.COMET_CAST_ALLOW_INCOMPATIBLE.key -> "true", // needed for v1.4/q9, v1.4/q44, v2.7.0/q6, v2.7.0/q64
"spark.sql.readSideCharPadding" -> "false",
SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "10MB") {
Expand Down Expand Up @@ -288,7 +287,6 @@ trait CometPlanStabilitySuite extends DisableAdaptiveExecutionSuite with TPCDSBa
conf.set(CometConf.COMET_EXEC_ENABLED.key, "true")
conf.set(CometConf.COMET_MEMORY_OVERHEAD.key, "1g")
conf.set(CometConf.COMET_EXEC_ALL_OPERATOR_ENABLED.key, "true")
conf.set(CometConf.COMET_EXEC_ALL_EXPR_ENABLED.key, "true")
conf.set(CometConf.COMET_EXEC_SHUFFLE_ENABLED.key, "true")

new TestSparkSession(new SparkContext("local[1]", this.getClass.getCanonicalName, conf))
Expand Down

0 comments on commit 2bf7d12

Please sign in to comment.