Commit 2d76099

Reply to comments from @MaxGekk

ostronaut committed Jan 6, 2025
1 parent ba29710 commit 2d76099

Showing 2 changed files with 11 additions and 18 deletions.
6 changes: 3 additions & 3 deletions sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
@@ -868,7 +868,7 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog with QueryErrorsBase
         summary = j.origin.context.summary)

     // TODO: although map type is not orderable, technically map type should be able to be
-    // used in equality comparison, remove this type check once we support it.
+    // used in equality comparison, remove this type check once we support it.
     case o if mapColumnInSetOperation(o).isDefined =>
       val mapCol = mapColumnInSetOperation(o).get
       o.failAnalysis(
@@ -891,15 +891,15 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog with QueryErrorsBase
       o.failAnalysis(
         errorClass = "UNSUPPORTED_FEATURE.PARTITION_BY_VARIANT",
         messageParameters = Map(
-          "expr" -> variantExpr.sql,
+          "expr" -> toSQLExpr(variantExpr),
           "dataType" -> toSQLType(variantExpr.dataType)))

     case o if mapExprInPartitionExpression(o).isDefined =>
       val mapExpr = mapExprInPartitionExpression(o).get
       o.failAnalysis(
         errorClass = "UNSUPPORTED_FEATURE.PARTITION_BY_MAP",
         messageParameters = Map(
-          "expr" -> mapExpr.sql,
+          "expr" -> toSQLExpr(mapExpr),
           "dataType" -> toSQLType(mapExpr.dataType)))

     case o if o.expressions.exists(!_.deterministic) &&
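Note on the change above: variantExpr.sql / mapExpr.sql render the bare SQL text of an expression, while toSQLExpr (from QueryErrorsBase) renders it as pretty SQL wrapped in double quotes, matching how other Spark error messages quote expressions; it also prints attributes unqualified, which is why tv.v becomes "v" in the test expectations below. A minimal standalone sketch of that quoting convention, inferred from the expected values in this diff (the helper here is illustrative, not Spark's actual implementation):

// Illustrative sketch only: mimics the quoting that toSQLExpr applies
// to expressions embedded in error-message parameters.
object QuoteSketch {
  // Wrap an expression's SQL text in double quotes, e.g. v -> "v".
  def quoteByDefault(text: String): String = "\"" + text + "\""

  def main(args: Array[String]): Unit = {
    println(quoteByDefault("v"))                              // prints "v"
    println(quoteByDefault("parse_json(CAST(id AS STRING))")) // prints "parse_json(CAST(id AS STRING))"
  }
}

With expr.sql the expected parameter was a bare v; with toSQLExpr it becomes "v", which is what the updated assertions below check.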
23 changes: 8 additions & 15 deletions sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -316,15 +316,15 @@ class DataFrameSuite extends QueryTest
       exception = intercept[AnalysisException](df.repartition(5, col("v"))),
       condition = "UNSUPPORTED_FEATURE.PARTITION_BY_VARIANT",
       parameters = Map(
-        "expr" -> "v",
+        "expr" -> "\"v\"",
         "dataType" -> "\"VARIANT\"")
     )
     // nested variant column
     checkError(
       exception = intercept[AnalysisException](df.repartition(5, col("s"))),
       condition = "UNSUPPORTED_FEATURE.PARTITION_BY_VARIANT",
       parameters = Map(
-        "expr" -> "s",
+        "expr" -> "\"s\"",
         "dataType" -> "\"STRUCT<v: VARIANT NOT NULL>\"")
     )
     // variant producing expression
@@ -333,7 +333,7 @@
         intercept[AnalysisException](df.repartition(5, parse_json(col("id").cast("string")))),
       condition = "UNSUPPORTED_FEATURE.PARTITION_BY_VARIANT",
       parameters = Map(
-        "expr" -> "parse_json(CAST(id AS STRING))",
+        "expr" -> "\"parse_json(CAST(id AS STRING))\"",
         "dataType" -> "\"VARIANT\"")
     )
     // Partitioning by non-variant column works
@@ -350,7 +350,7 @@
         exception = intercept[AnalysisException](sql("SELECT * FROM tv DISTRIBUTE BY v")),
         condition = "UNSUPPORTED_FEATURE.PARTITION_BY_VARIANT",
         parameters = Map(
-          "expr" -> "tv.v",
+          "expr" -> "\"v\"",
           "dataType" -> "\"VARIANT\""),
         context = ExpectedContext(
           fragment = "DISTRIBUTE BY v",
@@ -361,7 +361,7 @@
         exception = intercept[AnalysisException](sql("SELECT * FROM tv DISTRIBUTE BY s")),
         condition = "UNSUPPORTED_FEATURE.PARTITION_BY_VARIANT",
         parameters = Map(
-          "expr" -> "tv.s",
+          "expr" -> "\"s\"",
           "dataType" -> "\"STRUCT<v: VARIANT NOT NULL>\""),
         context = ExpectedContext(
           fragment = "DISTRIBUTE BY s",
@@ -378,32 +378,25 @@
       exception = intercept[AnalysisException](df.repartition(5, col("m"))),
       condition = "UNSUPPORTED_FEATURE.PARTITION_BY_MAP",
       parameters = Map(
-        "expr" -> "m",
+        "expr" -> "\"m\"",
         "dataType" -> "\"MAP<BIGINT, BIGINT>\"")
     )
     // map producing expression
     checkError(
       exception = intercept[AnalysisException](df.repartition(5, map(col("id"), col("id")))),
       condition = "UNSUPPORTED_FEATURE.PARTITION_BY_MAP",
       parameters = Map(
-        "expr" -> "map(id, id)",
+        "expr" -> "\"map(id, id)\"",
         "dataType" -> "\"MAP<BIGINT, BIGINT>\"")
     )
-    // Partitioning by non-map column works
-    try {
-      df.repartition(5, col("id")).collect()
-    } catch {
-      case e: Exception =>
-        fail(s"Expected no exception to be thrown but an exception was thrown: ${e.getMessage}")
-    }
     // SQL
     withTempView("tv") {
       df.createOrReplaceTempView("tv")
       checkError(
         exception = intercept[AnalysisException](sql("SELECT * FROM tv DISTRIBUTE BY m")),
         condition = "UNSUPPORTED_FEATURE.PARTITION_BY_MAP",
         parameters = Map(
-          "expr" -> "tv.m",
+          "expr" -> "\"m\"",
           "dataType" -> "\"MAP<BIGINT, BIGINT>\""),
         context = ExpectedContext(
           fragment = "DISTRIBUTE BY m",
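Likewise, a hedged repro for the map cases above (same assumptions: a SparkSession named spark; a sketch, not copied from the suite):

// Sketch, not part of the diff: each call fails analysis with
// UNSUPPORTED_FEATURE.PARTITION_BY_MAP; expr is now reported quoted,
// e.g. "m" and "map(id, id)".
import org.apache.spark.sql.functions.{col, map}

val df = spark.range(10).select(col("id"), map(col("id"), col("id")).as("m"))

df.repartition(5, col("m"))                  // map column
df.repartition(5, map(col("id"), col("id"))) // map-producing expression

df.createOrReplaceTempView("tv")
spark.sql("SELECT * FROM tv DISTRIBUTE BY m") // same error through SQL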
