Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[SPARK-42307][SQL] Assign name for error _LEGACY_ERROR_TEMP_2232 #47354

Closed
wants to merge 8 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 6 additions & 5 deletions common/utils/src/main/resources/error/error-conditions.json
Original file line number Diff line number Diff line change
Expand Up @@ -3746,6 +3746,12 @@
],
"sqlState" : "21000"
},
"ROW_VALUE_IS_NULL" : {
"message" : [
"Found NULL in a row at the index <index>, expected a non-NULL value."
],
"sqlState" : "22023"
},
"RULE_ID_NOT_FOUND" : {
"message" : [
"Not found an id for the rule name \"<ruleName>\". Please modify RuleIdCollection.scala if you are adding a new rule."
Expand Down Expand Up @@ -7253,11 +7259,6 @@
"Primitive types are not supported."
]
},
"_LEGACY_ERROR_TEMP_2232" : {
"message" : [
"Value at index <index> is null."
]
},
"_LEGACY_ERROR_TEMP_2233" : {
"message" : [
"Only Data Sources providing FileFormat are supported: <providingClass>."
Expand Down
16 changes: 8 additions & 8 deletions sql/api/src/main/scala/org/apache/spark/sql/Row.scala
Original file line number Diff line number Diff line change
Expand Up @@ -219,39 +219,39 @@ trait Row extends Serializable {
* Returns the value at position i as a primitive boolean.
*
* @throws ClassCastException when data type does not match.
* @throws NullPointerException when value is null.
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Could you clarify why you removed it instead of replacing NullPointerException with SparkException? Just wondering.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Oops, I misunderstood your previous comment. Replaced the NPE with SparkException.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

And one more thing: we need to update the SQL migration guide because this might impact user apps and potentially break them. Please add an item to sql-migration-guide.md.

* @throws org.apache.spark.SparkRuntimeException when value is null.
*/
def getBoolean(i: Int): Boolean = getAnyValAs[Boolean](i)

/**
* Returns the value at position i as a primitive byte.
*
* @throws ClassCastException when data type does not match.
* @throws NullPointerException when value is null.
* @throws org.apache.spark.SparkRuntimeException when value is null.
*/
def getByte(i: Int): Byte = getAnyValAs[Byte](i)

/**
* Returns the value at position i as a primitive short.
*
* @throws ClassCastException when data type does not match.
* @throws NullPointerException when value is null.
* @throws org.apache.spark.SparkRuntimeException when value is null.
*/
def getShort(i: Int): Short = getAnyValAs[Short](i)

/**
* Returns the value at position i as a primitive int.
*
* @throws ClassCastException when data type does not match.
* @throws NullPointerException when value is null.
* @throws org.apache.spark.SparkRuntimeException when value is null.
*/
def getInt(i: Int): Int = getAnyValAs[Int](i)

/**
* Returns the value at position i as a primitive long.
*
* @throws ClassCastException when data type does not match.
* @throws NullPointerException when value is null.
* @throws org.apache.spark.SparkRuntimeException when value is null.
*/
def getLong(i: Int): Long = getAnyValAs[Long](i)

Expand All @@ -260,15 +260,15 @@ trait Row extends Serializable {
* Throws an exception if the type mismatches or if the value is null.
*
* @throws ClassCastException when data type does not match.
* @throws NullPointerException when value is null.
* @throws org.apache.spark.SparkRuntimeException when value is null.
*/
def getFloat(i: Int): Float = getAnyValAs[Float](i)

/**
* Returns the value at position i as a primitive double.
*
* @throws ClassCastException when data type does not match.
* @throws NullPointerException when value is null.
* @throws org.apache.spark.SparkRuntimeException when value is null.
*/
def getDouble(i: Int): Double = getAnyValAs[Double](i)

Expand Down Expand Up @@ -523,7 +523,7 @@ trait Row extends Serializable {
*
* @throws UnsupportedOperationException when schema is not defined.
* @throws ClassCastException when data type does not match.
* @throws NullPointerException when value is null.
* @throws org.apache.spark.SparkRuntimeException when value is null.
*/
private def getAnyValAs[T <: AnyVal](i: Int): T =
if (isNullAt(i)) throw DataTypeErrors.valueIsNullError(i)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -272,8 +272,8 @@ private[sql] object DataTypeErrors extends DataTypeErrorsBase {
}

def valueIsNullError(index: Int): Throwable = {
new SparkException(
errorClass = "_LEGACY_ERROR_TEMP_2232",
new SparkRuntimeException(
errorClass = "ROW_VALUE_IS_NULL",
messageParameters = Map(
"index" -> index.toString),
cause = null)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._

import org.apache.spark.{SparkException, SparkIllegalArgumentException, SparkUnsupportedOperationException}
import org.apache.spark.{SparkIllegalArgumentException, SparkRuntimeException, SparkUnsupportedOperationException}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{GenericRow, GenericRowWithSchema}
import org.apache.spark.sql.types._
Expand Down Expand Up @@ -87,8 +87,9 @@ class RowTest extends AnyFunSpec with Matchers {
sampleRowWithoutCol3.getValuesMap[String](List("col1", "col2")) shouldBe expected
}

it("getAs() on type extending AnyVal throws an exception when accessing field that is null") {
intercept[SparkException] {
it("getAnyValAs() on type extending AnyVal throws an exception when accessing " +
"field that is null") {
intercept[SparkRuntimeException] {
sampleRowWithoutCol3.getInt(sampleRowWithoutCol3.fieldIndex("col3"))
}
}
Expand Down
15 changes: 14 additions & 1 deletion sql/core/src/test/scala/org/apache/spark/sql/RowSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@

package org.apache.spark.sql

import org.apache.spark.{SparkFunSuite, SparkUnsupportedOperationException}
import org.apache.spark.{SparkFunSuite, SparkRuntimeException, SparkUnsupportedOperationException}
import org.apache.spark.sql.catalyst.expressions.{GenericInternalRow, SpecificInternalRow}
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types._
Expand Down Expand Up @@ -123,4 +123,17 @@ class RowSuite extends SparkFunSuite with SharedSparkSession {
parameters = Map("methodName" -> "fieldIndex", "className" -> "Row", "fieldName" -> "`foo`")
)
}

test("SPARK-42307: get a value from a null column should result in error") {
val position = 0
val rowWithNullValue = Row.fromSeq(Seq(null))

checkError(
exception = intercept[SparkRuntimeException] {
rowWithNullValue.getLong(position)
},
errorClass = "ROW_VALUE_IS_NULL",
parameters = Map("index" -> position.toString)
)
}
}