diff --git a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4 b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4 index 41a5ec241c64c..6e79d4af2f5ea 100644 --- a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4 +++ b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4 @@ -76,7 +76,7 @@ statement | ctes? dmlStatementNoWith #dmlStatement | USE identifierReference #use | USE namespace identifierReference #useNamespace - | SET CATALOG (identifier | stringLit) #setCatalog + | SET CATALOG (errorCapturingIdentifier | stringLit) #setCatalog | CREATE namespace (IF NOT EXISTS)? identifierReference (commentSpec | locationSpec | @@ -392,7 +392,7 @@ describeFuncName ; describeColName - : nameParts+=identifier (DOT nameParts+=identifier)* + : nameParts+=errorCapturingIdentifier (DOT nameParts+=errorCapturingIdentifier)* ; ctes @@ -429,7 +429,7 @@ property ; propertyKey - : identifier (DOT identifier)* + : errorCapturingIdentifier (DOT errorCapturingIdentifier)* | stringLit ; @@ -683,18 +683,18 @@ pivotClause ; pivotColumn - : identifiers+=identifier - | LEFT_PAREN identifiers+=identifier (COMMA identifiers+=identifier)* RIGHT_PAREN + : identifiers+=errorCapturingIdentifier + | LEFT_PAREN identifiers+=errorCapturingIdentifier (COMMA identifiers+=errorCapturingIdentifier)* RIGHT_PAREN ; pivotValue - : expression (AS? identifier)? + : expression (AS? errorCapturingIdentifier)? ; unpivotClause : UNPIVOT nullOperator=unpivotNullClause? LEFT_PAREN operator=unpivotOperator - RIGHT_PAREN (AS? identifier)? + RIGHT_PAREN (AS? errorCapturingIdentifier)? ; unpivotNullClause @@ -736,7 +736,7 @@ unpivotColumn ; unpivotAlias - : AS? identifier + : AS? errorCapturingIdentifier ; lateralView @@ -1188,7 +1188,7 @@ complexColTypeList ; complexColType - : identifier COLON? dataType (NOT NULL)? commentSpec? + : errorCapturingIdentifier COLON? dataType (NOT NULL)? commentSpec? 
; whenClause diff --git a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeAstBuilder.scala b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeAstBuilder.scala index 3b2bfda9a76ad..38ecd29266db7 100644 --- a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeAstBuilder.scala +++ b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeAstBuilder.scala @@ -201,7 +201,7 @@ class DataTypeAstBuilder extends SqlBaseParserBaseVisitor[AnyRef] { override def visitComplexColType(ctx: ComplexColTypeContext): StructField = withOrigin(ctx) { import ctx._ val structField = StructField( - name = identifier.getText, + name = errorCapturingIdentifier.getText, dataType = typedVisit(dataType()), nullable = NULL == null) Option(commentSpec).map(visitCommentSpec).map(structField.withComment).getOrElse(structField) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala index 170dcc37f0a56..f0fca5ba23c10 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala @@ -1255,7 +1255,7 @@ class AstBuilder extends DataTypeAstBuilder with SQLConfHelper with Logging { .flatMap(_.namedExpression.asScala) .map(typedVisit[Expression]) val pivotColumn = if (ctx.pivotColumn.identifiers.size == 1) { - UnresolvedAttribute.quoted(ctx.pivotColumn.identifier.getText) + UnresolvedAttribute.quoted(ctx.pivotColumn.errorCapturingIdentifier.getText) } else { CreateStruct( ctx.pivotColumn.identifiers.asScala.map( @@ -1270,8 +1270,8 @@ class AstBuilder extends DataTypeAstBuilder with SQLConfHelper with Logging { */ override def visitPivotValue(ctx: PivotValueContext): Expression = withOrigin(ctx) { val e = expression(ctx.expression) - if (ctx.identifier != null) { - Alias(e, ctx.identifier.getText)() 
+ if (ctx.errorCapturingIdentifier != null) { + Alias(e, ctx.errorCapturingIdentifier.getText)() } else { e } @@ -1334,8 +1334,8 @@ class AstBuilder extends DataTypeAstBuilder with SQLConfHelper with Logging { } // alias unpivot result - if (ctx.identifier() != null) { - val alias = ctx.identifier().getText + if (ctx.errorCapturingIdentifier() != null) { + val alias = ctx.errorCapturingIdentifier().getText SubqueryAlias(alias, filtered) } else { filtered @@ -1355,7 +1355,7 @@ class AstBuilder extends DataTypeAstBuilder with SQLConfHelper with Logging { override def visitUnpivotColumnAndAlias(ctx: UnpivotColumnAndAliasContext): (NamedExpression, Option[String]) = withOrigin(ctx) { val attr = visitUnpivotColumn(ctx.unpivotColumn()) - val alias = Option(ctx.unpivotAlias()).map(_.identifier().getText) + val alias = Option(ctx.unpivotAlias()).map(_.errorCapturingIdentifier().getText) (attr, alias) } @@ -1367,7 +1367,7 @@ class AstBuilder extends DataTypeAstBuilder with SQLConfHelper with Logging { (Seq[NamedExpression], Option[String]) = withOrigin(ctx) { val exprs = ctx.unpivotColumns.asScala.map(visitUnpivotColumn).toSeq - val alias = Option(ctx.unpivotAlias()).map(_.identifier().getText) + val alias = Option(ctx.unpivotAlias()).map(_.errorCapturingIdentifier().getText) (exprs, alias) } diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ErrorParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ErrorParserSuite.scala index ac22f32c85230..6fb37ae33fa8d 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ErrorParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ErrorParserSuite.scala @@ -61,6 +61,10 @@ class ErrorParserSuite extends AnalysisTest { exception = parseException("USE test-test"), errorClass = "INVALID_IDENTIFIER", parameters = Map("ident" -> "test-test")) + checkError( + exception = parseException("SET CATALOG test-test"), + errorClass = 
"INVALID_IDENTIFIER", + parameters = Map("ident" -> "test-test")) checkError( exception = parseException("CREATE DATABASE IF NOT EXISTS my-database"), errorClass = "INVALID_IDENTIFIER", parameters = Map("ident" -> "my-database")) @@ -167,6 +171,10 @@ class ErrorParserSuite extends AnalysisTest { exception = parseException("ANALYZE TABLE test-table PARTITION (part1)"), errorClass = "INVALID_IDENTIFIER", parameters = Map("ident" -> "test-table")) + checkError( + exception = parseException("CREATE TABLE t(c1 struct<test-test:int>)"), + errorClass = "INVALID_IDENTIFIER", + parameters = Map("ident" -> "test-test")) checkError( exception = parseException("LOAD DATA INPATH \"path\" INTO TABLE my-tab"), errorClass = "INVALID_IDENTIFIER", parameters = Map("ident" -> "my-tab")) @@ -276,6 +284,19 @@ class ErrorParserSuite extends AnalysisTest { """.stripMargin), errorClass = "INVALID_IDENTIFIER", parameters = Map("ident" -> "test-table")) + checkError( + exception = parseException( + """ + |SELECT * FROM ( + | SELECT year, course, earnings FROM courseSales + |) + |PIVOT ( + | sum(earnings) + | FOR test-test IN ('dotNET', 'Java') + |); + """.stripMargin), + errorClass = "INVALID_IDENTIFIER", + parameters = Map("ident" -> "test-test")) } test("datatype not supported") { diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala index 0210491815c4b..28bcc33b1cdc3 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala @@ -265,8 +265,8 @@ class SparkSqlAstBuilder extends AstBuilder { * Create a [[SetCatalogCommand]] logical command.
*/ override def visitSetCatalog(ctx: SetCatalogContext): LogicalPlan = withOrigin(ctx) { - if (ctx.identifier() != null) { - SetCatalogCommand(ctx.identifier().getText) + if (ctx.errorCapturingIdentifier() != null) { + SetCatalogCommand(ctx.errorCapturingIdentifier().getText) } else if (ctx.stringLit() != null) { SetCatalogCommand(string(visitStringLit(ctx.stringLit()))) } else {