diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
index 6f9e666012a09..95dc95b14192d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
@@ -250,20 +250,9 @@ class ResolveSessionCatalog(
     case DescribeRelation(ResolvedView(ident), partitionSpec, isExtended) =>
       DescribeTableCommand(ident.asTableIdentifier, partitionSpec, isExtended)
 
-    case DescribeColumnStatement(
-        SessionCatalogAndTable(catalog, tbl), colNameParts, isExtended) =>
-      loadTable(catalog, tbl.asIdentifier).collect {
-        // `V1Table` also includes permanent views.
-        case v1Table: V1Table =>
-          DescribeColumnCommand(tbl.asTableIdentifier, colNameParts, isExtended)
-      }.getOrElse {
-        if (isTempView(tbl)) {
-          // v1 DESCRIBE COLUMN supports temp view.
-          DescribeColumnCommand(tbl.asTableIdentifier, colNameParts, isExtended)
-        } else {
-          throw new AnalysisException("Describing columns is not supported for v2 tables.")
-        }
-      }
+    case DescribeColumnStatement(tbl, colNameParts, isExtended) =>
+      val name = parseTempViewOrV1Table(tbl, "Describing columns")
+      DescribeColumnCommand(name.asTableIdentifier, colNameParts, isExtended)
 
     // For CREATE TABLE [AS SELECT], we should use the v1 command if the catalog is resolved to the
     // session catalog and the table provider is not v2.
@@ -396,7 +385,7 @@ class ResolveSessionCatalog(
     }
 
     case AnalyzeColumnStatement(tbl, columnNames, allColumns) =>
-      val v1TableName = parseV1Table(tbl, "ANALYZE TABLE")
+      val v1TableName = parseTempViewOrV1Table(tbl, "ANALYZE TABLE")
       AnalyzeColumnCommand(v1TableName.asTableIdentifier, columnNames, allColumns)
 
     case RepairTableStatement(tbl) =>
@@ -415,8 +404,8 @@ class ResolveSessionCatalog(
         partition)
 
     case ShowCreateTableStatement(tbl, asSerde) if !asSerde =>
-      val v1TableName = parseV1Table(tbl, "SHOW CREATE TABLE")
-      ShowCreateTableCommand(v1TableName.asTableIdentifier)
+      val name = parseTempViewOrV1Table(tbl, "SHOW CREATE TABLE")
+      ShowCreateTableCommand(name.asTableIdentifier)
 
     case ShowCreateTableStatement(tbl, asSerde) if asSerde =>
       val v1TableName = parseV1Table(tbl, "SHOW CREATE TABLE AS SERDE")
@@ -449,20 +438,27 @@ class ResolveSessionCatalog(
         partitionSpec)
 
     case ShowColumnsStatement(tbl, ns) =>
+      if (ns.isDefined && ns.get.length > 1) {
+        throw new AnalysisException(
+          s"Namespace name should have only one part if specified: ${ns.get.quoted}")
+      }
+      // Use namespace only if table name doesn't specify it. If namespace is already specified
+      // in the table name, it's checked against the given namespace below.
+      val nameParts = if (ns.isDefined && tbl.length == 1) {
+        ns.get ++ tbl
+      } else {
+        tbl
+      }
       val sql = "SHOW COLUMNS"
-      val v1TableName = parseV1Table(tbl, sql).asTableIdentifier
+      val v1TableName = parseTempViewOrV1Table(nameParts, sql).asTableIdentifier
       val resolver = conf.resolver
       val db = ns match {
-        case Some(db) if (v1TableName.database.exists(!resolver(_, db.head))) =>
+        case Some(db) if v1TableName.database.exists(!resolver(_, db.head)) =>
           throw new AnalysisException(
             s"SHOW COLUMNS with conflicting databases: " +
               s"'${db.head}' != '${v1TableName.database.get}'")
         case _ => ns.map(_.head)
       }
-      if (ns.isDefined && ns.get.length > 1) {
-        throw new AnalysisException(
-          s"Namespace name should have only one part if specified: ${ns.get.quoted}")
-      }
       ShowColumnsCommand(db, v1TableName)
 
     case AlterTableRecoverPartitionsStatement(tbl) =>
@@ -659,14 +655,7 @@ class ResolveSessionCatalog(
   object SessionCatalogAndTable {
     def unapply(nameParts: Seq[String]): Option[(CatalogPlugin, Seq[String])] = nameParts match {
       case SessionCatalogAndIdentifier(catalog, ident) =>
-        if (nameParts.length == 1) {
-          // If there is only one name part, it means the current catalog is the session catalog.
-          // Here we return the original name part, to keep the error message unchanged for
-          // v1 commands.
-          Some(catalog -> nameParts)
-        } else {
-          Some(catalog -> ident.asMultipartIdentifier)
-        }
+        Some(catalog -> ident.asMultipartIdentifier)
       case _ => None
     }
   }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
index cef9b5f675889..2ed33b867183b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
@@ -167,14 +167,15 @@ class V2SessionCatalog(catalog: SessionCatalog, conf: SQLConf)
   }
 
   implicit class TableIdentifierHelper(ident: Identifier) {
+    import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.IdentifierHelper
+
     def asTableIdentifier: TableIdentifier = {
       ident.namespace match {
         case Array(db) =>
          TableIdentifier(ident.name, Some(db))
-        case Array() =>
-          TableIdentifier(ident.name, Some(catalog.getCurrentDatabase))
         case _ =>
-          throw new NoSuchTableException(ident)
+          throw new NoSuchTableException(
+            s"V2 session catalog requires a single-part namespace: ${ident.quoted}")
       }
     }
   }
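[Reviewer note — illustration only, not part of the patch] With the hunk above, the v2 session catalog stops silently qualifying namespace-less identifiers with the current database; callers must now pass exactly one namespace part. A minimal sketch of the resulting contract, assuming the implicit `TableIdentifierHelper` above is in scope (identifier values are illustrative):

    Identifier.of(Array("default"), "t").asTableIdentifier
    // => TableIdentifier("t", Some("default"))

    Identifier.of(Array.empty[String], "t").asTableIdentifier
    // => throws NoSuchTableException: "V2 session catalog requires a single-part namespace: t"

    Identifier.of(Array("ns1", "ns2"), "t").asTableIdentifier
    // => throws NoSuchTableException: "V2 session catalog requires a single-part namespace: ns1.ns2.t"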
diff --git a/sql/core/src/test/resources/sql-tests/results/describe.sql.out b/sql/core/src/test/resources/sql-tests/results/describe.sql.out
index a7ef828e4d5d2..a7de033e3a1ac 100644
--- a/sql/core/src/test/resources/sql-tests/results/describe.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/describe.sql.out
@@ -561,7 +561,7 @@ struct<plan:string>
 -- !query output
 == Physical Plan ==
 Execute DescribeColumnCommand
-   +- DescribeColumnCommand `t`, [b], false
+   +- DescribeColumnCommand `default`.`t`, [b], false
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/explain.sql.out b/sql/core/src/test/resources/sql-tests/results/explain.sql.out
index 40b26e6fb64ce..06226f1274863 100644
--- a/sql/core/src/test/resources/sql-tests/results/explain.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/explain.sql.out
@@ -879,7 +879,7 @@ Execute CreateViewCommand (1)
 Output: []
 
 (2) CreateViewCommand
-Arguments: `explain_view`, SELECT key, val FROM explain_temp1, false, false, PersistedView
+Arguments: `default`.`explain_view`, SELECT key, val FROM explain_temp1, false, false, PersistedView
 
 (3) UnresolvedRelation
 Arguments: [explain_temp1]
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out
index 436b33ce43980..85ce978657844 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out
@@ -266,7 +266,7 @@ CREATE VIEW v1_temp AS SELECT * FROM temp_table
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Not allowed to create a permanent view `v1_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW;
+Not allowed to create a permanent view `temp_view_test`.`v1_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW;
 
 
 -- !query
@@ -371,7 +371,7 @@ CREATE VIEW v4_temp AS
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Not allowed to create a permanent view `v4_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW;
+Not allowed to create a permanent view `temp_view_test`.`v4_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW;
 
 
 -- !query
@@ -383,7 +383,7 @@ CREATE VIEW v5_temp AS
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Not allowed to create a permanent view `v5_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW;
+Not allowed to create a permanent view `temp_view_test`.`v5_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW;
 
 
 -- !query
@@ -542,7 +542,7 @@ CREATE VIEW v6_temp AS SELECT * FROM base_table WHERE id IN (SELECT id FROM temp
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Not allowed to create a permanent view `v6_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW;
+Not allowed to create a permanent view `temp_view_test`.`v6_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW;
 
 
 -- !query
@@ -551,7 +551,7 @@ CREATE VIEW v7_temp AS SELECT t1.id, t2.a FROM base_table t1, (SELECT * FROM tem
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Not allowed to create a permanent view `v7_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW;
+Not allowed to create a permanent view `temp_view_test`.`v7_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW;
 
 
 -- !query
@@ -560,7 +560,7 @@ CREATE VIEW v8_temp AS SELECT * FROM base_table WHERE EXISTS (SELECT 1 FROM temp
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Not allowed to create a permanent view `v8_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW;
+Not allowed to create a permanent view `temp_view_test`.`v8_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW;
 
 
 -- !query
@@ -569,7 +569,7 @@ CREATE VIEW v9_temp AS SELECT * FROM base_table WHERE NOT EXISTS (SELECT 1 FROM
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Not allowed to create a permanent view `v9_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW;
+Not allowed to create a permanent view `temp_view_test`.`v9_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW;
 
 
 -- !query
@@ -678,7 +678,7 @@ CREATE VIEW temporal1 AS SELECT * FROM t1 CROSS JOIN tt
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Not allowed to create a permanent view `temporal1` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW;
+Not allowed to create a permanent view `testviewschm2`.`temporal1` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW;
 
 
 -- !query
@@ -719,7 +719,7 @@ CREATE VIEW temporal2 AS SELECT * FROM t1 INNER JOIN tt ON t1.num = tt.num2
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Not allowed to create a permanent view `temporal2` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW;
+Not allowed to create a permanent view `testviewschm2`.`temporal2` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW;
 
 
 -- !query
@@ -760,7 +760,7 @@ CREATE VIEW temporal3 AS SELECT * FROM t1 LEFT JOIN tt ON t1.num = tt.num2
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Not allowed to create a permanent view `temporal3` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW;
+Not allowed to create a permanent view `testviewschm2`.`temporal3` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW;
 
 
 -- !query
@@ -801,7 +801,7 @@ CREATE VIEW temporal4 AS SELECT * FROM t1 LEFT JOIN tt ON t1.num = tt.num2 AND t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Not allowed to create a permanent view `temporal4` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW;
+Not allowed to create a permanent view `testviewschm2`.`temporal4` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW;
 
 
 -- !query
@@ -810,7 +810,7 @@ CREATE VIEW temporal5 AS SELECT * FROM t1 WHERE num IN (SELECT num FROM t1 WHERE
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Not allowed to create a permanent view `temporal5` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW;
+Not allowed to create a permanent view `testviewschm2`.`temporal5` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW;
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/show-create-table.sql.out b/sql/core/src/test/resources/sql-tests/results/show-create-table.sql.out
index e8ee07171651d..f5ca1eff9f0c3 100644
--- a/sql/core/src/test/resources/sql-tests/results/show-create-table.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/show-create-table.sql.out
@@ -15,7 +15,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `tbl` (
+CREATE TABLE `default`.`tbl` (
   `a` INT,
   `b` STRING,
   `c` INT)
@@ -44,7 +44,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `tbl` (
+CREATE TABLE `default`.`tbl` (
   `a` INT,
   `b` STRING,
   `c` INT)
@@ -75,7 +75,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `tbl` (
+CREATE TABLE `default`.`tbl` (
   `a` INT,
   `b` STRING,
   `c` INT)
@@ -105,7 +105,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `tbl` (
+CREATE TABLE `default`.`tbl` (
   `a` INT,
   `b` STRING,
   `c` INT)
@@ -135,7 +135,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `tbl` (
+CREATE TABLE `default`.`tbl` (
   `b` STRING,
   `c` INT,
   `a` INT)
@@ -165,7 +165,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `tbl` (
+CREATE TABLE `default`.`tbl` (
   `a` INT,
   `b` STRING,
   `c` INT)
@@ -197,7 +197,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `tbl` (
+CREATE TABLE `default`.`tbl` (
   `a` INT,
   `b` STRING,
   `c` INT)
@@ -227,7 +227,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `tbl` (
+CREATE TABLE `default`.`tbl` (
   `a` INT,
   `b` STRING,
   `c` INT)
@@ -257,7 +257,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `tbl` (
+CREATE TABLE `default`.`tbl` (
   `a` FLOAT,
   `b` DECIMAL(10,0),
   `c` DECIMAL(10,0),
@@ -295,7 +295,7 @@ SHOW CREATE TABLE view_SPARK_30302 AS SERDE
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE VIEW `view_SPARK_30302`(
+CREATE VIEW `default`.`view_SPARK_30302`(
   `aaa`,
   `bbb`)
 AS SELECT a, b FROM tbl
@@ -324,7 +324,7 @@ SHOW CREATE TABLE view_SPARK_30302 AS SERDE
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE VIEW `view_SPARK_30302`(
+CREATE VIEW `default`.`view_SPARK_30302`(
   `aaa` COMMENT 'comment with \'quoted text\' for aaa',
   `bbb`)
 COMMENT 'This is a comment with \'quoted text\' for view'
@@ -354,7 +354,7 @@ SHOW CREATE TABLE view_SPARK_30302 AS SERDE
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE VIEW `view_SPARK_30302`(
+CREATE VIEW `default`.`view_SPARK_30302`(
   `aaa`,
   `bbb`)
 TBLPROPERTIES (
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ShowCreateTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ShowCreateTableSuite.scala
index b3b94f8be0d17..04257642fac81 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ShowCreateTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ShowCreateTableSuite.scala
@@ -173,7 +173,7 @@ abstract class ShowCreateTableSuite extends QueryTest with SQLTestUtils {
       val createTable = "CREATE TABLE `t1` (`a` STRUCT<`b`: STRING>)"
       sql(s"$createTable USING json")
       val shownDDL = getShowDDL("SHOW CREATE TABLE t1")
-      assert(shownDDL == createTable)
+      assert(shownDDL == "CREATE TABLE `default`.`t1` (`a` STRUCT<`b`: STRING>)")
 
       checkCreateTable("t1")
     }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala
index d04a1fca6387c..afc51f45c54ec 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala
@@ -67,9 +67,10 @@ trait AlterTableTests extends SharedSparkSession {
       assert(exc.getMessage.contains("Unsupported table change"))
       assert(exc.getMessage.contains("Cannot drop all fields")) // from the implementation
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType().add("id", IntegerType))
     }
   }
@@ -80,9 +81,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int) USING $v2Format")
       sql(s"ALTER TABLE $t ADD COLUMN data string")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType().add("id", IntegerType).add("data", StringType))
     }
   }
@@ -93,9 +95,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int) USING $v2Format")
       sql(s"ALTER TABLE $t ADD COLUMN data string NOT NULL")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === StructType(Seq(
         StructField("id", IntegerType),
         StructField("data", StringType, nullable = false))))
@@ -108,9 +111,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int) USING $v2Format")
       sql(s"ALTER TABLE $t ADD COLUMN data string COMMENT 'doc'")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === StructType(Seq(
         StructField("id", IntegerType),
         StructField("data", StringType).withComment("doc"))))
@@ -136,12 +140,13 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (point struct<x: int>) USING $v2Format")
 
       sql(s"ALTER TABLE $t ADD COLUMN a string FIRST")
-      assert(getTableMetadata(t).schema == new StructType()
+      val tableName = fullTableName(t)
+      assert(getTableMetadata(tableName).schema == new StructType()
        .add("a", StringType)
        .add("point", new StructType().add("x", IntegerType)))
 
       sql(s"ALTER TABLE $t ADD COLUMN b string AFTER point")
-      assert(getTableMetadata(t).schema == new StructType()
+      assert(getTableMetadata(tableName).schema == new StructType()
        .add("a", StringType)
        .add("point", new StructType().add("x", IntegerType))
        .add("b", StringType))
@@ -151,7 +156,7 @@ trait AlterTableTests extends SharedSparkSession {
       assert(e1.getMessage().contains("Couldn't find the reference column"))
 
       sql(s"ALTER TABLE $t ADD COLUMN point.y int FIRST")
-      assert(getTableMetadata(t).schema == new StructType()
+      assert(getTableMetadata(tableName).schema == new StructType()
         .add("a", StringType)
         .add("point", new StructType()
           .add("y", IntegerType)
@@ -159,7 +164,7 @@ trait AlterTableTests extends SharedSparkSession {
           .add("x", IntegerType))
         .add("b", StringType))
 
       sql(s"ALTER TABLE $t ADD COLUMN point.z int AFTER x")
-      assert(getTableMetadata(t).schema == new StructType()
+      assert(getTableMetadata(tableName).schema == new StructType()
         .add("a", StringType)
         .add("point", new StructType()
           .add("y", IntegerType)
@@ -179,7 +184,8 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (a string, b int, point struct<x: double, y: double>) USING $v2Format")
       sql(s"ALTER TABLE $t ADD COLUMNS (x int AFTER a, y int AFTER x, z int AFTER y)")
-      assert(getTableMetadata(t).schema === new StructType()
+      val tableName = fullTableName(t)
+      assert(getTableMetadata(tableName).schema === new StructType()
         .add("a", StringType)
         .add("x", IntegerType)
         .add("y", IntegerType)
@@ -190,7 +196,7 @@ trait AlterTableTests extends SharedSparkSession {
         .add("y", DoubleType)))
 
       sql(s"ALTER TABLE $t ADD COLUMNS (point.z double AFTER x, point.zz double AFTER z)")
-      assert(getTableMetadata(t).schema === new StructType()
+      assert(getTableMetadata(tableName).schema === new StructType()
         .add("a", StringType)
         .add("x", IntegerType)
         .add("y", IntegerType)
@@ -215,9 +221,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int) USING $v2Format")
       sql(s"ALTER TABLE $t ADD COLUMNS data string COMMENT 'doc', ts timestamp")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === StructType(Seq(
         StructField("id", IntegerType),
         StructField("data", StringType).withComment("doc"),
@@ -231,9 +238,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int, point struct<x: double, y: double>) USING $v2Format")
       sql(s"ALTER TABLE $t ADD COLUMN point.z double")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("point", StructType(Seq(
@@ -250,9 +258,10 @@ trait AlterTableTests extends SharedSparkSession {
         s"USING $v2Format")
       sql(s"ALTER TABLE $t ADD COLUMN points.key.z double")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("points", MapType(StructType(Seq(
@@ -269,9 +278,10 @@ trait AlterTableTests extends SharedSparkSession {
         s"USING $v2Format")
       sql(s"ALTER TABLE $t ADD COLUMN points.value.z double")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("points", MapType(StringType, StructType(Seq(
@@ -287,9 +297,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int, points array<struct<x: double, y: double>>) USING $v2Format")
       sql(s"ALTER TABLE $t ADD COLUMN points.element.z double")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("points", ArrayType(StructType(Seq(
@@ -305,9 +316,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int) USING $v2Format")
       sql(s"ALTER TABLE $t ADD COLUMN points array<struct<x: double, y: double>>")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === tableName)
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("points", ArrayType(StructType(Seq(
@@ -322,9 +334,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int, points array<struct<x: double, y: double>>) USING $v2Format")
       sql(s"ALTER TABLE $t ADD COLUMN points.element.z double COMMENT 'doc'")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("points", ArrayType(StructType(Seq(
@@ -378,8 +391,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int) USING $v2Format")
       sql(s"ALTER TABLE $t ALTER COLUMN id TYPE bigint")
 
-      val table = getTableMetadata(t)
-      assert(table.name === fullTableName(t))
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
+
+      assert(table.name === tableName)
       assert(table.schema === new StructType().add("id", LongType))
     }
   }
@@ -399,13 +414,14 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id bigint NOT NULL) USING $v2Format")
       sql(s"ALTER TABLE $t ALTER COLUMN id SET NOT NULL")
 
-      val table = getTableMetadata(t)
-      assert(table.name === fullTableName(t))
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
+      assert(table.name === tableName)
       assert(table.schema === new StructType().add("id", LongType, nullable = false))
 
       sql(s"ALTER TABLE $t ALTER COLUMN id DROP NOT NULL")
-      val table2 = getTableMetadata(t)
-      assert(table2.name === fullTableName(t))
+      val table2 = getTableMetadata(tableName)
+      assert(table2.name === tableName)
       assert(table2.schema === new StructType().add("id", LongType))
 
       val e = intercept[AnalysisException] {
@@ -421,8 +437,9 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int, point struct<x: int, y: int>) USING $v2Format")
       sql(s"ALTER TABLE $t ALTER COLUMN point.x TYPE double")
 
-      val table = getTableMetadata(t)
-      assert(table.name === fullTableName(t))
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("point", StructType(Seq(
@@ -443,9 +460,10 @@ trait AlterTableTests extends SharedSparkSession {
       assert(exc.getMessage.contains("point"))
       assert(exc.getMessage.contains("update a struct by updating its fields"))
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("point", StructType(Seq(
@@ -465,9 +483,10 @@ trait AlterTableTests extends SharedSparkSession {
       assert(exc.getMessage.contains("update the element by updating points.element"))
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("points", ArrayType(IntegerType)))
@@ -480,9 +499,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int, points array<int>) USING $v2Format")
       sql(s"ALTER TABLE $t ALTER COLUMN points.element TYPE long")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("points", ArrayType(LongType)))
@@ -500,9 +520,10 @@ trait AlterTableTests extends SharedSparkSession {
       assert(exc.getMessage.contains("update a map by updating m.key or m.value"))
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("m", MapType(StringType, IntegerType)))
@@ -515,9 +536,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int, m map<string, int>) USING $v2Format")
       sql(s"ALTER TABLE $t ALTER COLUMN m.value TYPE long")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("m", MapType(StringType, LongType)))
@@ -531,9 +553,10 @@ trait AlterTableTests extends SharedSparkSession {
         s"USING $v2Format")
       sql(s"ALTER TABLE $t ALTER COLUMN points.key.x TYPE double")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("points", MapType(StructType(Seq(
@@ -549,9 +572,10 @@ trait AlterTableTests extends SharedSparkSession {
         s"USING $v2Format")
       sql(s"ALTER TABLE $t ALTER COLUMN points.value.x TYPE double")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("points", MapType(StringType, StructType(Seq(
@@ -566,9 +590,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int, points array<struct<x: int, y: int>>) USING $v2Format")
       sql(s"ALTER TABLE $t ALTER COLUMN points.element.x TYPE double")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("points", ArrayType(StructType(Seq(
@@ -625,9 +650,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int) USING $v2Format")
       sql(s"ALTER TABLE $t ALTER COLUMN id COMMENT 'doc'")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === StructType(Seq(StructField("id", IntegerType).withComment("doc"))))
     }
   }
@@ -638,7 +664,8 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (a int, b int, point struct<x: int, y: int, z: int>) USING $v2Format")
 
       sql(s"ALTER TABLE $t ALTER COLUMN b FIRST")
-      assert(getTableMetadata(t).schema == new StructType()
+      val tableName = fullTableName(t)
+      assert(getTableMetadata(tableName).schema == new StructType()
         .add("b", IntegerType)
         .add("a", IntegerType)
         .add("point", new StructType()
           .add("x", IntegerType)
           .add("y", IntegerType)
          .add("z", IntegerType)))
@@ -647,7 +674,7 @@ trait AlterTableTests extends SharedSparkSession {
 
       sql(s"ALTER TABLE $t ALTER COLUMN b AFTER point")
-      assert(getTableMetadata(t).schema == new StructType()
+      assert(getTableMetadata(tableName).schema == new StructType()
         .add("a", IntegerType)
         .add("point", new StructType()
           .add("x", IntegerType)
@@ -660,7 +687,7 @@ trait AlterTableTests extends SharedSparkSession {
       assert(e1.getMessage.contains("Couldn't resolve positional argument"))
 
       sql(s"ALTER TABLE $t ALTER COLUMN point.y FIRST")
-      assert(getTableMetadata(t).schema == new StructType()
+      assert(getTableMetadata(tableName).schema == new StructType()
         .add("a", IntegerType)
         .add("point", new StructType()
           .add("y", IntegerType)
@@ -669,7 +696,7 @@ trait AlterTableTests extends SharedSparkSession {
         .add("b", IntegerType))
 
       sql(s"ALTER TABLE $t ALTER COLUMN point.y AFTER z")
-      assert(getTableMetadata(t).schema == new StructType()
+      assert(getTableMetadata(tableName).schema == new StructType()
         .add("a", IntegerType)
         .add("point", new StructType()
           .add("x", IntegerType)
@@ -693,9 +720,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int, point struct<x: int, y: int>) USING $v2Format")
       sql(s"ALTER TABLE $t ALTER COLUMN point.y COMMENT 'doc'")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("point", StructType(Seq(
@@ -711,9 +739,10 @@ trait AlterTableTests extends SharedSparkSession {
         s"USING $v2Format")
       sql(s"ALTER TABLE $t ALTER COLUMN points.key.y COMMENT 'doc'")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("points", MapType(StructType(Seq(
@@ -729,9 +758,10 @@ trait AlterTableTests extends SharedSparkSession {
         s"USING $v2Format")
       sql(s"ALTER TABLE $t ALTER COLUMN points.value.y COMMENT 'doc'")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("points", MapType(StringType, StructType(Seq(
@@ -746,9 +776,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int, points array<struct<x: int, y: int>>) USING $v2Format")
       sql(s"ALTER TABLE $t ALTER COLUMN points.element.y COMMENT 'doc'")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("points", ArrayType(StructType(Seq(
@@ -791,9 +822,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int) USING $v2Format")
       sql(s"ALTER TABLE $t RENAME COLUMN id TO user_id")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType().add("user_id", IntegerType))
     }
   }
@@ -804,9 +836,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int, point struct<x: int, y: int>) USING $v2Format")
       sql(s"ALTER TABLE $t RENAME COLUMN point.y TO t")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("point", StructType(Seq(
@@ -822,9 +855,10 @@ trait AlterTableTests extends SharedSparkSession {
         s"USING $v2Format")
       sql(s"ALTER TABLE $t RENAME COLUMN point.key.y TO t")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("point", MapType(StructType(Seq(
@@ -840,9 +874,10 @@ trait AlterTableTests extends SharedSparkSession {
         s"USING $v2Format")
       sql(s"ALTER TABLE $t RENAME COLUMN points.value.y TO t")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("points", MapType(StringType, StructType(Seq(
@@ -857,9 +892,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int, points array<struct<x: int, y: int>>) USING $v2Format")
       sql(s"ALTER TABLE $t RENAME COLUMN points.element.y TO t")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("points", ArrayType(StructType(Seq(
@@ -933,9 +969,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int, data string) USING $v2Format")
       sql(s"ALTER TABLE $t DROP COLUMN data")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType().add("id", IntegerType))
     }
   }
@@ -947,9 +984,10 @@ trait AlterTableTests extends SharedSparkSession {
         s"USING $v2Format")
       sql(s"ALTER TABLE $t DROP COLUMN point.t")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("point", StructType(Seq(
@@ -965,9 +1003,10 @@ trait AlterTableTests extends SharedSparkSession {
         s"USING $v2Format")
       sql(s"ALTER TABLE $t DROP COLUMN point.key.y")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("point", MapType(StructType(Seq(
@@ -982,9 +1021,10 @@ trait AlterTableTests extends SharedSparkSession {
         s"USING $v2Format")
       sql(s"ALTER TABLE $t DROP COLUMN points.value.y")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("points", MapType(StringType, StructType(Seq(
@@ -998,9 +1038,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int, points array<struct<x: int, y: int>>) USING $v2Format")
       sql(s"ALTER TABLE $t DROP COLUMN points.element.y")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === new StructType()
         .add("id", IntegerType)
         .add("points", ArrayType(StructType(Seq(
@@ -1042,9 +1083,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int) USING $v2Format")
       sql(s"ALTER TABLE $t SET LOCATION 's3://bucket/path'")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.properties ===
         withDefaultOwnership(Map("provider" -> v2Format, "location" -> "s3://bucket/path")).asJava)
     }
@@ -1069,9 +1111,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (id int) USING $v2Format")
       sql(s"ALTER TABLE $t SET TBLPROPERTIES ('test'='34')")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.properties ===
         withDefaultOwnership(Map("provider" -> v2Format, "test" -> "34")).asJava)
     }
@@ -1082,17 +1125,18 @@ trait AlterTableTests extends SharedSparkSession {
     withTable(t) {
       sql(s"CREATE TABLE $t (id int) USING $v2Format TBLPROPERTIES('test' = '34')")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.properties ===
         withDefaultOwnership(Map("provider" -> v2Format, "test" -> "34")).asJava)
 
       sql(s"ALTER TABLE $t UNSET TBLPROPERTIES ('test')")
 
-      val updated = getTableMetadata(t)
+      val updated = getTableMetadata(tableName)
 
-      assert(updated.name === fullTableName(t))
+      assert(updated.name === tableName)
       assert(updated.properties === withDefaultOwnership(Map("provider" -> v2Format)).asJava)
     }
   }
@@ -1103,9 +1147,10 @@ trait AlterTableTests extends SharedSparkSession {
       sql(s"CREATE TABLE $t (col1 int, col2 int COMMENT 'c2') USING $v2Format")
       sql(s"ALTER TABLE $t REPLACE COLUMNS (col2 string, col3 int COMMENT 'c3')")
 
-      val table = getTableMetadata(t)
+      val tableName = fullTableName(t)
+      val table = getTableMetadata(tableName)
 
-      assert(table.name === fullTableName(t))
+      assert(table.name === tableName)
       assert(table.schema === StructType(Seq(
         StructField("col2", StringType),
         StructField("col3", IntegerType).withComment("c3"))))
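[Reviewer note — illustration only, not part of the patch] The change repeated throughout AlterTableTests.scala above is mechanical: metadata is now looked up by the fully qualified name rather than by the raw name the test was given. Sketched with this suite's own helpers (`t` is illustrative):

    // t may be "table_name" (session catalog) or "testcat.ns1.ns2.table_name".
    val tableName = fullTableName(t)          // e.g. "default.table_name"
    val table = getTableMetadata(tableName)   // was: getTableMetadata(t)
    assert(table.name === tableName)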
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2DataFrameSessionCatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2DataFrameSessionCatalogSuite.scala
index 01caf8e2eb115..6b25d7c61663c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2DataFrameSessionCatalogSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2DataFrameSessionCatalogSuite.scala
@@ -85,7 +85,7 @@ class DataSourceV2DataFrameSessionCatalogSuite
     withTable(t1) {
       spark.range(20).write.format(v2Format).option("path", "abc").saveAsTable(t1)
       val cat = spark.sessionState.catalogManager.currentCatalog.asInstanceOf[TableCatalog]
-      val tableInfo = cat.loadTable(Identifier.of(Array.empty, t1))
+      val tableInfo = cat.loadTable(Identifier.of(Array("default"), t1))
       assert(tableInfo.properties().get("location") === "abc")
       assert(tableInfo.properties().get("provider") === v2Format)
     }
@@ -109,8 +109,7 @@ class InMemoryTableSessionCatalog extends TestV2SessionCatalogBase[InMemoryTable
   }
 
   override def alterTable(ident: Identifier, changes: TableChange*): Table = {
-    val fullIdent = fullIdentifier(ident)
-    Option(tables.get(fullIdent)) match {
+    Option(tables.get(ident)) match {
       case Some(table) =>
         val properties = CatalogV2Util.applyPropertiesChanges(table.properties, changes)
         val schema = CatalogV2Util.applySchemaChanges(table.schema, changes)
@@ -123,7 +122,7 @@ class InMemoryTableSessionCatalog extends TestV2SessionCatalogBase[InMemoryTable
         val newTable = new InMemoryTable(table.name, schema, table.partitioning, properties)
           .withData(table.data)
 
-        tables.put(fullIdent, newTable)
+        tables.put(ident, newTable)
         newTable
       case _ =>
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSessionCatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSessionCatalogSuite.scala
index b6997445013e5..249b27c28b072 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSessionCatalogSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSessionCatalogSuite.scala
@@ -47,7 +47,7 @@ class DataSourceV2SQLSessionCatalogSuite
     val v2Catalog = spark.sessionState.catalogManager.currentCatalog
     val nameParts = spark.sessionState.sqlParser.parseMultipartIdentifier(tableName)
     v2Catalog.asInstanceOf[TableCatalog]
-      .loadTable(Identifier.of(Array.empty, nameParts.last))
+      .loadTable(Identifier.of(nameParts.init.toArray, nameParts.last))
   }
 
   test("SPARK-30697: catalog.isView doesn't throw an error for specialized identifiers") {
@@ -55,7 +55,7 @@ class DataSourceV2SQLSessionCatalogSuite
     withTable(t1) {
       sql(s"CREATE TABLE $t1 (id bigint, data string) USING $v2Format")
 
-      def idResolver(id: Identifier): Identifier = Identifier.of(Array.empty, id.name())
+      def idResolver(id: Identifier): Identifier = Identifier.of(Array("default"), id.name())
 
       InMemoryTableSessionCatalog.withCustomIdentifierResolver(idResolver) {
         // The following should not throw AnalysisException.
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index 2acd7e39fe949..f642114983293 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -172,7 +172,7 @@ class DataSourceV2SQLSuite
     spark.sql(s"CREATE TABLE table_name (id bigint, data string) USING $v2Source")
 
     val testCatalog = catalog(SESSION_CATALOG_NAME).asTableCatalog
-    val table = testCatalog.loadTable(Identifier.of(Array(), "table_name"))
+    val table = testCatalog.loadTable(Identifier.of(Array("default"), "table_name"))
 
     assert(table.name == "default.table_name")
     assert(table.partitioning.isEmpty)
@@ -453,7 +453,7 @@ class DataSourceV2SQLSuite
     spark.sql(s"CREATE TABLE table_name USING $v2Source AS SELECT id, data FROM source")
 
     val testCatalog = catalog(SESSION_CATALOG_NAME).asTableCatalog
-    val table = testCatalog.loadTable(Identifier.of(Array(), "table_name"))
+    val table = testCatalog.loadTable(Identifier.of(Array("default"), "table_name"))
 
     assert(table.name == "default.table_name")
     assert(table.partitioning.isEmpty)
@@ -565,7 +565,7 @@ class DataSourceV2SQLSuite
     // The fact that the following line doesn't throw an exception means, the session catalog
     // can load the table.
     val t = catalog(SESSION_CATALOG_NAME).asTableCatalog
-      .loadTable(Identifier.of(Array.empty, "table_name"))
+      .loadTable(Identifier.of(Array("default"), "table_name"))
     assert(t.isInstanceOf[V1Table], "V1 table wasn't returned as an unresolved table")
   }
 
@@ -605,10 +605,10 @@ class DataSourceV2SQLSuite
   }
 
   test("DropTable: table qualified with the session catalog name") {
-    val ident = Identifier.of(Array(), "tbl")
+    val ident = Identifier.of(Array("default"), "tbl")
     sql("CREATE TABLE tbl USING json AS SELECT 1 AS i")
     assert(catalog("spark_catalog").asTableCatalog.tableExists(ident) === true)
-    sql("DROP TABLE spark_catalog.tbl")
+    sql("DROP TABLE spark_catalog.default.tbl")
     assert(catalog("spark_catalog").asTableCatalog.tableExists(ident) === false)
   }
 
@@ -705,7 +705,6 @@ class DataSourceV2SQLSuite
     withTable("t") {
       sql("CREATE TABLE t USING json AS SELECT 1 AS i")
 
-      checkAnswer(sql("select default.t.i from spark_catalog.t"), Row(1))
       checkAnswer(sql("select t.i from spark_catalog.default.t"), Row(1))
       checkAnswer(sql("select default.t.i from spark_catalog.default.t"), Row(1))
     }
@@ -1378,7 +1377,12 @@ class DataSourceV2SQLSuite
     val sessionCatalog = catalog(SESSION_CATALOG_NAME).asTableCatalog
 
     def checkPartitioning(cat: TableCatalog, partition: String): Unit = {
-      val table = cat.loadTable(Identifier.of(Array.empty, "tbl"))
+      val namespace = if (cat.name == SESSION_CATALOG_NAME) {
+        Array("default")
+      } else {
+        Array[String]()
+      }
+      val table = cat.loadTable(Identifier.of(namespace, "tbl"))
       val partitions = table.partitioning().map(_.references())
       assert(partitions.length === 1)
       val fieldNames = partitions.flatMap(_.map(_.fieldNames()))
@@ -1414,48 +1418,48 @@ class DataSourceV2SQLSuite
   }
 
   test("tableCreation: duplicate column names in the table definition") {
-    val errorMsg = "Found duplicate column(s) in the table definition of t"
+    val errorMsg = "Found duplicate column(s) in the table definition of"
     Seq((true, ("a", "a")), (false, ("aA", "Aa"))).foreach { case (caseSensitive, (c0, c1)) =>
       withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive.toString) {
         assertAnalysisError(
           s"CREATE TABLE t ($c0 INT, $c1 INT) USING $v2Source",
-          errorMsg
+          s"$errorMsg default.t"
         )
         assertAnalysisError(
           s"CREATE TABLE testcat.t ($c0 INT, $c1 INT) USING $v2Source",
-          errorMsg
+          s"$errorMsg t"
        )
        assertAnalysisError(
          s"CREATE OR REPLACE TABLE t ($c0 INT, $c1 INT) USING $v2Source",
-          errorMsg
+          s"$errorMsg default.t"
        )
        assertAnalysisError(
          s"CREATE OR REPLACE TABLE testcat.t ($c0 INT, $c1 INT) USING $v2Source",
-          errorMsg
+          s"$errorMsg t"
        )
       }
     }
   }
 
   test("tableCreation: duplicate nested column names in the table definition") {
-    val errorMsg = "Found duplicate column(s) in the table definition of t"
+    val errorMsg = "Found duplicate column(s) in the table definition of"
     Seq((true, ("a", "a")), (false, ("aA", "Aa"))).foreach { case (caseSensitive, (c0, c1)) =>
       withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive.toString) {
         assertAnalysisError(
           s"CREATE TABLE t (d struct<$c0: INT, $c1: INT>) USING $v2Source",
-          errorMsg
+          s"$errorMsg default.t"
        )
        assertAnalysisError(
          s"CREATE TABLE testcat.t (d struct<$c0: INT, $c1: INT>) USING $v2Source",
-          errorMsg
+          s"$errorMsg t"
        )
        assertAnalysisError(
          s"CREATE OR REPLACE TABLE t (d struct<$c0: INT, $c1: INT>) USING $v2Source",
-          errorMsg
+          s"$errorMsg default.t"
        )
        assertAnalysisError(
          s"CREATE OR REPLACE TABLE testcat.t (d struct<$c0: INT, $c1: INT>) USING $v2Source",
-          errorMsg
+          s"$errorMsg t"
        )
       }
     }
   }
@@ -1786,7 +1790,7 @@ class DataSourceV2SQLSuite
     withTable(t) {
       spark.sql(s"CREATE TABLE $t (id bigint, data string) USING foo")
       testV1Command("ANALYZE TABLE", s"$t COMPUTE STATISTICS")
-      testV1Command("ANALYZE TABLE", s"$t COMPUTE STATISTICS FOR ALL COLUMNS")
+      testV1CommandSupportingTempView("ANALYZE TABLE", s"$t COMPUTE STATISTICS FOR ALL COLUMNS")
     }
   }
 
@@ -1850,7 +1854,7 @@ class DataSourceV2SQLSuite
     val t = "testcat.ns1.ns2.tbl"
     withTable(t) {
       spark.sql(s"CREATE TABLE $t (id bigint, data string) USING foo")
-      testV1Command("SHOW CREATE TABLE", t)
+      testV1CommandSupportingTempView("SHOW CREATE TABLE", t)
     }
   }
 
@@ -1883,8 +1887,8 @@ class DataSourceV2SQLSuite
     withTable(t) {
       spark.sql(s"CREATE TABLE $t (id bigint, data string) USING foo")
 
-      testV1Command("SHOW COLUMNS", s"FROM $t")
-      testV1Command("SHOW COLUMNS", s"IN $t")
+      testV1CommandSupportingTempView("SHOW COLUMNS", s"FROM $t")
+      testV1CommandSupportingTempView("SHOW COLUMNS", s"IN $t")
 
       val e3 = intercept[AnalysisException] {
         sql(s"SHOW COLUMNS FROM tbl IN testcat.ns1.ns2")
@@ -2121,23 +2125,39 @@ class DataSourceV2SQLSuite
     withTable("t") {
       sql("CREATE TABLE t USING json AS SELECT 1 AS i")
       checkAnswer(sql("select * from t"), Row(1))
-      checkAnswer(sql("select * from spark_catalog.t"), Row(1))
       checkAnswer(sql("select * from spark_catalog.default.t"), Row(1))
     }
   }
 
+  test("SPARK-30885: v1 table name should be fully qualified") {
+    def assertWrongTableIdent(): Unit = {
+      withTable("t") {
+        sql("CREATE TABLE t USING json AS SELECT 1 AS i")
+        val e = intercept[AnalysisException] {
+          sql("select * from spark_catalog.t")
+        }
+        assert(e.message.contains("Table or view not found: spark_catalog.t"))
+      }
+    }
+
+    assertWrongTableIdent()
+    // unset this config to use the default v2 session catalog.
+    spark.conf.unset(V2_SESSION_CATALOG_IMPLEMENTATION.key)
+    assertWrongTableIdent()
+  }
+
   test("SPARK-30259: session catalog can be specified in CREATE TABLE AS SELECT command") {
     withTable("tbl") {
-      val ident = Identifier.of(Array(), "tbl")
-      sql("CREATE TABLE spark_catalog.tbl USING json AS SELECT 1 AS i")
+      val ident = Identifier.of(Array("default"), "tbl")
+      sql("CREATE TABLE spark_catalog.default.tbl USING json AS SELECT 1 AS i")
       assert(catalog("spark_catalog").asTableCatalog.tableExists(ident) === true)
     }
   }
 
   test("SPARK-30259: session catalog can be specified in CREATE TABLE command") {
     withTable("tbl") {
-      val ident = Identifier.of(Array(), "tbl")
-      sql("CREATE TABLE spark_catalog.tbl (col string) USING json")
+      val ident = Identifier.of(Array("default"), "tbl")
+      sql("CREATE TABLE spark_catalog.default.tbl (col string) USING json")
       assert(catalog("spark_catalog").asTableCatalog.tableExists(ident) === true)
     }
   }
@@ -2146,7 +2166,7 @@ class DataSourceV2SQLSuite
     // unset this config to use the default v2 session catalog.
     spark.conf.unset(V2_SESSION_CATALOG_IMPLEMENTATION.key)
 
-    withTable("spark_catalog.t", "testcat.ns.t") {
+    withTable("spark_catalog.default.t", "testcat.ns.t") {
      sql("CREATE TABLE t USING parquet AS SELECT 1")
      sql("CREATE TABLE testcat.ns.t USING parquet AS SELECT 2")
 
@@ -2168,17 +2188,18 @@ class DataSourceV2SQLSuite
     withTempView("t") {
       spark.range(10).createTempView("t")
-      withView(s"$sessionCatalogName.v") {
+      withView(s"$sessionCatalogName.default.v") {
         val e = intercept[AnalysisException] {
-          sql(s"CREATE VIEW $sessionCatalogName.v AS SELECT * FROM t")
+          sql(s"CREATE VIEW $sessionCatalogName.default.v AS SELECT * FROM t")
         }
         assert(e.message.contains("referencing a temporary view"))
       }
     }
 
     withTempView("t") {
-      withView(s"$sessionCatalogName.v") {
-        sql(s"CREATE VIEW $sessionCatalogName.v AS SELECT t1.col FROM t t1 JOIN ns1.ns2.t t2")
+      withView(s"$sessionCatalogName.default.v") {
+        sql(s"CREATE VIEW $sessionCatalogName.default.v " +
+          "AS SELECT t1.col FROM t t1 JOIN ns1.ns2.t t2")
         sql(s"USE $sessionCatalogName")
         // The view should read data from table `testcat.ns1.ns2.t` not the temp view.
         spark.range(10).createTempView("t")
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/SupportsCatalogOptionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/SupportsCatalogOptionsSuite.scala
index 7bff955b18360..9693a10f9afca 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/SupportsCatalogOptionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/SupportsCatalogOptionsSuite.scala
@@ -75,7 +75,12 @@ class SupportsCatalogOptionsSuite extends QueryTest with SharedSparkSession with
     withCatalogOption.foreach(cName => dfw.option("catalog", cName))
     dfw.partitionBy(partitionBy: _*).save()
 
-    val table = catalog(withCatalogOption.getOrElse(SESSION_CATALOG_NAME)).loadTable("t1")
+    val ident = if (withCatalogOption.isEmpty) {
+      Identifier.of(Array("default"), "t1")
+    } else {
+      Identifier.of(Array(), "t1")
+    }
+    val table = catalog(withCatalogOption.getOrElse(SESSION_CATALOG_NAME)).loadTable(ident)
     val namespace = withCatalogOption.getOrElse("default")
     assert(table.name() === s"$namespace.t1", "Table identifier was wrong")
     assert(table.partitioning().length === partitionBy.length, "Partitioning did not match")
@@ -134,7 +139,7 @@ class SupportsCatalogOptionsSuite extends QueryTest with SharedSparkSession with
     val dfw = df.write.format(format).mode(SaveMode.Ignore).option("name", "t1")
     dfw.save()
 
-    val table = catalog(SESSION_CATALOG_NAME).loadTable("t1")
+    val table = catalog(SESSION_CATALOG_NAME).loadTable(Identifier.of(Array("default"), "t1"))
     assert(table.partitioning().isEmpty, "Partitioning should be empty")
     assert(table.schema() === new StructType().add("id", LongType), "Schema did not match")
     assert(load("t1", None).count() === 0)
@@ -279,7 +284,12 @@ class CatalogSupportingInMemoryTableProvider
   override def extractIdentifier(options: CaseInsensitiveStringMap): Identifier = {
     val name = options.get("name")
     assert(name != null, "The name should be provided for this table")
-    Identifier.of(Array.empty, name)
+    val namespace = if (options.containsKey("catalog")) {
+      Array[String]()
+    } else {
+      Array("default")
+    }
+    Identifier.of(namespace, name)
   }
 
   override def extractCatalog(options: CaseInsensitiveStringMap): String = {
3f6ac0b7f8d3c..637cf2fd16515 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/TestV2SessionCatalogBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/TestV2SessionCatalogBase.scala
@@ -22,6 +22,7 @@ import java.util.concurrent.ConcurrentHashMap
 
 import scala.collection.JavaConverters._
 
+import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
 import org.apache.spark.sql.connector.catalog.{DelegatingCatalogExtension, Identifier, Table}
 import org.apache.spark.sql.connector.expressions.Transform
 import org.apache.spark.sql.types.StructType
@@ -41,23 +42,14 @@ private[connector] trait TestV2SessionCatalogBase[T <: Table] extends Delegating
       partitions: Array[Transform],
       properties: util.Map[String, String]): T
 
-  protected def fullIdentifier(ident: Identifier): Identifier = {
-    if (ident.namespace().isEmpty) {
-      Identifier.of(Array("default"), ident.name())
-    } else {
-      ident
-    }
-  }
-
   override def loadTable(ident: Identifier): Table = {
-    val fullIdent = fullIdentifier(ident)
-    if (tables.containsKey(fullIdent)) {
-      tables.get(fullIdent)
+    if (tables.containsKey(ident)) {
+      tables.get(ident)
     } else {
       // Table was created through the built-in catalog
-      val t = super.loadTable(fullIdent)
+      val t = super.loadTable(ident)
       val table = newTable(t.name(), t.schema(), t.partitioning(), t.properties())
-      tables.put(fullIdent, table)
+      tables.put(ident, table)
       table
     }
   }
@@ -69,13 +61,12 @@ private[connector] trait TestV2SessionCatalogBase[T <: Table] extends Delegating
       properties: util.Map[String, String]): Table = {
     val created = super.createTable(ident, schema, partitions, properties)
     val t = newTable(created.name(), schema, partitions, properties)
-    val fullIdent = fullIdentifier(ident)
-    tables.put(fullIdent, t)
+    tables.put(ident, t)
     t
   }
 
   override def dropTable(ident: Identifier): Boolean = {
-    tables.remove(fullIdentifier(ident))
+    tables.remove(ident)
     super.dropTable(ident)
   }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
index 9a393f19ce9bb..575efec364812 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
@@ -79,7 +79,7 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils {
         var e = intercept[AnalysisException] {
           sql("CREATE VIEW jtv1 AS SELECT * FROM temp_jtv1 WHERE id < 6")
         }.getMessage
-        assert(e.contains("Not allowed to create a permanent view `jtv1` by " +
+        assert(e.contains("Not allowed to create a permanent view `default`.`jtv1` by " +
          "referencing a temporary view temp_jtv1. " +
          "Please create a temp view instead by CREATE TEMP VIEW"))
 
@@ -88,8 +88,8 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils {
        e = intercept[AnalysisException] {
          sql(s"CREATE VIEW jtv1 AS SELECT * FROM $globalTempDB.global_temp_jtv1 WHERE id < 6")
        }.getMessage
-        assert(e.contains(s"Not allowed to create a permanent view `jtv1` by referencing " +
-          s"a temporary view global_temp.global_temp_jtv1"))
+        assert(e.contains("Not allowed to create a permanent view `default`.`jtv1` by " +
+          "referencing a temporary view global_temp.global_temp_jtv1"))
       }
     }
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index dbf4b09403423..e4bf721b1d58d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -473,17 +473,12 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
     withEmptyDirInTablePath("tab1") { tableLoc =>
       val hiddenGarbageFile = new File(tableLoc.getCanonicalPath, ".garbage")
       hiddenGarbageFile.createNewFile()
-      val exMsg = "Can not create the managed table('`tab1`'). The associated location"
       val exMsgWithDefaultDB =
         "Can not create the managed table('`default`.`tab1`'). The associated location"
       var ex = intercept[AnalysisException] {
         sql(s"CREATE TABLE tab1 USING ${dataSource} AS SELECT 1, 'a'")
       }.getMessage
-      if (isUsingHiveMetastore) {
-        assert(ex.contains(exMsgWithDefaultDB))
-      } else {
-        assert(ex.contains(exMsg))
-      }
+      assert(ex.contains(exMsgWithDefaultDB))
 
       ex = intercept[AnalysisException] {
         sql(s"CREATE TABLE tab1 (col1 int, col2 string) USING ${dataSource}")
@@ -509,8 +504,8 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
         val ex = intercept[AnalysisException] {
           sql("ALTER TABLE tab1 RENAME TO tab2")
         }.getMessage
-        val expectedMsg = "Can not rename the managed table('`tab1`'). The associated location"
-        assert(ex.contains(expectedMsg))
+        assert(ex.contains(
+          "Can not rename the managed table('`default`.`tab1`'). The associated location"))
       }
     }
   }
@@ -640,7 +635,8 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
        val errMsg = intercept[AnalysisException] {
          sql(s"CREATE TABLE t($c0 INT, $c1 INT) USING parquet")
        }.getMessage
-        assert(errMsg.contains("Found duplicate column(s) in the table definition of `t`"))
+        assert(errMsg.contains(
+          "Found duplicate column(s) in the table definition of `default`.`t`"))
       }
     }
   }
@@ -649,7 +645,7 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
     val e = intercept[AnalysisException] {
       sql("CREATE TABLE tbl(a int, b string) USING json PARTITIONED BY (c)")
     }
-    assert(e.message == "partition column c is not defined in table tbl, " +
+    assert(e.message == "partition column c is not defined in table default.tbl, " +
       "defined table columns are: a, b")
   }
 
@@ -657,7 +653,7 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
     val e = intercept[AnalysisException] {
       sql("CREATE TABLE tbl(a int, b string) USING json CLUSTERED BY (c) INTO 4 BUCKETS")
     }
-    assert(e.message == "bucket column c is not defined in table tbl, " +
+    assert(e.message == "bucket column c is not defined in table default.tbl, " +
       "defined table columns are: a, b")
   }
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
index 7b3fb68174234..fc793534641df 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
@@ -102,10 +102,10 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
   }
 
   test("uncache of nonexistent tables") {
-    val expectedErrorMsg = "Table or view not found: nonexistentTable"
+    val expectedErrorMsg = "Table or view not found:"
     // make sure table doesn't exist
     var e = intercept[AnalysisException](spark.table("nonexistentTable")).getMessage
-    assert(e.contains(expectedErrorMsg))
+    assert(e.contains(s"$expectedErrorMsg nonexistentTable"))
     e = intercept[AnalysisException] {
       uncacheTable("nonexistentTable")
     }.getMessage
@@ -113,7 +113,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
     e = intercept[AnalysisException] {
       sql("UNCACHE TABLE nonexistentTable")
     }.getMessage
-    assert(e.contains(expectedErrorMsg))
+    assert(e.contains(s"$expectedErrorMsg default.nonexistentTable"))
     sql("UNCACHE TABLE IF EXISTS nonexistentTable")
   }
 
@@ -403,13 +403,13 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
     // Cache the table 'cachedTable' in default db without qualified table name , and then
     // check whether the table is cached with expected name.
     sql("CACHE TABLE cachedTable OPTIONS('storageLevel' 'DISK_ONLY')")
-    assertCached(sql("SELECT * FROM cachedTable"), "`cachedTable`", DISK_ONLY)
+    assertCached(sql("SELECT * FROM cachedTable"), "`default`.`cachedTable`", DISK_ONLY)
     assert(spark.catalog.isCached("cachedTable"), "Table 'cachedTable' should be cached.")
 
     // Refresh the table 'cachedTable' in default db with unqualified table name, and then
     // check whether the table is still cached with the same name.
     sql("REFRESH TABLE cachedTable")
-    assertCached(sql("SELECT * FROM cachedTable"), "`cachedTable`", DISK_ONLY)
+    assertCached(sql("SELECT * FROM cachedTable"), "`default`.`cachedTable`", DISK_ONLY)
     assert(spark.catalog.isCached("cachedTable"),
       "Table 'cachedTable' should be cached after refreshing with its unqualified name.")
 
@@ -420,7 +420,8 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
     // 'default.cachedTable', instead of 'cachedTable'
     activateDatabase(db) {
       sql("REFRESH TABLE default.cachedTable")
-      assertCached(sql("SELECT * FROM default.cachedTable"), "`cachedTable`", DISK_ONLY)
+      assertCached(
+        sql("SELECT * FROM default.cachedTable"), "`default`.`cachedTable`", DISK_ONLY)
       assert(spark.catalog.isCached("default.cachedTable"),
         "Table 'cachedTable' should be cached after refreshing with its qualified name.")
     }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveShowCreateTableSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveShowCreateTableSuite.scala
index e5d572c90af38..99db1e3cbd18c 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveShowCreateTableSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveShowCreateTableSuite.scala
@@ -219,7 +219,7 @@ class HiveShowCreateTableSuite extends ShowCreateTableSuite with TestHiveSinglet
       val createTable = "CREATE TABLE `t1` (`a` STRUCT<`b`: STRING>) USING hive"
       sql(createTable)
       val shownDDL = getShowDDL("SHOW CREATE TABLE t1")
-      assert(shownDDL == createTable.dropRight(" USING hive".length))
+      assert(shownDDL == "CREATE TABLE `default`.`t1` (`a` STRUCT<`b`: STRING>)")
       checkCreateHiveTableOrView("t1")
     }
   }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSQLViewSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSQLViewSuite.scala
index 5e6e114fc3fdc..fa43ff14fd796 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSQLViewSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSQLViewSuite.scala
@@ -80,8 +80,8 @@ class HiveSQLViewSuite extends SQLViewSuite with TestHiveSingleton {
         val e = intercept[AnalysisException] {
           sql(s"CREATE VIEW view1 AS SELECT $tempFunctionName(id) from tab1")
         }.getMessage
-        assert(e.contains("Not allowed to create a permanent view `view1` by referencing " +
-          s"a temporary function `$tempFunctionName`"))
+        assert(e.contains("Not allowed to create a permanent view `default`.`view1` by " +
+          s"referencing a temporary function `$tempFunctionName`"))
       }
     }
   }