[SPARK-20439] [SQL] Fix Catalog API listTables and getTable when failed to fetch table metadata #17730

Closed · wants to merge 3 commits
CatalogImpl.scala:

@@ -18,6 +18,7 @@
 package org.apache.spark.sql.internal
 
 import scala.reflect.runtime.universe.TypeTag
+import scala.util.control.NonFatal
 
 import org.apache.spark.annotation.Experimental
 import org.apache.spark.sql._
@@ -98,14 +99,27 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
     CatalogImpl.makeDataset(tables, sparkSession)
   }
 
+  /**
+   * Returns a Table for the given table/view or temporary view.
+   *
+   * Note that this function requires that the table already exist in the Catalog.
+   *
+   * If the table metadata retrieval fails for any reason (e.g., the table's serde class
+   * is not accessible, or the table type is not accepted by Spark SQL), this function
+   * still returns the corresponding Table, just without the description and tableType.
+   */
   private def makeTable(tableIdent: TableIdentifier): Table = {
-    val metadata = sessionCatalog.getTempViewOrPermanentTableMetadata(tableIdent)
+    val metadata = try {
+      Some(sessionCatalog.getTempViewOrPermanentTableMetadata(tableIdent))
+    } catch {
+      case NonFatal(_) => None
+    }
     val isTemp = sessionCatalog.isTemporaryTable(tableIdent)
     new Table(
       name = tableIdent.table,
-      database = metadata.identifier.database.orNull,
-      description = metadata.comment.orNull,
-      tableType = if (isTemp) "TEMPORARY" else metadata.tableType.name,
+      database = metadata.map(_.identifier.database).getOrElse(tableIdent.database).orNull,
+      description = metadata.map(_.comment.orNull).orNull,
+      tableType = if (isTemp) "TEMPORARY" else metadata.map(_.tableType.name).orNull,
       isTemporary = isTemp)
   }

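The fallback in makeTable (turn a throwing metadata lookup into an Option, then derive each optional field with map(...).orNull) can be sketched in isolation. A minimal standalone illustration, not Spark code; SafeLookupSketch and safeLookup are hypothetical names used only for this example:

import scala.util.control.NonFatal

object SafeLookupSketch extends App {
  // Mirrors makeTable's try/catch: a lookup that may throw becomes an
  // Option, so optional fields can degrade to null via map(...).orNull
  // instead of failing the whole listing.
  def safeLookup[A](fetch: => A): Option[A] =
    try Some(fetch) catch { case NonFatal(_) => None }

  // A lookup that blows up (e.g., an inaccessible serde class) yields None.
  assert(safeLookup(sys.error("serde class not accessible")).isEmpty)
  // A successful lookup is preserved, and orNull hands back the value.
  assert(safeLookup("MANAGED").orNull == "MANAGED")
}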
@@ -197,7 +211,11 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
    * `AnalysisException` when no `Table` can be found.
    */
   override def getTable(dbName: String, tableName: String): Table = {
-    makeTable(TableIdentifier(tableName, Option(dbName)))
+    if (tableExists(dbName, tableName)) {
+      makeTable(TableIdentifier(tableName, Option(dbName)))
+    } else {
+      throw new AnalysisException(s"Table or view '$tableName' not found in database '$dbName'")
+    }
   }
 
   /**

Review thread on the new AnalysisException line:

[Contributor] The doc says "This throws an AnalysisException when no Table can be found." I think we should not change this behavior.

[Member, author] Yes. That is why an AnalysisException is issued here explicitly: makeTable now eats the expected exception.
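For reference, a usage sketch of the contract the reviewers settle on above, assuming a SparkSession named spark and that no table called no_such_table exists in the default database (both names are illustrative):

import org.apache.spark.sql.AnalysisException

// getTable still fails loudly for a missing table, as documented: the
// tableExists check runs before makeTable, which now swallows
// metadata-fetch failures instead of surfacing them.
try {
  spark.catalog.getTable("default", "no_such_table")
} catch {
  case e: AnalysisException => println(s"as documented: ${e.getMessage}")
}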
HiveDDLSuite.scala:

@@ -1197,6 +1197,14 @@ class HiveDDLSuite
       s"CREATE INDEX $indexName ON TABLE $tabName (a) AS 'COMPACT' WITH DEFERRED REBUILD")
     val indexTabName =
       spark.sessionState.catalog.listTables("default", s"*$indexName*").head.table
+
+    // Even if index tables exist, the listTables and getTable APIs should still work
+    checkAnswer(
+      spark.catalog.listTables().toDF(),
+      Row(indexTabName, "default", null, null, false) ::
+        Row(tabName, "default", null, "MANAGED", false) :: Nil)
+    assert(spark.catalog.getTable("default", indexTabName).name === indexTabName)
+
     intercept[TableAlreadyExistsException] {
       sql(s"CREATE TABLE $indexTabName(b int)")
     }
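Read back from the checkAnswer rows above: the Hive index table comes through with a null description and null tableType (Spark cannot fetch its metadata), while the ordinary table keeps its MANAGED type, and both listings succeed instead of throwing. That is exactly the degraded-but-working behavior makeTable's new fallback provides.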