-
Notifications
You must be signed in to change notification settings - Fork 21
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #124 from osopardo1/42-table-writes-catalog
Add Table properties to Qbeast
- Loading branch information
Showing
39 changed files
with
2,721 additions
and
243 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
41 changes: 41 additions & 0 deletions
41
src/main/scala/io/qbeast/spark/internal/rules/QbeastAnalysis.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,41 @@ | ||
/* | ||
* Copyright 2021 Qbeast Analytics, S.L. | ||
*/ | ||
package io.qbeast.spark.internal.rules | ||
|
||
import io.qbeast.spark.internal.sources.v2.QbeastTableImpl | ||
import org.apache.spark.sql.SparkSession | ||
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan | ||
import org.apache.spark.sql.catalyst.rules.Rule | ||
import org.apache.spark.sql.execution.datasources.LogicalRelation | ||
import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation | ||
|
||
/** | ||
* Analyzes and resolves the Spark Plan before Optimization | ||
* @param spark the SparkSession | ||
*/ | ||
/**
 * Analyzes and resolves the Spark Plan before Optimization
 * @param spark the SparkSession
 */
class QbeastAnalysis(spark: SparkSession) extends Rule[LogicalPlan] {

  /**
   * Converts a V2 relation over a Qbeast table into its V1 counterpart.
   *
   * @param v2Relation the DataSourceV2Relation to convert
   * @param qbeastTable the underlying Qbeast table implementation
   * @return a LogicalRelation wrapping the table's BaseRelation
   */
  private def toV1Relation(
      v2Relation: DataSourceV2Relation,
      qbeastTable: QbeastTableImpl): LogicalRelation = {
    val baseRelation = qbeastTable.toBaseRelation
    LogicalRelation(baseRelation, v2Relation.output, None, isStreaming = false)
  }

  override def apply(plan: LogicalPlan): LogicalPlan = plan transformDown {
    // Workaround: QbeastTableImpl does not implement SupportsRead yet,
    // so reads over a Qbeast V2 relation are downgraded to a V1 LogicalRelation.
    case v2Relation @ DataSourceV2Relation(table: QbeastTableImpl, _, _, _, _) =>
      toV1Relation(v2Relation, table)
  }

}
39 changes: 39 additions & 0 deletions
39
src/main/scala/io/qbeast/spark/internal/rules/SaveAsTableRule.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,39 @@ | ||
/* | ||
* Copyright 2021 Qbeast Analytics, S.L. | ||
*/ | ||
package io.qbeast.spark.internal.rules | ||
|
||
import io.qbeast.spark.internal.sources.catalog.QbeastCatalogUtils.isQbeastProvider | ||
import org.apache.spark.internal.Logging | ||
import org.apache.spark.sql.SparkSession | ||
import org.apache.spark.sql.catalyst.plans.logical.{ | ||
CreateTableAsSelect, | ||
LogicalPlan, | ||
ReplaceTableAsSelect | ||
} | ||
import org.apache.spark.sql.catalyst.rules.Rule | ||
|
||
/** | ||
* Rule class that enforces to pass all the write options to the Table Implementation | ||
* @param spark the SparkSession | ||
*/ | ||
/**
 * Rule class that enforces to pass all the write options to the Table Implementation
 * @param spark the SparkSession
 */
class SaveAsTableRule(spark: SparkSession) extends Rule[LogicalPlan] with Logging {

  override def apply(plan: LogicalPlan): LogicalPlan = {
    // For Qbeast CREATE/REPLACE TABLE AS SELECT statements, fold the write
    // options into the table properties so that required options such as
    // columnsToIndex reach the table creation.
    plan transformDown {
      case ctas: CreateTableAsSelect if isQbeastProvider(ctas.properties) =>
        ctas.copy(properties = ctas.properties ++ ctas.writeOptions)
      case rtas: ReplaceTableAsSelect if isQbeastProvider(rtas.properties) =>
        rtas.copy(properties = rtas.properties ++ rtas.writeOptions)
    }
  }

}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.