Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

V1 ddl extended keyword #1455

Merged
merged 4 commits into from
May 6, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
161 changes: 137 additions & 24 deletions partiql-ast/api/partiql-ast.api

Large diffs are not rendered by default.

12 changes: 12 additions & 0 deletions partiql-ast/src/main/kotlin/org/partiql/ast/helpers/ToLegacyAst.kt
Original file line number Diff line number Diff line change
Expand Up @@ -136,6 +136,12 @@ private class AstTranslator(val metas: Map<String, MetaContainer>) : AstBaseVisi
}
val tableName = node.name.symbol
val def = node.definition?.let { visitTableDefinition(it, ctx) }
if (node.partitionBy != null) {
error("The legacy AST does not support Partition BY in create Table")
}
if (node.tableProperties.isNotEmpty()) {
error("The legacy AST does not support TBLProperties in create Table")
}
ddl(createTable(tableName, def), metas)
}

Expand Down Expand Up @@ -187,6 +193,12 @@ private class AstTranslator(val metas: Map<String, MetaContainer>) : AstBaseVisi
val name = node.name.symbol
val type = visitType(node.type, ctx)
val constraints = node.constraints.translate<PartiqlAst.ColumnConstraint>(ctx)
if (node.isOptional) {
error("The legacy AST does not support optional field declaration")
}
if (node.comment != null) {
error("The legacy AST does not support comment clause")
}
columnDeclaration(name, type, constraints, metas)
}

Expand Down
23 changes: 23 additions & 0 deletions partiql-ast/src/main/resources/partiql_ast.ion
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,12 @@ ddl_op::[
create_table::{
name: identifier,
definition: optional::table_definition,
// This is an expression for backward compatibility
// For now, we support `PARTITION BY (<id> (, <id>)*)`
// But in the future, we might support additional partition expressions:
// such as `PARTITION BY RANGE(..)`
partition_by: optional::partition_by,
table_properties: list::[table_property],
},

// CREATE INDEX [<identifier>] ON <identifier> (<path> [, <path>]...)
Expand Down Expand Up @@ -233,6 +239,8 @@ type::[
// for struct subfield. But modeling this to be a list of constraints
// to prevent future breaking changes.
constraints: list::[constraint],
is_optional: bool,
comment: optional::string,
}
],
}, // STRUCT <fields>
Expand Down Expand Up @@ -829,6 +837,9 @@ table_definition::{
name: '.identifier.symbol',
type: '.type',
constraints: list::[constraint],
// TODO: Consider modeling this as a constraint?
is_optional: bool,
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Maybe we can prepend this with a TODO to make it discoverable later. Also, on syntax, does it matter whether this is a constraint or not?

Copy link
Contributor Author

@yliuuuu yliuuuu May 3, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't think it matters on the syntax.

The only thing is that Attribute Constraint in SQL is often after the data type declaration, but this is more of a terminology thing.

comment: optional::string,
}
],
}
Expand All @@ -847,6 +858,18 @@ constraint::{
],
}

partition_by::[
attr_list :: {
list: list::['.identifier.symbol']
},
// We can add other commonly supported partition expressions like `range` later
]

table_property::{
name: string,
value: '.value',
}

// SQL-99 Table 11
datetime_field::[
YEAR, // 0001-9999
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -423,7 +423,9 @@ class ToLegacyAstTest {
fields += org.partiql.ast.typeStructField(
org.partiql.ast.identifierSymbol("a", Identifier.CaseSensitivity.INSENSITIVE),
typeInt2(),
emptyList()
emptyList(),
false,
null
)
}
},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -246,6 +246,7 @@ internal class PartiQLPigVisitor(
throw ParserException("PIG Parser does not support qualified name as table name", ErrorCode.PARSE_UNEXPECTED_TOKEN)
}
val def = ctx.tableDef()?.let { visitTableDef(it) }
ctx.tableExtension().map { visit(it) }
createTable_(name, def, ctx.CREATE().getSourceMetaContainer())
}

Expand All @@ -264,6 +265,12 @@ internal class PartiQLPigVisitor(
val name = visitSymbolPrimitive(ctx.columnName().symbolPrimitive()).name.text
val type = visit(ctx.type()) as PartiqlAst.Type
val constrs = ctx.columnConstraint().map { visitColumnConstraint(it) }
if (ctx.OPTIONAL() != null) {
throw ParserException("PIG Parser does not support OPTIONAL Field", ErrorCode.PARSE_UNEXPECTED_TOKEN)
}
if (ctx.comment() != null) {
throw ParserException("PIG Parser does not support COMMENT Clause", ErrorCode.PARSE_UNEXPECTED_TOKEN)
}
columnDeclaration(name, type, constrs)
}

Expand All @@ -281,6 +288,12 @@ internal class PartiQLPigVisitor(
columnNull()
}

override fun visitTableConstrDeclaration(ctx: PartiQLParser.TableConstrDeclarationContext) = throw ParserException("PIG Parser does not support tuple level constraint", ErrorCode.PARSE_UNEXPECTED_TOKEN)

override fun visitTblExtensionPartition(ctx: PartiQLParser.TblExtensionPartitionContext) = throw ParserException("PIG Parser does not support PARTITION BY Clause", ErrorCode.PARSE_UNEXPECTED_TOKEN)

override fun visitTblExtensionTblProperties(ctx: PartiQLParser.TblExtensionTblPropertiesContext) = throw ParserException("PIG Parser does not support TBLPROPERTIES Clause", ErrorCode.PARSE_UNEXPECTED_TOKEN)

/**
*
* EXECUTE
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,44 @@ internal class PartiQLParserDDLTest : PartiQLParserTestBase() {
code = ErrorCode.PARSE_UNEXPECTED_TOKEN,
context = mapOf(),
),
ParserErrorTestCase(
description = "PIG Parser does not support OPTIONAL Attribute",
query = """
CREATE TABLE tbl (
a OPTIONAL INT2
)
""".trimIndent(),
code = ErrorCode.PARSE_UNEXPECTED_TOKEN,
context = mapOf(),
),
ParserErrorTestCase(
description = "PIG Parser does not support COMMENT keyword",
query = """
CREATE TABLE tbl (
a INT2 COMMENT 'this is a comment'
)
""".trimIndent(),
code = ErrorCode.PARSE_UNEXPECTED_TOKEN,
context = mapOf(),
),
ParserErrorTestCase(
description = "PIG Parser does not support PARTITION BY keyword",
query = """
CREATE TABLE tbl
PARTITION BY (a, b)
""".trimIndent(),
code = ErrorCode.PARSE_UNEXPECTED_TOKEN,
context = mapOf(),
),
ParserErrorTestCase(
description = "PIG Parser does not support TBLPROPERTIES keyword",
query = """
CREATE TABLE tbl
TBLPROPERTIES ('k1' = 'v1')
""".trimIndent(),
code = ErrorCode.PARSE_UNEXPECTED_TOKEN,
context = mapOf(),
),

// Putting those tests here are they are impacted by DDL implementation
ParserErrorTestCase(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1067,7 +1067,7 @@ class PartiQLParserMatchTest : PartiQLParserTestBase() {

@Test
fun prefilters() = assertExpression(
"SELECT u as banCandidate FROM g MATCH (p:Post Where p.isFlagged = true) <-[:createdPost]- (u:Usr WHERE u.isBanned = false AND u.karma < 20) -[:createdComment]->(c:Comment WHERE c.isFlagged = true) WHERE p.title LIKE '%considered harmful%'",
"SELECT u as banCandidate FROM g MATCH (p:Post Where p.isFlagged = true) <-[:createdPost]- (u:Usr WHERE u.isBanned = false AND u.karma < 20) -[:createdComment]->(c:\"Comment\" WHERE c.isFlagged = true) WHERE p.title LIKE '%considered harmful%'",
) {
PartiqlAst.build {
select(
Expand Down
21 changes: 18 additions & 3 deletions partiql-parser/src/main/antlr/PartiQL.g4
Original file line number Diff line number Diff line change
Expand Up @@ -77,14 +77,15 @@ qualifiedName : (qualifier+=symbolPrimitive PERIOD)* name=symbolPrimitive;
tableName : symbolPrimitive;
columnName : symbolPrimitive;
constraintName : symbolPrimitive;
comment : COMMENT LITERAL_STRING;

ddl
: createCommand
| dropCommand
;

createCommand
: CREATE TABLE qualifiedName ( PAREN_LEFT tableDef PAREN_RIGHT )? # CreateTable
: CREATE TABLE qualifiedName ( PAREN_LEFT tableDef PAREN_RIGHT )? tableExtension* # CreateTable
| CREATE INDEX ON symbolPrimitive PAREN_LEFT pathSimple ( COMMA pathSimple )* PAREN_RIGHT # CreateIndex
;

Expand All @@ -98,7 +99,7 @@ tableDef
;

tableDefPart
: columnName type columnConstraint* # ColumnDeclaration
: columnName OPTIONAL? type columnConstraint* comment? # ColumnDeclaration
| ( CONSTRAINT constraintName )? tableConstraintDef # TableConstrDeclaration
;

Expand Down Expand Up @@ -136,6 +137,20 @@ uniqueConstraintDef
// but we at least can eliminate SFW query here.
searchCondition : exprOr;

// SQL/Hive DDL extension: supports additional table metadata such as PARTITION BY, TBLPROPERTIES, etc.
tableExtension
: PARTITION BY partitionBy # TblExtensionPartition
| TBLPROPERTIES PAREN_LEFT keyValuePair (COMMA keyValuePair)* PAREN_RIGHT # TblExtensionTblProperties
;

// Limiting the scope to only allow String as valid value for now
keyValuePair : key=LITERAL_STRING EQ value=LITERAL_STRING;

// For now: just support a list of column name
// In the future, we might support common partition expression such as Hash(), Range(), etc.
partitionBy
: PAREN_LEFT columnName (COMMA columnName)* PAREN_RIGHT #PartitionColList
;
/**
*
* DATA MANIPULATION LANGUAGE (DML)
Expand Down Expand Up @@ -836,5 +851,5 @@ type
;

structField
: columnName COLON type columnConstraint*
: columnName OPTIONAL? COLON type columnConstraint* comment?
;
3 changes: 3 additions & 0 deletions partiql-parser/src/main/antlr/PartiQLTokens.g4
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ COALESCE: 'COALESCE';
COLLATE: 'COLLATE';
COLLATION: 'COLLATION';
COLUMN: 'COLUMN';
COMMENT: 'COMMENT';
COMMIT: 'COMMIT';
CONNECT: 'CONNECT';
CONNECTION: 'CONNECTION';
Expand Down Expand Up @@ -175,6 +176,7 @@ ON: 'ON';
ONLY: 'ONLY';
OPEN: 'OPEN';
OPTION: 'OPTION';
OPTIONAL: 'OPTIONAL';
OR: 'OR';
ORDER: 'ORDER';
OUTER: 'OUTER';
Expand Down Expand Up @@ -287,6 +289,7 @@ MODIFIED: 'MODIFIED';
NEW: 'NEW';
OLD: 'OLD';
NOTHING: 'NOTHING';
TBLPROPERTIES: 'TBLPROPERTIES';

/**
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ import org.partiql.ast.GroupBy
import org.partiql.ast.Identifier
import org.partiql.ast.Let
import org.partiql.ast.OnConflict
import org.partiql.ast.PartitionBy
import org.partiql.ast.Path
import org.partiql.ast.Returning
import org.partiql.ast.Select
Expand Down Expand Up @@ -139,6 +140,7 @@ import org.partiql.ast.onConflictActionDoUpdate
import org.partiql.ast.onConflictTargetConstraint
import org.partiql.ast.onConflictTargetSymbols
import org.partiql.ast.orderBy
import org.partiql.ast.partitionByAttrList
import org.partiql.ast.path
import org.partiql.ast.pathStepIndex
import org.partiql.ast.pathStepSymbol
Expand Down Expand Up @@ -176,6 +178,7 @@ import org.partiql.ast.statementExplainTargetDomain
import org.partiql.ast.statementQuery
import org.partiql.ast.tableDefinition
import org.partiql.ast.tableDefinitionAttribute
import org.partiql.ast.tableProperty
import org.partiql.ast.typeAny
import org.partiql.ast.typeArray
import org.partiql.ast.typeBag
Expand Down Expand Up @@ -610,7 +613,26 @@ internal class PartiQLParserDefault : PartiQLParser {
/**
 * Translates `CREATE TABLE <qualifiedName> [( <tableDef> )] [PARTITION BY ...] [TBLPROPERTIES (...)]`.
 *
 * The grammar allows any number of `tableExtension` clauses in any order; at most one
 * PARTITION BY and at most one TBLPROPERTIES clause is accepted here — duplicates are a parse error.
 */
override fun visitCreateTable(ctx: GeneratedParser.CreateTableContext) = translate(ctx) {
    val table = visitQualifiedName(ctx.qualifiedName())
    val definition = ctx.tableDef()?.let { visitTableDef(it) }
    val extensions = ctx.tableExtension()

    // PARTITION BY: zero or one clause; translate it via the dedicated visitor.
    val partitionClauses = extensions.filterIsInstance<GeneratedParser.TblExtensionPartitionContext>()
    if (partitionClauses.size > 1) throw error(ctx, "Expect one PARTITION BY clause.")
    val partitionBy = partitionClauses.firstOrNull()?.let { clause -> visitTblExtensionPartition(clause) }

    // TBLPROPERTIES: zero or one clause; each entry is a string-literal key/value pair.
    val propertyClauses = extensions.filterIsInstance<GeneratedParser.TblExtensionTblPropertiesContext>()
    if (propertyClauses.size > 1) throw error(ctx, "Expect one TBLPROPERTIES clause.")
    val tblProperties = propertyClauses.firstOrNull()
        ?.keyValuePair()
        ?.map { pair -> tableProperty(pair.key.getStringValue(), stringValue(pair.value.getStringValue())) }
        ?: emptyList()

    ddlOpCreateTable(table, definition, partitionBy, tblProperties)
}

override fun visitCreateIndex(ctx: GeneratedParser.CreateIndexContext) = translate(ctx) {
Expand Down Expand Up @@ -640,7 +662,12 @@ internal class PartiQLParserDefault : PartiQLParser {
isValidTypeDeclarationOrThrow(it, ctx.type())
}
val constraints = ctx.columnConstraint().map { visitColumnConstraint(it) }
tableDefinitionAttribute(name, type, constraints)
val optional = when (ctx.OPTIONAL()) {
null -> false
else -> true
}
val comment = ctx.comment()?.LITERAL_STRING()?.getStringValue()
tableDefinitionAttribute(name, type, constraints, optional, comment)
}

/**
Expand Down Expand Up @@ -710,6 +737,13 @@ internal class PartiQLParserDefault : PartiQLParser {
constraint(identifier, body)
}

// Unwraps the PARTITION BY table extension by dispatching to the concrete partition-expression
// alternative (currently only PartitionColList) and casting the generic visit result to PartitionBy.
override fun visitTblExtensionPartition(ctx: GeneratedParser.TblExtensionPartitionContext) =
    ctx.partitionBy().accept(this) as PartitionBy

/**
 * Translates `PARTITION BY ( <columnName> (, <columnName>)* )` into a PartitionBy attribute list,
 * visiting each column name as an Identifier.Symbol.
 */
override fun visitPartitionColList(ctx: GeneratedParser.PartitionColListContext) = translate(ctx) {
    val columns = ctx.columnName().map { column -> visitAs<Identifier.Symbol>(column.symbolPrimitive()) }
    partitionByAttrList(columns)
}

/**
*
* EXECUTE
Expand Down Expand Up @@ -2255,8 +2289,13 @@ internal class PartiQLParserDefault : PartiQLParser {
else -> throw error(it, "Only NULL Constraint and NOT NULL Constraint are allowed in Struct field")
}
}
val optional = when (structFieldCtx.OPTIONAL()) {
null -> false
else -> true
}
val comment = structFieldCtx.comment()?.LITERAL_STRING()?.getStringValue()

typeStructField(name, type, constraints)
typeStructField(name, type, constraints, optional, comment)
}
typeStruct(fields)
}
Expand Down
Loading
Loading